diff --git a/local-test-libxml2-delta-02/afc-libxml2/doc/Makefile.am b/local-test-libxml2-delta-02/afc-libxml2/doc/Makefile.am new file mode 100644 index 0000000000000000000000000000000000000000..aebad07ae6fec705105c9b0fc54d32953562289b --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/doc/Makefile.am @@ -0,0 +1,28 @@ +## Process this file with automake to produce Makefile.in +SUBDIRS = . devhelp + +nobase_dist_doc_DATA = \ + xmlcatalog.html \ + xmllint.html + +dist_man_MANS = xml2-config.1 xmllint.1 xmlcatalog.1 + +EXTRA_DIST = \ + apibuild.py \ + libxml2-api.xml \ + xmlcatalog.xml \ + xmllint.xml \ + meson.build + +DOCBOOK_HTML = http://docbook.sourceforge.net/release/xsl/current/html/docbook.xsl + +rebuild: + cd $(srcdir) && ./apibuild.py + cd $(srcdir) && $(XSLTPROC) --nonet xmllint.xml + cd $(srcdir) && $(XSLTPROC) --nonet -o xmllint.html $(DOCBOOK_HTML) xmllint.xml + cd $(srcdir) && $(XSLTPROC) --nonet xmlcatalog.xml + cd $(srcdir) && $(XSLTPROC) --nonet -o xmlcatalog.html $(DOCBOOK_HTML) xmlcatalog.xml + cd devhelp && $(MAKE) rebuild + cd .. && $(MAKE) rebuild_testapi + +.PHONY: rebuild diff --git a/local-test-libxml2-delta-02/afc-libxml2/doc/apibuild.py b/local-test-libxml2-delta-02/afc-libxml2/doc/apibuild.py new file mode 100644 index 0000000000000000000000000000000000000000..40a2ba0f14538191b51bf6d741fe3c66edf37528 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/doc/apibuild.py @@ -0,0 +1,1930 @@ +#!/usr/bin/env python3 +# +# This is the API builder, it parses the C sources and build the +# API formal description in XML. +# +# See Copyright for the status of this software. 
+# +# daniel@veillard.com +# +import os, sys +import string +import glob + +debug=0 +#debugsym='ignorableWhitespaceSAXFunc' +debugsym=None + +# +# C parser analysis code +# +ignored_files = { + "config.h": "generated portability layer", + "libxml.h": "internal only", + "legacy.c": "legacy code", + "testModule.c": "test tool", + "testapi.c": "generated regression tests", + "runtest.c": "regression tests program", + "runsuite.c": "regression tests program", + "tst.c": "not part of the library", + "test.c": "not part of the library", + "testdso.c": "test for dynamid shared libraries", + "testrecurse.c": "test for entities recursions", + "timsort.h": "Internal header only for xpath.c 2.9.0", + "nanoftp.h": "empty", + "SAX.h": "empty", +} + +ignored_words = { + "WINAPI": (0, "Windows keyword"), + "LIBXML_DLL_IMPORT": (0, "Special macro to flag external keywords"), + "XMLPUBVAR": (0, "Special macro for extern vars for win32"), + "XSLTPUBVAR": (0, "Special macro for extern vars for win32"), + "EXSLTPUBVAR": (0, "Special macro for extern vars for win32"), + "XMLPUBFUN": (0, "Special macro for extern funcs for win32"), + "XSLTPUBFUN": (0, "Special macro for extern funcs for win32"), + "EXSLTPUBFUN": (0, "Special macro for extern funcs for win32"), + "XSLTCALL": (0, "Special macro for win32 calls"), + "EXSLTCALL": (0, "Special macro for win32 calls"), + "__declspec": (3, "Windows keyword"), + "__stdcall": (0, "Windows keyword"), + "ATTRIBUTE_UNUSED": (0, "macro keyword"), + "ATTRIBUTE_DESTRUCTOR": (0, "macro keyword"), + "LIBEXSLT_PUBLIC": (0, "macro keyword"), + "X_IN_Y": (5, "macro function builder"), + "ATTRIBUTE_ALLOC_SIZE": (3, "macro for gcc checking extension"), + "ATTRIBUTE_PRINTF": (5, "macro for gcc printf args checking extension"), + "LIBXML_ATTR_FORMAT": (5, "macro for gcc printf args checking extension"), + "LIBXML_ATTR_ALLOC_SIZE": (3, "macro for gcc checking extension"), + "ATTRIBUTE_NO_SANITIZE": (3, "macro keyword"), + "ATTRIBUTE_NO_SANITIZE_INTEGER": (0, 
"macro keyword"), + "ATTRIBUTE_COUNTED_BY": (3, "macro keyword"), + "XML_DEPRECATED": (0, "macro keyword"), + "XML_DEPRECATED_MEMBER": (0, "macro keyword"), + "XML_GLOBALS_ALLOC": (0, "macro keyword"), + "XML_GLOBALS_ERROR": (0, "macro keyword"), + "XML_GLOBALS_IO": (0, "macro keyword"), + "XML_GLOBALS_PARSER": (0, "macro keyword"), + "XML_GLOBALS_TREE": (0, "macro keyword"), + "XML_THREAD_LOCAL": (0, "macro keyword"), +} + +def escape(raw): + raw = raw.replace('&', '&') + raw = raw.replace('<', '<') + raw = raw.replace('>', '>') + raw = raw.replace("'", ''') + raw = raw.replace('"', '"') + return raw + +class identifier: + def __init__(self, name, header=None, module=None, type=None, lineno = 0, + info=None, extra=None, conditionals = None): + self.name = name + self.header = header + self.module = module + self.type = type + self.info = info + self.extra = extra + self.lineno = lineno + self.static = 0 + if conditionals == None or len(conditionals) == 0: + self.conditionals = None + else: + self.conditionals = conditionals[:] + if self.name == debugsym: + print("=> define %s : %s" % (debugsym, (module, type, info, + extra, conditionals))) + + def __repr__(self): + r = "%s %s:" % (self.type, self.name) + if self.static: + r = r + " static" + if self.module != None: + r = r + " from %s" % (self.module) + if self.info != None: + r = r + " " + repr(self.info) + if self.extra != None: + r = r + " " + repr(self.extra) + if self.conditionals != None: + r = r + " " + repr(self.conditionals) + return r + + + def set_header(self, header): + self.header = header + def set_module(self, module): + self.module = module + def set_type(self, type): + self.type = type + def set_info(self, info): + self.info = info + def set_extra(self, extra): + self.extra = extra + def set_lineno(self, lineno): + self.lineno = lineno + def set_static(self, static): + self.static = static + def set_conditionals(self, conditionals): + if conditionals == None or len(conditionals) == 0: + 
self.conditionals = None + else: + self.conditionals = conditionals[:] + + def get_name(self): + return self.name + def get_header(self): + return self.module + def get_module(self): + return self.module + def get_type(self): + return self.type + def get_info(self): + return self.info + def get_lineno(self): + return self.lineno + def get_extra(self): + return self.extra + def get_static(self): + return self.static + def get_conditionals(self): + return self.conditionals + + def update(self, header, module, type = None, info = None, extra=None, + conditionals=None): + if self.name == debugsym: + print("=> update %s : %s" % (debugsym, (module, type, info, + extra, conditionals))) + if header != None and self.header == None: + self.set_header(module) + if module != None and (self.module == None or self.header == self.module): + self.set_module(module) + if type != None and self.type == None: + self.set_type(type) + if info != None: + self.set_info(info) + if extra != None: + self.set_extra(extra) + if conditionals != None: + self.set_conditionals(conditionals) + +class index: + def __init__(self, name = "noname"): + self.name = name + self.identifiers = {} + self.functions = {} + self.variables = {} + self.includes = {} + self.structs = {} + self.enums = {} + self.typedefs = {} + self.macros = {} + self.references = {} + self.info = {} + + def add_ref(self, name, header, module, static, type, lineno, info=None, extra=None, conditionals = None): + if name[0:2] == '__': + return None + d = None + if name in self.identifiers: + d = self.identifiers[name] + d.update(header, module, type, info, extra, conditionals) + else: + d = identifier(name, header, module, type, lineno, info, extra, conditionals) + self.identifiers[name] = d + + if d != None and static == 1: + d.set_static(1) + + if d != None and name != None and type != None: + self.references[name] = d + + if name == debugsym: + print("New ref: %s" % (d)) + + return d + + def add(self, name, header, module, static, 
type, lineno, info=None, extra=None, conditionals = None): + if name[0:2] == '__': + return None + d = None + if name in self.identifiers: + d = self.identifiers[name] + d.update(header, module, type, info, extra, conditionals) + else: + d = identifier(name, header, module, type, lineno, info, extra, conditionals) + self.identifiers[name] = d + + if d != None and static == 1: + d.set_static(1) + + if d != None and name != None and type != None: + if type == "function": + self.functions[name] = d + elif type == "functype": + self.functions[name] = d + elif type == "variable": + self.variables[name] = d + elif type == "include": + self.includes[name] = d + elif type == "struct": + self.structs[name] = d + elif type == "enum": + self.enums[name] = d + elif type == "typedef": + self.typedefs[name] = d + elif type == "macro": + self.macros[name] = d + else: + print("Unable to register type ", type) + + if name == debugsym: + print("New symbol: %s" % (d)) + + return d + + def merge(self, idx): + for id in list(idx.functions.keys()): + # + # macro might be used to override functions or variables + # definitions + # + if id in self.macros: + del self.macros[id] + if id in self.functions: + print("function %s from %s redeclared in %s" % ( + id, self.functions[id].header, idx.functions[id].header)) + else: + self.functions[id] = idx.functions[id] + self.identifiers[id] = idx.functions[id] + for id in list(idx.variables.keys()): + # + # macro might be used to override functions or variables + # definitions + # + if id in self.macros: + del self.macros[id] + if id in self.variables: + print("variable %s from %s redeclared in %s" % ( + id, self.variables[id].header, idx.variables[id].header)) + else: + self.variables[id] = idx.variables[id] + self.identifiers[id] = idx.variables[id] + for id in list(idx.structs.keys()): + if id in self.structs: + print("struct %s from %s redeclared in %s" % ( + id, self.structs[id].header, idx.structs[id].header)) + else: + self.structs[id] = 
idx.structs[id] + self.identifiers[id] = idx.structs[id] + for id in list(idx.typedefs.keys()): + if id in self.typedefs: + print("typedef %s from %s redeclared in %s" % ( + id, self.typedefs[id].header, idx.typedefs[id].header)) + else: + self.typedefs[id] = idx.typedefs[id] + self.identifiers[id] = idx.typedefs[id] + for id in list(idx.macros.keys()): + # + # macro might be used to override functions or variables + # definitions + # + if id in self.variables: + continue + if id in self.functions: + continue + if id in self.enums: + continue + if id in self.macros and id != 'XML_OP': + print("macro %s from %s redeclared in %s" % ( + id, self.macros[id].header, idx.macros[id].header)) + else: + self.macros[id] = idx.macros[id] + self.identifiers[id] = idx.macros[id] + for id in list(idx.enums.keys()): + if id in self.enums: + print("enum %s from %s redeclared in %s" % ( + id, self.enums[id].header, idx.enums[id].header)) + else: + self.enums[id] = idx.enums[id] + self.identifiers[id] = idx.enums[id] + + def merge_public(self, idx): + for id in list(idx.functions.keys()): + if id in self.functions: + # check that function condition agrees with header + if idx.functions[id].conditionals != \ + self.functions[id].conditionals: + print("Header condition differs from Function for %s:" \ + % id) + print(" H: %s" % self.functions[id].conditionals) + print(" C: %s" % idx.functions[id].conditionals) + up = idx.functions[id] + self.functions[id].update(None, up.module, up.type, up.info, up.extra) + # else: + # print "Function %s from %s is not declared in headers" % ( + # id, idx.functions[id].module) + + for id in list(idx.variables.keys()): + if id in self.variables: + # check that variable condition agrees with header + # TODO: produces many false positives + #if idx.variables[id].conditionals != \ + # self.variables[id].conditionals: + # print("Header condition differs from Variable for %s:" \ + # % id) + # print(" H: %s" % self.variables[id].conditionals) + # print(" C: 
%s" % idx.variables[id].conditionals) + up = idx.variables[id] + self.variables[id].update(None, up.module, up.type, up.info, up.extra) + + def analyze_dict(self, type, dict): + count = 0 + public = 0 + for name in list(dict.keys()): + id = dict[name] + count = count + 1 + if id.static == 0: + public = public + 1 + if count != public: + print(" %d %s , %d public" % (count, type, public)) + elif count != 0: + print(" %d public %s" % (count, type)) + + + def analyze(self): + self.analyze_dict("functions", self.functions) + self.analyze_dict("variables", self.variables) + self.analyze_dict("structs", self.structs) + self.analyze_dict("typedefs", self.typedefs) + self.analyze_dict("macros", self.macros) + +class CLexer: + """A lexer for the C language, tokenize the input by reading and + analyzing it line by line""" + def __init__(self, input): + self.input = input + self.tokens = [] + self.line = "" + self.lineno = 0 + + def getline(self): + line = '' + while line == '': + line = self.input.readline() + if not line: + return None + self.lineno = self.lineno + 1 + line = line.lstrip() + line = line.rstrip() + if line == '': + continue + while line[-1] == '\\': + line = line[:-1] + n = self.input.readline() + self.lineno = self.lineno + 1 + n = n.lstrip() + n = n.rstrip() + if not n: + break + else: + line = line + n + return line + + def getlineno(self): + return self.lineno + + def push(self, token): + self.tokens.insert(0, token); + + def debug(self): + print("Last token: ", self.last) + print("Token queue: ", self.tokens) + print("Line %d end: " % (self.lineno), self.line) + + def token(self): + while self.tokens == []: + if self.line == "": + line = self.getline() + else: + line = self.line + self.line = "" + if line == None: + return None + + if line[0] == '#': + self.tokens = list(map((lambda x: ('preproc', x)), + line.split())) + break; + l = len(line) + if line[0] == '"' or line[0] == "'": + end = line[0] + line = line[1:] + found = 0 + tok = "" + while found 
== 0: + i = 0 + l = len(line) + while i < l: + if line[i] == end: + self.line = line[i+1:] + line = line[:i] + l = i + found = 1 + break + if line[i] == '\\': + i = i + 1 + i = i + 1 + tok = tok + line + if found == 0: + line = self.getline() + if line == None: + return None + self.last = ('string', tok) + return self.last + + if l >= 2 and line[0] == '/' and line[1] == '*': + line = line[2:] + found = 0 + tok = "" + while found == 0: + i = 0 + l = len(line) + while i < l: + if line[i] == '*' and i+1 < l and line[i+1] == '/': + self.line = line[i+2:] + line = line[:i-1] + l = i + found = 1 + break + i = i + 1 + if tok != "": + tok = tok + "\n" + tok = tok + line + if found == 0: + line = self.getline() + if line == None: + return None + self.last = ('comment', tok) + return self.last + if l >= 2 and line[0] == '/' and line[1] == '/': + line = line[2:] + self.last = ('comment', line) + return self.last + i = 0 + while i < l: + if line[i] == '/' and i+1 < l and line[i+1] == '/': + self.line = line[i:] + line = line[:i] + break + if line[i] == '/' and i+1 < l and line[i+1] == '*': + self.line = line[i:] + line = line[:i] + break + if line[i] == '"' or line[i] == "'": + self.line = line[i:] + line = line[:i] + break + i = i + 1 + l = len(line) + i = 0 + while i < l: + if line[i] == ' ' or line[i] == '\t': + i = i + 1 + continue + o = ord(line[i]) + if (o >= 97 and o <= 122) or (o >= 65 and o <= 90) or \ + (o >= 48 and o <= 57): + s = i + while i < l: + o = ord(line[i]) + if (o >= 97 and o <= 122) or (o >= 65 and o <= 90) or \ + (o >= 48 and o <= 57) or \ + (" \t(){}:;,+-*/%&!|[]=><".find(line[i])) == -1: + i = i + 1 + else: + break + self.tokens.append(('name', line[s:i])) + continue + if "(){}:;,[]".find(line[i]) != -1: +# if line[i] == '(' or line[i] == ')' or line[i] == '{' or \ +# line[i] == '}' or line[i] == ':' or line[i] == ';' or \ +# line[i] == ',' or line[i] == '[' or line[i] == ']': + self.tokens.append(('sep', line[i])) + i = i + 1 + continue + if 
"+-*><=/%&!|.".find(line[i]) != -1: +# if line[i] == '+' or line[i] == '-' or line[i] == '*' or \ +# line[i] == '>' or line[i] == '<' or line[i] == '=' or \ +# line[i] == '/' or line[i] == '%' or line[i] == '&' or \ +# line[i] == '!' or line[i] == '|' or line[i] == '.': + if line[i] == '.' and i + 2 < l and \ + line[i+1] == '.' and line[i+2] == '.': + self.tokens.append(('name', '...')) + i = i + 3 + continue + + j = i + 1 + if j < l and ( + "+-*><=/%&!|".find(line[j]) != -1): +# line[j] == '+' or line[j] == '-' or line[j] == '*' or \ +# line[j] == '>' or line[j] == '<' or line[j] == '=' or \ +# line[j] == '/' or line[j] == '%' or line[j] == '&' or \ +# line[j] == '!' or line[j] == '|'): + self.tokens.append(('op', line[i:j+1])) + i = j + 1 + else: + self.tokens.append(('op', line[i])) + i = i + 1 + continue + s = i + while i < l: + o = ord(line[i]) + if (o >= 97 and o <= 122) or (o >= 65 and o <= 90) or \ + (o >= 48 and o <= 57) or ( + " \t(){}:;,+-*/%&!|[]=><".find(line[i]) == -1): +# line[i] != ' ' and line[i] != '\t' and +# line[i] != '(' and line[i] != ')' and +# line[i] != '{' and line[i] != '}' and +# line[i] != ':' and line[i] != ';' and +# line[i] != ',' and line[i] != '+' and +# line[i] != '-' and line[i] != '*' and +# line[i] != '/' and line[i] != '%' and +# line[i] != '&' and line[i] != '!' 
and +# line[i] != '|' and line[i] != '[' and +# line[i] != ']' and line[i] != '=' and +# line[i] != '*' and line[i] != '>' and +# line[i] != '<'): + i = i + 1 + else: + break + self.tokens.append(('name', line[s:i])) + + tok = self.tokens[0] + self.tokens = self.tokens[1:] + self.last = tok + return tok + +class CParser: + """The C module parser""" + def __init__(self, filename, idx = None): + self.filename = filename + if len(filename) > 2 and filename[-2:] == '.h': + self.is_header = 1 + else: + self.is_header = 0 + self.input = open(filename) + self.lexer = CLexer(self.input) + if idx == None: + self.index = index() + else: + self.index = idx + self.top_comment = "" + self.last_comment = "" + self.comment = None + self.collect_ref = 0 + self.doc_disable = 0 + self.conditionals = [] + self.defines = [] + + def collect_references(self): + self.collect_ref = 1 + + def disable(self): + self.doc_disable = 1 + + def enable(self): + self.doc_disable = 0 + + def lineno(self): + return self.lexer.getlineno() + + def index_add(self, name, module, static, type, info=None, extra = None): + if self.doc_disable: + return + if self.is_header == 1: + self.index.add(name, module, module, static, type, self.lineno(), + info, extra, self.conditionals) + else: + self.index.add(name, None, module, static, type, self.lineno(), + info, extra, self.conditionals) + + def index_add_ref(self, name, module, static, type, info=None, + extra = None): + if self.is_header == 1: + self.index.add_ref(name, module, module, static, type, + self.lineno(), info, extra, self.conditionals) + else: + self.index.add_ref(name, None, module, static, type, self.lineno(), + info, extra, self.conditionals) + + def warning(self, msg): + if self.doc_disable: + return + print(msg) + + def error(self, msg, token=-1): + if self.doc_disable: + return + + print("Parse Error: " + msg) + if token != -1: + print("Got token ", token) + self.lexer.debug() + sys.exit(1) + + def debug(self, msg, token=-1): + print("Debug: 
" + msg) + if token != -1: + print("Got token ", token) + self.lexer.debug() + + def parseTopComment(self, comment): + res = {} + lines = comment.split("\n") + item = None + for line in lines: + while line != "" and (line[0] == ' ' or line[0] == '\t'): + line = line[1:] + while line != "" and line[0] == '*': + line = line[1:] + while line != "" and (line[0] == ' ' or line[0] == '\t'): + line = line[1:] + try: + (it, line) = line.split(":", 1) + item = it + while line != "" and (line[0] == ' ' or line[0] == '\t'): + line = line[1:] + if item in res: + res[item] = res[item] + " " + line + else: + res[item] = line + except: + if item != None: + if item in res: + res[item] = res[item] + " " + line + else: + res[item] = line + self.index.info = res + + def parseComment(self, token): + if self.top_comment == "": + self.top_comment = token[1] + if self.comment == None or token[1][0] == '*': + self.comment = token[1]; + else: + self.comment = self.comment + token[1] + token = self.lexer.token() + + if self.comment.find("DOC_DISABLE") != -1: + self.disable() + + if self.comment.find("DOC_ENABLE") != -1: + self.enable() + + return token + + # + # Parse a simple comment block for typedefs or global variables + # + def parseSimpleComment(self, name, quiet = False): + if name[0:2] == '__': + quiet = 1 + + args = [] + desc = "" + + if self.comment == None: + if not quiet: + self.warning("Missing comment for %s" % (name)) + return(None) + if self.comment[0] != '*': + if not quiet: + self.warning("Missing * in comment for %s" % (name)) + return(None) + lines = self.comment.split('\n') + if lines[0] == '*': + del lines[0] + if lines[0] != "* %s:" % (name): + if not quiet: + self.warning("Misformatted comment for %s" % (name)) + self.warning(" Expecting '* %s:' got '%s'" % (name, lines[0])) + return(None) + del lines[0] + while len(lines) > 0 and lines[0] == '*': + del lines[0] + desc = "" + while len(lines) > 0: + l = lines[0] + while len(l) > 0 and l[0] == '*': + l = l[1:] + l = 
l.strip() + desc = desc + " " + l + del lines[0] + + desc = desc.strip() + + if quiet == 0: + if desc == "": + self.warning("Comment for %s lacks description" % (name)) + + return(desc) + # + # Parse a comment block associate to a macro + # + def parseMacroComment(self, name, quiet = 0): + if name[0:2] == '__': + quiet = 1 + + args = [] + desc = "" + + if self.comment == None: + if not quiet: + self.warning("Missing comment for macro %s" % (name)) + return((args, desc)) + if self.comment[0] != '*': + if not quiet: + self.warning("Missing * in macro comment for %s" % (name)) + return((args, desc)) + lines = self.comment.split('\n') + if lines[0] == '*': + del lines[0] + if lines[0] != "* %s:" % (name): + if not quiet: + self.warning("Misformatted macro comment for %s" % (name)) + self.warning(" Expecting '* %s:' got '%s'" % (name, lines[0])) + return((args, desc)) + del lines[0] + while lines[0] == '*': + del lines[0] + while len(lines) > 0 and lines[0][0:3] == '* @': + l = lines[0][3:] + try: + (arg, desc) = l.split(':', 1) + desc=desc.strip() + arg=arg.strip() + except: + if not quiet: + self.warning("Misformatted macro comment for %s" % (name)) + self.warning(" problem with '%s'" % (lines[0])) + del lines[0] + continue + del lines[0] + l = lines[0].strip() + while len(l) > 2 and l[0:3] != '* @': + while l[0] == '*': + l = l[1:] + desc = desc + ' ' + l.strip() + del lines[0] + if len(lines) == 0: + break + l = lines[0] + args.append((arg, desc)) + while len(lines) > 0 and lines[0] == '*': + del lines[0] + desc = "" + while len(lines) > 0: + l = lines[0] + while len(l) > 0 and l[0] == '*': + l = l[1:] + l = l.strip() + desc = desc + " " + l + del lines[0] + + desc = desc.strip() + + if quiet == 0: + if desc == "": + self.warning("Macro comment for %s lack description of the macro" % (name)) + + return((args, desc)) + + # + # Parse a comment block and merge the information found in the + # parameters descriptions, finally returns a block as complete + # as possible 
+ # + def mergeFunctionComment(self, name, description, quiet = 0): + if name == 'main': + quiet = 1 + if name[0:2] == '__': + quiet = 1 + + (ret, args) = description + desc = "" + retdesc = "" + + if self.comment == None: + if not quiet: + self.warning("Missing comment for function %s" % (name)) + return(((ret[0], retdesc), args, desc)) + if self.comment[0] != '*': + if not quiet: + self.warning("Missing * in function comment for %s" % (name)) + return(((ret[0], retdesc), args, desc)) + lines = self.comment.split('\n') + if lines[0] == '*': + del lines[0] + if lines[0] != "* %s:" % (name): + if not quiet: + self.warning("Misformatted function comment for %s" % (name)) + self.warning(" Expecting '* %s:' got '%s'" % (name, lines[0])) + return(((ret[0], retdesc), args, desc)) + del lines[0] + while lines[0] == '*': + del lines[0] + nbargs = len(args) + while len(lines) > 0 and lines[0][0:3] == '* @': + l = lines[0][3:] + try: + (arg, desc) = l.split(':', 1) + desc=desc.strip() + arg=arg.strip() + except: + if not quiet: + self.warning("Misformatted function comment for %s" % (name)) + self.warning(" problem with '%s'" % (lines[0])) + del lines[0] + continue + del lines[0] + l = lines[0].strip() + while len(l) > 2 and l[0:3] != '* @': + while l[0] == '*': + l = l[1:] + desc = desc + ' ' + l.strip() + del lines[0] + if len(lines) == 0: + break + l = lines[0] + i = 0 + while i < nbargs: + if args[i][1] == arg: + args[i] = (args[i][0], arg, desc) + break; + i = i + 1 + if i >= nbargs: + if not quiet: + self.warning("Unable to find arg %s from function comment for %s" % ( + arg, name)) + while len(lines) > 0 and lines[0] == '*': + del lines[0] + desc = "" + while len(lines) > 0: + l = lines[0] + while len(l) > 0 and l[0] == '*': + l = l[1:] + l = l.strip() + if len(l) >= 6 and l[0:6] == "return" or l[0:6] == "Return": + try: + l = l.split(' ', 1)[1] + except: + l = "" + retdesc = l.strip() + del lines[0] + while len(lines) > 0: + l = lines[0] + while len(l) > 0 and l[0] 
== '*': + l = l[1:] + l = l.strip() + retdesc = retdesc + " " + l + del lines[0] + else: + desc = desc + " " + l + del lines[0] + + retdesc = retdesc.strip() + desc = desc.strip() + + if quiet == 0: + # + # report missing comments + # + i = 0 + while i < nbargs: + if args[i][2] == None and args[i][0] != "void" and \ + ((args[i][1] != None) or (args[i][1] == '')): + self.warning("Function comment for %s lacks description of arg %s" % (name, args[i][1])) + i = i + 1 + if retdesc == "" and ret[0] != "void": + self.warning("Function comment for %s lacks description of return value" % (name)) + if desc == "" and retdesc == "": + self.warning("Function comment for %s lacks description of the function" % (name)) + + return(((ret[0], retdesc), args, desc)) + + def parsePreproc(self, token): + if debug: + print("=> preproc ", token, self.lexer.tokens) + name = token[1] + if name == "#include": + token = self.lexer.token() + if token == None: + return None + if token[0] == 'preproc': + self.index_add(token[1], self.filename, not self.is_header, + "include") + return self.lexer.token() + return token + if name == "#define": + token = self.lexer.token() + if token == None: + return None + if token[0] == 'preproc': + # TODO macros with arguments + name = token[1] + lst = [] + token = self.lexer.token() + while token != None and token[0] == 'preproc' and \ + token[1][0] != '#': + lst.append(token[1]) + token = self.lexer.token() + try: + name = name.split('(') [0] + except: + pass + info = self.parseMacroComment(name, True) + self.index_add(name, self.filename, not self.is_header, + "macro", info) + return token + + # + # Processing of conditionals modified by Bill 1/1/05 + # + # We process conditionals (i.e. tokens from #ifdef, #ifndef, + # #if, #else and #endif) for headers and mainline code, + # store the ones from the header in libxml2-api.xml, and later + # (in the routine merge_public) verify that the two (header and + # mainline code) agree. 
+ # + # There is a small problem with processing the headers. Some of + # the variables are not concerned with enabling / disabling of + # library functions (e.g. '__XML_PARSER_H__'), and we don't want + # them to be included in libxml2-api.xml, or involved in + # the check between the header and the mainline code. To + # accomplish this, we ignore any conditional which doesn't include + # the string 'ENABLED' + # + if name == "#ifdef": + apstr = self.lexer.tokens[0][1] + try: + self.defines.append(apstr) + if apstr.find('ENABLED') != -1: + self.conditionals.append("defined(%s)" % apstr) + except: + pass + elif name == "#ifndef": + apstr = self.lexer.tokens[0][1] + try: + self.defines.append(apstr) + if apstr.find('ENABLED') != -1: + self.conditionals.append("!defined(%s)" % apstr) + except: + pass + elif name == "#if": + apstr = "" + for tok in self.lexer.tokens: + if apstr != "": + apstr = apstr + " " + apstr = apstr + tok[1] + try: + self.defines.append(apstr) + if apstr.find('ENABLED') != -1: + self.conditionals.append(apstr) + except: + pass + elif name == "#else": + if self.conditionals != [] and \ + self.defines[-1].find('ENABLED') != -1: + self.conditionals[-1] = "!(%s)" % self.conditionals[-1] + elif name == "#endif": + if self.conditionals != [] and \ + self.defines[-1].find('ENABLED') != -1: + self.conditionals = self.conditionals[:-1] + self.defines = self.defines[:-1] + token = self.lexer.token() + while token != None and token[0] == 'preproc' and \ + token[1][0] != '#': + token = self.lexer.token() + return token + + # + # token acquisition on top of the lexer, it handle internally + # preprocessor and comments since they are logically not part of + # the program structure. 
+ # + def token(self): + global ignored_words + + token = self.lexer.token() + while token != None: + if token[0] == 'comment': + token = self.parseComment(token) + continue + elif token[0] == 'preproc': + token = self.parsePreproc(token) + continue + elif token[0] == "name" and token[1] == "__const": + token = ("name", "const") + return token + elif token[0] == "name" and token[1] == "__attribute": + token = self.lexer.token() + while token != None and token[1] != ";": + token = self.lexer.token() + return token + elif token[0] == "name" and token[1] in ignored_words: + (n, info) = ignored_words[token[1]] + i = 0 + while i < n: + token = self.lexer.token() + i = i + 1 + token = self.lexer.token() + continue + else: + if debug: + print("=> ", token) + return token + return None + + # + # Parse a typedef, it records the type and its name. + # + def parseTypedef(self, token): + if token == None: + return None + token = self.parseType(token) + if token == None: + self.error("parsing typedef") + return None + base_type = self.type + type = base_type + #self.debug("end typedef type", token) + while token != None: + if token[0] == "name": + name = token[1] + signature = self.signature + if signature != None: + type = type.split('(')[0] + d = self.mergeFunctionComment(name, + ((type, None), signature), 1) + self.index_add(name, self.filename, not self.is_header, + "functype", d) + else: + if base_type == "struct": + self.index_add(name, self.filename, not self.is_header, + "struct", type) + base_type = "struct " + name + else: + # TODO report missing or misformatted comments + info = self.parseSimpleComment(name, True) + self.index_add(name, self.filename, not self.is_header, + "typedef", type, info) + token = self.token() + else: + self.error("parsing typedef: expecting a name") + return token + #self.debug("end typedef", token) + if token != None and token[0] == 'sep' and token[1] == ',': + type = base_type + token = self.token() + while token != None and token[0] == 
"op": + type = type + token[1] + token = self.token() + elif token != None and token[0] == 'sep' and token[1] == ';': + break; + elif token != None and token[0] == 'name': + type = base_type + continue; + else: + self.error("parsing typedef: expecting ';'", token) + return token + token = self.token() + return token + + # + # Parse a C code block, used for functions it parse till + # the balancing } included + # + def parseBlock(self, token): + while token != None: + if token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseBlock(token) + elif token[0] == "sep" and token[1] == "}": + token = self.token() + return token + else: + if self.collect_ref == 1: + oldtok = token + token = self.token() + if oldtok[0] == "name" and oldtok[1][0:3] == "xml": + if token[0] == "sep" and token[1] == "(": + self.index_add_ref(oldtok[1], self.filename, + 0, "function") + token = self.token() + elif token[0] == "name": + token = self.token() + if token[0] == "sep" and (token[1] == ";" or + token[1] == "," or token[1] == "="): + self.index_add_ref(oldtok[1], self.filename, + 0, "type") + elif oldtok[0] == "name" and oldtok[1][0:4] == "XML_": + self.index_add_ref(oldtok[1], self.filename, + 0, "typedef") + elif oldtok[0] == "name" and oldtok[1][0:7] == "LIBXML_": + self.index_add_ref(oldtok[1], self.filename, + 0, "typedef") + + else: + token = self.token() + return token + + # + # Parse a C struct definition till the balancing } + # + def parseStruct(self, token): + fields = [] + #self.debug("start parseStruct", token) + while token != None: + if token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseTypeBlock(token) + elif token[0] == "sep" and token[1] == "}": + self.struct_fields = fields + #self.debug("end parseStruct", token) + #print fields + token = self.token() + return token + else: + base_type = self.type + #self.debug("before parseType", token) + token = self.parseType(token) + #self.debug("after parseType", token) + if 
token != None and token[0] == "name": + fname = token[1] + token = self.token() + if token[0] == "sep" and token[1] == ";": + token = self.token() + fields.append((self.type, fname)) + else: + self.error("parseStruct: expecting ;", token) + elif token != None and token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseTypeBlock(token) + if token != None and token[0] == "name": + token = self.token() + if token != None and token[0] == "sep" and token[1] == ";": + token = self.token() + else: + self.error("parseStruct: expecting ;", token) + else: + self.error("parseStruct: name", token) + token = self.token() + self.type = base_type; + self.struct_fields = fields + #self.debug("end parseStruct", token) + #print fields + return token + + # + # Parse a C enum block, parse till the balancing } + # + def parseEnumBlock(self, token): + self.enums = [] + name = None + self.comment = None + comment = "" + value = "0" + while token != None: + if token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseTypeBlock(token) + elif token[0] == "sep" and token[1] == "}": + if name != None: + if self.comment != None: + comment = self.comment + self.comment = None + self.enums.append((name, value, comment)) + token = self.token() + return token + elif token[0] == "name": + if name != None: + if self.comment != None: + comment = self.comment.strip() + self.comment = None + self.enums.append((name, value, comment)) + name = token[1] + comment = "" + token = self.token() + if token[0] == "op" and token[1][0] == "=": + value = "" + if len(token[1]) > 1: + value = token[1][1:] + token = self.token() + while token[0] != "sep" or (token[1] != ',' and + token[1] != '}'): + value = value + token[1] + token = self.token() + else: + try: + value = "%d" % (int(value) + 1) + except: + self.warning("Failed to compute value of enum %s" % (name)) + value="" + if token[0] == "sep" and token[1] == ",": + token = self.token() + else: + token = 
self.token() + return token + + # + # Parse a C definition block, used for structs it parse till + # the balancing } + # + def parseTypeBlock(self, token): + while token != None: + if token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseTypeBlock(token) + elif token[0] == "sep" and token[1] == "}": + token = self.token() + return token + else: + token = self.token() + return token + + # + # Parse a type: the fact that the type name can either occur after + # the definition or within the definition makes it a little harder + # if inside, the name token is pushed back before returning + # + def parseType(self, token): + self.type = "" + self.struct_fields = [] + self.signature = None + if token == None: + return token + + have_sign = 0 + done = 0 + + while token[0] == "name" and ( + token[1] == "const" or \ + token[1] == "unsigned" or \ + token[1] == "signed"): + if token[1] == "unsigned" or token[1] == "signed": + have_sign = 1 + if self.type == "": + self.type = token[1] + else: + self.type = self.type + " " + token[1] + token = self.token() + + if token[0] == "name" and token[1] in ("char", "short", "int", "long"): + if self.type == "": + self.type = token[1] + else: + self.type = self.type + " " + token[1] + + elif have_sign: + done = 1 + + elif token[0] == "name" and token[1] == "struct": + if self.type == "": + self.type = token[1] + else: + self.type = self.type + " " + token[1] + token = self.token() + nametok = None + if token[0] == "name": + nametok = token + token = self.token() + if token != None and token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseStruct(token) + elif token != None and token[0] == "op" and token[1] == "*": + self.type = self.type + " " + nametok[1] + " *" + token = self.token() + while token != None and token[0] == "op" and token[1] == "*": + self.type = self.type + " *" + token = self.token() + if token[0] == "name": + nametok = token + token = self.token() + else: + 
self.error("struct : expecting name", token) + return token + elif token != None and token[0] == "name" and nametok != None: + self.type = self.type + " " + nametok[1] + return token + + if nametok != None: + self.lexer.push(token) + token = nametok + return token + + elif token[0] == "name" and token[1] == "enum": + if self.type == "": + self.type = token[1] + else: + self.type = self.type + " " + token[1] + self.enums = [] + token = self.token() + if token != None and token[0] == "sep" and token[1] == "{": + token = self.token() + token = self.parseEnumBlock(token) + else: + self.error("parsing enum: expecting '{'", token) + enum_type = None + if token != None and token[0] != "name": + self.lexer.push(token) + token = ("name", "enum") + else: + enum_type = token[1] + for enum in self.enums: + self.index_add(enum[0], self.filename, + not self.is_header, "enum", + (enum[1], enum[2], enum_type)) + return token + + elif token[0] == "name": + if self.type == "": + self.type = token[1] + else: + self.type = self.type + " " + token[1] + else: + self.error("parsing type %s: expecting a name" % (self.type), + token) + return token + if not done: + token = self.token() + while token != None and (token[0] == "op" or + token[0] == "name" and token[1] == "const"): + self.type = self.type + " " + token[1] + token = self.token() + + # + # if there is a parenthesis here, this means a function type + # + if token != None and token[0] == "sep" and token[1] == '(': + self.type = self.type + token[1] + token = self.token() + while token != None and token[0] == "op" and token[1] == '*': + self.type = self.type + token[1] + token = self.token() + if token == None or token[0] != "name" : + self.error("parsing function type, name expected", token); + return token + self.type = self.type + token[1] + nametok = token + token = self.token() + if token != None and token[0] == "sep" and token[1] == ')': + self.type = self.type + token[1] + token = self.token() + if token != None and token[0] 
== "sep" and token[1] == '(': + token = self.token() + type = self.type; + token = self.parseSignature(token); + self.type = type; + else: + self.error("parsing function type, '(' expected", token); + return token + else: + self.error("parsing function type, ')' expected", token); + return token + self.lexer.push(token) + token = nametok + return token + + # + # do some lookahead for arrays + # + if token != None and token[0] == "name": + nametok = token + token = self.token() + if token != None and token[0] == "sep" and token[1] == '[': + self.type = self.type + nametok[1] + while token != None and token[0] == "sep" and token[1] == '[': + self.type = self.type + token[1] + token = self.token() + while token != None and token[0] != 'sep' and \ + token[1] != ']' and token[1] != ';': + self.type = self.type + token[1] + token = self.token() + if token != None and token[0] == 'sep' and token[1] == ']': + self.type = self.type + token[1] + token = self.token() + else: + self.error("parsing array type, ']' expected", token); + return token + elif token != None and token[0] == "sep" and token[1] == ':': + # remove :12 in case it's a limited int size + token = self.token() + token = self.token() + self.lexer.push(token) + token = nametok + + return token + + # + # Parse a signature: '(' has been parsed and we scan the type definition + # up to the ')' included + def parseSignature(self, token): + signature = [] + if token != None and token[0] == "sep" and token[1] == ')': + self.signature = [] + token = self.token() + return token + while token != None: + token = self.parseType(token) + if token != None and token[0] == "name": + signature.append((self.type, token[1], None)) + token = self.token() + elif token != None and token[0] == "sep" and token[1] == ',': + token = self.token() + continue + elif token != None and token[0] == "sep" and token[1] == ')': + # only the type was provided + if self.type == "...": + signature.append((self.type, "...", None)) + else: + 
signature.append((self.type, None, None)) + if token != None and token[0] == "sep": + if token[1] == ',': + token = self.token() + continue + elif token[1] == ')': + token = self.token() + break + self.signature = signature + return token + + # + # Parse a global definition, be it a type, variable or function + # the extern "C" blocks are a bit nasty and require it to recurse. + # + def parseGlobal(self, token): + static = 0 + if token[1] == 'extern': + token = self.token() + if token == None: + return token + if token[0] == 'string': + if token[1] == 'C': + token = self.token() + if token == None: + return token + if token[0] == 'sep' and token[1] == "{": + token = self.token() +# print 'Entering extern "C line ', self.lineno() + while token != None and (token[0] != 'sep' or + token[1] != "}"): + if token[0] == 'name': + token = self.parseGlobal(token) + else: + self.error( + "token %s %s unexpected at the top level" % ( + token[0], token[1])) + token = self.parseGlobal(token) +# print 'Exiting extern "C" line', self.lineno() + token = self.token() + return token + else: + return token + elif token[1] == 'static': + static = 1 + token = self.token() + if token == None or token[0] != 'name': + return token + + if token[1] == 'typedef': + token = self.token() + return self.parseTypedef(token) + else: + token = self.parseType(token) + type_orig = self.type + if token == None or token[0] != "name": + return token + type = type_orig + self.name = token[1] + token = self.token() + while token != None and (token[0] == "sep" or token[0] == "op"): + if token[0] == "sep": + if token[1] == "[": + type = type + token[1] + token = self.token() + while token != None and (token[0] != "sep" or \ + token[1] != ";"): + type = type + token[1] + token = self.token() + + if token != None and token[0] == "op" and token[1] == "=": + # + # Skip the initialization of the variable + # + token = self.token() + if token[0] == 'sep' and token[1] == '{': + token = self.token() + token = 
self.parseBlock(token) + else: + while token != None and (token[0] != "sep" or \ + (token[1] != ';' and token[1] != ',')): + token = self.token() + if token == None or token[0] != "sep" or (token[1] != ';' and + token[1] != ','): + self.error("missing ';' or ',' after value") + + if token != None and token[0] == "sep": + if token[1] == ";": + if type == "struct": + self.index_add(self.name, self.filename, + not self.is_header, "struct", self.struct_fields) + else: + info = self.parseSimpleComment(self.name, True) + self.index_add(self.name, self.filename, + not self.is_header, "variable", type, info) + self.comment = None + token = self.token() + break + elif token[1] == "(": + token = self.token() + token = self.parseSignature(token) + if token == None: + return None + if token[0] == "sep" and token[1] == ";": + d = self.mergeFunctionComment(self.name, + ((type, None), self.signature), 1) + self.index_add(self.name, self.filename, static, + "function", d) + self.comment = None + token = self.token() + elif token[0] == "sep" and token[1] == "{": + d = self.mergeFunctionComment(self.name, + ((type, None), self.signature), static) + self.index_add(self.name, self.filename, static, + "function", d) + self.comment = None + token = self.token() + token = self.parseBlock(token); + elif token[1] == ',': + self.index_add(self.name, self.filename, static, + "variable", type) + self.comment = None + type = type_orig + token = self.token() + while token != None and token[0] == "sep": + type = type + token[1] + token = self.token() + if token != None and token[0] == "name": + self.name = token[1] + token = self.token() + else: + break + + return token + + def parse(self): + self.warning("Parsing %s" % (self.filename)) + token = self.token() + while token != None: + if token[0] == 'name': + token = self.parseGlobal(token) + else: + self.error("token %s %s unexpected at the top level" % ( + token[0], token[1])) + token = self.parseGlobal(token) + return + 
self.parseTopComment(self.top_comment) + return self.index + + +class docBuilder: + """A documentation builder""" + def __init__(self, name, directories=['.'], excludes=[]): + self.name = name + self.directories = directories + self.excludes = excludes + list(ignored_files.keys()) + self.modules = {} + self.headers = {} + self.idx = index() + self.index = {} + if name == 'libxml2': + self.basename = 'libxml' + else: + self.basename = name + + def analyze(self): + print("Project %s : %d headers, %d modules" % (self.name, len(list(self.headers.keys())), len(list(self.modules.keys())))) + self.idx.analyze() + + def scanHeaders(self): + for header in list(self.headers.keys()): + parser = CParser(header) + idx = parser.parse() + self.headers[header] = idx; + self.idx.merge(idx) + + def scanModules(self): + for module in list(self.modules.keys()): + parser = CParser(module) + idx = parser.parse() + # idx.analyze() + self.modules[module] = idx + self.idx.merge_public(idx) + + def scan(self): + for directory in self.directories: + files = glob.glob(directory + "/*.c") + for file in files: + skip = 0 + for excl in self.excludes: + if file.find(excl) != -1: + print("Skipping %s" % file) + skip = 1 + break + if skip == 0: + self.modules[file] = None; + files = glob.glob(directory + "/*.h") + for file in files: + skip = 0 + for excl in self.excludes: + if file.find(excl) != -1: + print("Skipping %s" % file) + skip = 1 + break + if skip == 0: + self.headers[file] = None; + self.scanHeaders() + self.scanModules() + + def modulename_file(self, file): + module = os.path.basename(file) + if module[-2:] == '.h': + module = module[:-2] + elif module[-2:] == '.c': + module = module[:-2] + return module + + def serialize_enum(self, output, name): + id = self.idx.enums[name] + output.write(" \n") + + def serialize_macro(self, output, name): + id = self.idx.macros[name] + output.write(" \n" % (name, + self.modulename_file(id.header))) + if id.info != None: + try: + (args, desc) = id.info 
+ if desc != None and desc != "": + output.write(" %s\n" % (escape(desc))) + for arg in args: + (name, desc) = arg + if desc != None and desc != "": + output.write(" \n" % ( + name, escape(desc))) + else: + output.write(" \n" % (name)) + except: + pass + output.write(" \n") + + def serialize_typedef(self, output, name): + id = self.idx.typedefs[name] + if id.info[0:7] == 'struct ': + output.write(" \n"); + try: + for field in self.idx.structs[name].info: + output.write(" \n" % (field[1] , field[0])) + except: + print("Failed to serialize struct %s" % (name)) + output.write(" \n") + else: + output.write("/>\n"); + else : + output.write(" \n %s\n" % (escape(desc))) + output.write(" \n") + else: + output.write("/>\n") + except: + output.write("/>\n") + + def serialize_variable(self, output, name): + id = self.idx.variables[name] + if id.info != None: + output.write(" \n %s\n" % (escape(desc))) + output.write(" \n") + else: + output.write("/>\n") + + def serialize_function(self, output, name): + id = self.idx.functions[name] + if name == debugsym: + print("=>", id) + + output.write(" <%s name='%s' file='%s' module='%s'>\n" % (id.type, + name, self.modulename_file(id.header), + self.modulename_file(id.module))) + # + # Processing of conditionals modified by Bill 1/1/05 + # + if id.conditionals != None: + apstr = "" + for cond in id.conditionals: + if apstr != "": + apstr = apstr + " && " + apstr = apstr + cond + output.write(" %s\n"% (apstr)); + try: + (ret, params, desc) = id.info + if (desc == None or desc == '') and \ + name[0:9] != "xmlThrDef" and name != "xmlDllMain" and \ + ret[1] == '': + print("%s %s from %s has no description" % (id.type, name, + self.modulename_file(id.module))) + + output.write(" %s\n" % (escape(desc))) + if ret[0] != None: + if ret[0] == "void": + output.write(" \n") + else: + output.write(" \n" % ( + ret[0], escape(ret[1]))) + for param in params: + if param[0] == 'void': + continue + if param[2] == None: + output.write(" \n" % (param[1], 
param[0])) + else: + output.write(" \n" % (param[1], param[0], escape(param[2]))) + except: + print("Failed to save function %s info: " % name, repr(id.info)) + output.write(" \n" % (id.type)) + + def serialize_exports(self, output, file): + module = self.modulename_file(file) + output.write(" \n" % (module)) + dict = self.headers[file] + if dict.info != None: + for data in ('Summary', 'Description', 'Author'): + try: + output.write(" <%s>%s\n" % ( + data.lower(), + escape(dict.info[data]), + data.lower())) + except: + if data != 'Author': + print("Header %s lacks a %s description" % (module, data)) + if 'Description' in dict.info: + desc = dict.info['Description'] + if desc.find("DEPRECATED") != -1: + output.write(" \n") + + ids = list(dict.macros.keys()) + ids.sort() + for id in ids: + # Macros are sometime used to masquerade other types. + if id in dict.functions: + continue + if id in dict.variables: + continue + if id in dict.typedefs: + continue + if id in dict.structs: + continue + if id in dict.enums: + continue + output.write(" \n" % (id)) + ids = list(dict.enums.keys()) + ids.sort() + for id in ids: + output.write(" \n" % (id)) + ids = list(dict.typedefs.keys()) + ids.sort() + for id in ids: + output.write(" \n" % (id)) + ids = list(dict.structs.keys()) + ids.sort() + for id in ids: + output.write(" \n" % (id)) + ids = list(dict.variables.keys()) + ids.sort() + for id in ids: + output.write(" \n" % (id)) + ids = list(dict.functions.keys()) + ids.sort() + for id in ids: + output.write(" \n" % (id)) + output.write(" \n") + + def serialize(self): + filename = "%s-api.xml" % self.name + print("Saving XML description %s" % (filename)) + output = open(filename, "w") + output.write('\n') + output.write("\n" % self.name) + output.write(" \n") + headers = list(self.headers.keys()) + headers.sort() + for file in headers: + self.serialize_exports(output, file) + output.write(" \n") + output.write(" \n") + macros = list(self.idx.macros.keys()) + macros.sort() + for 
macro in macros: + self.serialize_macro(output, macro) + enums = list(self.idx.enums.keys()) + enums.sort() + for enum in enums: + self.serialize_enum(output, enum) + typedefs = list(self.idx.typedefs.keys()) + typedefs.sort() + for typedef in typedefs: + self.serialize_typedef(output, typedef) + variables = list(self.idx.variables.keys()) + variables.sort() + for variable in variables: + self.serialize_variable(output, variable) + functions = list(self.idx.functions.keys()) + functions.sort() + for function in functions: + self.serialize_function(output, function) + output.write(" \n") + output.write("\n") + output.close() + + +def rebuild(): + builder = None + if glob.glob("parser.c") != [] : + print("Rebuilding API description for libxml2") + builder = docBuilder("libxml2", [".", "."], + ["tst.c"]) + elif glob.glob("../parser.c") != [] : + print("Rebuilding API description for libxml2") + builder = docBuilder("libxml2", ["..", "../include/libxml"], + ["tst.c"]) + elif glob.glob("../libxslt/transform.c") != [] : + print("Rebuilding API description for libxslt") + builder = docBuilder("libxslt", ["../libxslt"], + ["win32config.h", "libxslt.h", "tst.c"]) + else: + print("rebuild() failed, unable to guess the module") + return None + builder.scan() + builder.analyze() + builder.serialize() + if glob.glob("../libexslt/exslt.c") != [] : + extra = docBuilder("libexslt", ["../libexslt"], ["libexslt.h"]) + extra.scan() + extra.analyze() + extra.serialize() + return builder + +# +# for debugging the parser +# +def parse(filename): + parser = CParser(filename) + idx = parser.parse() + return idx + +if __name__ == "__main__": + if len(sys.argv) > 1: + debug = 1 + parse(sys.argv[1]) + else: + rebuild() diff --git a/local-test-libxml2-delta-02/afc-libxml2/doc/libxml2-api.xml b/local-test-libxml2-delta-02/afc-libxml2/doc/libxml2-api.xml new file mode 100644 index 0000000000000000000000000000000000000000..af18f826187e80efc4943fdc2418a14284f4be5e --- /dev/null +++ 
b/local-test-libxml2-delta-02/afc-libxml2/doc/libxml2-api.xml @@ -0,0 +1,17527 @@ + + + + + interface for an HTML 4.0 non-verifying parser + this module implements an HTML 4.0 non-verifying parser with API compatible with the XML parser ones. It should be able to parse "real world" HTML, even if severely broken from a specification point of view. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + specific APIs to process HTML tree, especially serialization + this module implements a few function needed to process tree in an HTML specific way. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + SAX2 parser interface used to build the DOM tree + those are the default SAX2 interfaces used by the library when building DOM tree. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Provide Canonical XML and Exclusive XML Canonicalization + the c14n modules provides a "Canonical XML" implementation + Aleksey Sanin <aleksey@aleksey.com> + + + + + + + + + + + + interfaces to the Catalog handling system + the catalog module implements the support for XML Catalogs and SGML catalogs + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Unicode character range checking + this module exports interfaces for the character range validation APIs This file is automatically generated from the cvs source definition files using the genChRanges.py Python script + William Brack <wbrack@mmm.com.hk> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tree debugging APIs + Interfaces to a set of routines used for debugging the tree produced by the XML parser. 
+ Daniel Veillard + + + + + + + + + + + + + + string dictionary + dictionary of reusable strings, just used to avoid allocation and freeing operations. + Daniel Veillard + + + + + + + + + + + + + + + + + + interface for the encoding conversion functions + interface for the encoding conversion functions needed for XML basic encoding and iconv() support. Related specs are rfc2044 (UTF-8 and UTF-16) F. Yergeau Alis Technologies [ISO-10646] UTF-8 and UTF-16 in Annexes [ISO-8859-1] ISO Latin-1 characters codes. [UNICODE] The Unicode Consortium, "The Unicode Standard -- Worldwide Character Encoding -- Version 1.0", Addison- Wesley, Volume 1, 1991, Volume 2, 1992. UTF-8 is described in Unicode Technical Report #4. [US-ASCII] Coded Character Set--7-bit American Standard Code for Information Interchange, ANSI X3.4-1986. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + interface for the XML entities handling + this module provides some of the entity API needed for the parser and applications. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + interface for all global variables of the library + Deprecated, don't use + + + + + + + Chained hash tables + This module implements the hash table support used in various places in the library. + Bjorn Reese <bjorn.reese@systematic.dk> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lists interfaces + this module implement the list support used in various place in the library. + Gary Pennington <Gary.Pennington@uk.sun.com> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + minimal HTTP implementation + minimal HTTP implementation allowing to fetch resources like external subset. 
+ Daniel Veillard + + + + + + + + + + + + + + + + + + + + the core parser module + Interfaces, constants and types related to the XML parser + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + internals routines and limits exported by the parser. + this module exports a number of internal parsing routines they are not really all intended for applications but can prove useful doing low level processing. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + pattern expression handling + allows to compile and test pattern expressions for nodes either in a tree or based on a parser state. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + implementation of the Relax-NG validation + implementation of the Relax-NG validation + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + internal interfaces for XML Schemas + internal interfaces for the XML Schemas handling and schema validity checking The Schemas development is a Work In Progress. Some of those interfaces are not guaranteed to be API or ABI stable ! 
+ Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + XML Schematron implementation + interface to the XML Schematron validity checking. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + interfaces for thread handling + set of generic threading related routines should work with pthreads, Windows native or TLS threads + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + interfaces for tree manipulation + this module describes the structures found in an tree resulting from an XML or HTML parsing, as well as the API provided for various processing on that tree + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + library of generic URI related routines + library of generic URI related routines Implements RFC 2396 + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + The DTD validation + API for the DTD handling and the validity checking + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + implementation of XInclude + API to handle XInclude processing, implements the World Wide Web Consortium Last Call Working Draft 10 November 2003 + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + unfinished XLink detection module + unfinished XLink detection module + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + interface for the I/O interfaces used by the parser + interface for the I/O interfaces used by the parser + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + API to build regexp automata + the API to build regexp automata + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + error handling + the API used to report errors + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + macros for marking symbols as exportable/importable. + macros for marking symbols as exportable/importable. + + + interface for the memory allocator + provides interfaces for the memory allocator, including debugging capabilities. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dynamic module loading + basic API for dynamic module loading, used by libexslt added in 2.6.17 + Joel W. Reed + + + + + + + + + + + + the XMLReader implementation + API of the XML streaming API based on C# interfaces. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + regular expressions handling + basic API for libxml regular expressions handling used for XML Schemas and validation. + Daniel Veillard + + + + + + + + + + + + + + + + + + + the XML document serializer + API to save document or subtree of document + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + incomplete XML Schemas structure implementation + interface to the XML Schemas handling and schema validity checking, it is incomplete right now. 
+ Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + implementation of XML Schema Datatypes + module providing the XML Schema Datatypes implementation both definition and validity checking + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + set of routines to process strings + type and interfaces needed for the internal string handling of the library, especially UTF8 processing. + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Unicode character APIs + API for the Unicode character APIs This file is automatically generated from the UCS description files of the Unicode Character Database + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + compile-time version information + compile-time version information for the XML library + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text writing API for XML + text writing API for XML + Alfred Mickautsch <alfred@mickautsch.de> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + XML Path Language implementation + API for the XML Path Language implementation XML Path Language implementation XPath is a language for addressing parts of an XML document, designed to be used by both XSLT and XPointer + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + internal interfaces for XML Path Language implementation + internal interfaces for XML Path Language implementation used to build new modules on top of XPath like XPointer and XSLT + Daniel Veillard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + API to handle XML Pointers + API to handle XML Pointers Base implementation was made accordingly to W3C Candidate Recommendation 7 June 2000 + Daniel Veillard + + + + + + + Macro to cast a string to an xmlChar * when one know its safe. + + + default buffer size 4000. + + + Macro to try to cast the value on the top of the XPath stack to a boolean. + + + Macro to try to cast the value on the top of the XPath stack to a number. + + + Macro to try to cast the value on the top of the XPath stack to a string. + + + Macro to check that the number of args passed to an XPath function matches. + + + + Macro to return from the function if an XPath error was detected. + + + Macro to return 0 from the function if an XPath error was detected. + + + Macro to check that the value on top of the XPath stack is of a given type. + + + + Macro to check that the value on top of the XPath stack is of a given type. Return(0) in case of failure + + + + Macro. A comment in a HTML document is really implemented the same way as a comment in an XML document. + + + Macro. An entity reference in a HTML document is really implemented the same way as an entity reference in an XML document. + + + Macro. A processing instruction in a HTML document is really implemented the same way as a processing instruction in an XML document. + + + Macro. 
A preserved node in a HTML document is really implemented the same way as a CDATA section in an XML document. + + + Macro. A text node in a HTML document is really implemented the same way as a text node in an XML document. + + + Macro to check the following production in the XML spec: [3] S ::= (#x20 | #x9 | #xD | #xA)+ + + + + Behaviour same as IS_BLANK + + + + Macro used to express that the API use the new buffers for xmlParserInputBuffer and xmlOutputBuffer. The change was introduced in 2.9.0. + + + Whether the automata interfaces are compiled in + + + Whether the Canonicalization support is configured in + + + Whether the Catalog support is configured in + + + Whether Debugging module is configured in + + + the version string like "1.2.3" + + + Whether the HTML support is configured in + + + Whether the HTTP support is configured in + + + Whether iconv support is available + + + Whether icu support is available + + + Whether ISO-8859-* support is made available in case iconv is not + + + Whether the deprecated APIs are compiled in for compatibility + + + Whether the Lzma support is compiled in + + + Whether the module interfaces are compiled in + + + the string suffix used by dynamic modules (usually shared libraries) + + + Whether the serialization/saving support is configured in + + + Whether the xmlPattern node selection interface is configured in + + + Whether the push parsing interfaces are configured in + + + Whether the xmlReader parsing interface is configured in + + + Whether the regular expressions interfaces are compiled in + + + Whether the older SAX1 interface is configured in + + + Whether the Schemas validation interfaces are compiled in + + + Whether the Schematron validation interfaces are compiled in + + + Macro to check that the libxml version in use is compatible with the version the software has been compiled against + + + Whether the allocation hooks are per-thread + + + Whether the thread support is configured in + + + Always enabled 
since 2.14.0 + + + Whether the Unicode related interfaces are compiled in + + + Whether the DTD validation support is configured in + + + the version number: 1.2.3 value is 10203 + + + extra version information, used to show a git commit description + + + the version number string, 1.2.3 value is "10203" + + + Whether the xmlWriter saving interface is configured in + + + Whether XInclude is configured in + + + Whether XPath is configured in + + + Whether XPointer is configured in + + + Whether the Zlib support is compiled in + + + Macro defining "fallback" + + + Macro defining "href" + + + Macro defining "include" + + + Macro defining the Xinclude namespace: http://www.w3.org/2003/XInclude + + + Macro defining the draft Xinclude namespace: http://www.w3.org/2001/XInclude + + + Macro defining "parse" + + + Macro defining "encoding" + + + Macro defining "text" + + + Macro defining "xml" + + + Macro defining "xpointer" + + + Macro to do a casting from an object pointer to a function pointer without encountering a warning from gcc #define XML_CAST_FPTR(fptr) (*(void **)(&fptr)) This macro violated ISO C aliasing rules (gcc4 on s390 broke) so it is disabled now + + + + The namespace for the XML Catalogs elements. + + + The specific XML Catalog Processing Instruction name. + + + The default version of XML used: 1.0 + + + Macro to extract the content pointer of a node. + + + Macro to extract the line number of an element node. + + + + + + + + + + + A namespace declaration node. + + + Maximum size allowed by the parser for a dictionary by default This is not a limitation of the parser but a safety boundary feature, use XML_PARSE_HUGE option to override it. Introduced in 2.9.0 + + + Maximum size allowed when XML_PARSE_HUGE is set. + + + Maximum size allowed by the parser for ahead lookup This is an upper boundary enforced by the parser to avoid bad behaviour on "unfriendly' content Introduced in 2.9.0 + + + Identifiers can be longer, but this will be more costly at runtime. 
+ + + Maximum size allowed for a markup identifier. This is not a limitation of the parser but a safety boundary feature, use XML_PARSE_HUGE option to override it. Note that with the use of parsing dictionaries overriding the limit may result in more runtime memory usage in face of "unfriendly' content Introduced in 2.9.0 + + + Maximum size allowed for a single text node when building a tree. This is not a limitation of the parser but a safety boundary feature, use XML_PARSE_HUGE option to override it. Introduced in 2.9.0 + + + Special constant found in SAX2 blocks initialized fields + + + Ignore validation non definition on attributes Obsolete, not used anymore. + + + Skip unknown attribute from validation Obsolete, not used anymore. + + + Apply strict validation rules on attributes Obsolete, not used anymore. + + + Used by wildcards. Validate if type found, don't worry if not found + + + Skip unknown attribute from validation + + + Used by wildcards. Apply strict validation rules + + + The attribute group has been defined. + + + Whether this attr. group contains attr. group references. + + + Marks the attr group as marked; used for circular checks. + + + The attr group was redefined. + + + The attribute wildcard has been built. + + + the attribute has a fixed value + + + allow elements in no namespace + + + this is set when the "type" and "ref" references have been resolved. + + + allow elements in no namespace + + + The attribute is optional. + + + Used by wildcards. The attribute is prohibited. + + + The attribute is required. + + + the schema has "extension" in the set of blockDefault. + + + the schema has "restriction" in the set of blockDefault. + + + the schema has "substitution" in the set of blockDefault. 
+ + + the element is abstract + + + the "block" attribute is absent + + + disallowed substitutions are absent + + + disallowed substitutions: "restriction" + + + disallowed substitutions: "substitution" + + + a helper flag for the search of circular references. + + + the element has a default value + + + substitution group exclusions are absent + + + substitution group exclusions: "extension" + + + substitution group exclusions: "restriction" + + + the element has a fixed value + + + the element is global + + + this is set when the elem decl has been checked against all constraints + + + this is set when "type", "ref", "substitutionGroup" references have been resolved. + + + the element is nillable + + + allow elements in no namespace Obsolete, not used anymore. + + + the element is a reference to a type + + + the declaration is a substitution group head + + + the element is top level obsolete: use XML_SCHEMAS_ELEM_GLOBAL instead + + + collapse the types of the facet + + + preserve the type of the facet + + + replace the type of the facet + + + unknown facet handling + + + the schema has "extension" in the set of finalDefault. + + + the schema has "list" in the set of finalDefault. + + + the schema has "restriction" in the set of finalDefault. + + + the schema has "union" in the set of finalDefault. + + + the schema is currently including an other schema with no target namespace. + + + Reflects attributeFormDefault == qualified in an XML schema document. + + + Reflects elementFormDefault == qualified in an XML schema document. + + + the simple/complexType is abstract. + + + the complexType did not specify 'block' so use the default of the <schema> item. + + + the complexType has a 'block' of "extension". + + + the complexType has a 'block' of "restriction". + + + Marks the item as a builtin primitive. + + + the simple or complex type has a derivation method of "extension". + + + the simple or complex type has a derivation method of "restriction". 
+ + + indicates if the facets need a computed value + + + the simpleType has a final of "default". + + + the complexType has a final of "extension". + + + the simpleType has a final of "list". + + + the simpleType/complexType has a final of "restriction". + + + the simpleType has a final of "union". + + + First stage of fixup was done. + + + the type is global + + + has facets + + + indicates that the type is invalid + + + indicates that the type was typefixed + + + Marks the item as marked; used for circular checks. + + + the element content type is mixed + + + indicates if the facets (pattern) need a normalized value + + + the complexType owns an attribute wildcard, i.e. it can be freed by the complexType + + + The type was redefined. + + + the simpleType has a variety of "absent". TODO: Actually not necessary :-/, since if none of the variety flags occur then it's automatically absent. + + + the simpleType has a variety of "union". + + + the simpleType has a variety of "list". + + + the simpleType has a variety of "union". + + + a whitespace-facet value of "collapse" + + + a whitespace-facet value of "preserve" + + + a whitespace-facet value of "replace" + + + If the wildcard is complete. + + + + + + + + + + + Both general and parameter entities need to be substituted. + + + If no entities need to be substituted. + + + Whether parameter entities need to be substituted. + + + Whether general entities need to be substituted. + + + This is the name for the special xml:id attribute + + + This is the namespace for the special xml: prefix predefined in the XML Namespace specification. + + + check namespaces at compilation + + + forbid variables in expression + + + Macro to raise an XPath error and return. + + + + Macro to raise an XPath error and return 0. + + + + Returns the default subelement for this element + + + + Checks whether an HTML element description may be a direct child of the specified element. Returns 1 if allowed; 0 otherwise. 
+ + + + + Returns the attributes required for the specified element. + + + + Macro for compatibility naming layer with libxml1. Maps to "children." + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Automatically generated by genChRanges.py + + + + Macro for compatibility naming layer with libxml1. Maps to "children". + + + this macro maps to xmlTextWriterWriteDTD + + + This macro maps to xmlTextWriterWritePI + + + Check if an XPath error was raised. Returns true if an error has been raised, false otherwise. + + + + Empties a node-set. + + + + Get the context node of an XPath context. Returns the context node. + + + + Get the document of an XPath context. Returns the context document. + + + + Get the error code of an XPath context. Returns the context error. + + + + Implement a functionality similar to the DOM NodeList.length. Returns the number of nodes in the node-set. + + + + Checks whether @ns is empty or not. Returns %TRUE if @ns is an empty node-set. + + + + Implements a functionality similar to the DOM NodeList.item(). Returns the xmlNodePtr at the given @index in @ns or NULL if @index is out of range (0 to length-1) + + + + + Pushes the boolean @val on the context stack. + + + + + Pushes an empty node-set on the context stack. + + + + Pushes an empty string on the stack. + + + + Pushes user data on the context stack. + + + + + Pushes false on the context stack. 
+ + + + Pushes the node-set @ns on the context stack. + + + + + Pushes the double @val on the context stack. + + + + + Pushes the string @str on the context stack. + + + + + Pushes true on the context stack. + + + + Raises an XPATH_INVALID_ARITY error. + + + + Raises an error. + + + + + Raises an XPATH_INVALID_TYPE error. + + + + Checks if the current value on the XPath stack is an external object. Returns true if the current object on the stack is an external object. + + + + Check if the current value on the XPath stack is a node set or an XSLT value tree. Returns true if the current object on the stack is a node-set. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A libxml automata description, It can be compiled into a regexp + + + + A state int the automata description, + + + + A pointer to a buffer structure, the actual structure internals are not public + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + This is a basic byte in an UTF-8 encoded string. It's unsigned allowing to pinpoint case where char * are assigned to xmlChar * (possibly making serialization back impossible). 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A handle to a dynamically loaded module + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A libxml progressive regular expression evaluation context + + + + A libxml regular expression, they can actually be far more complex thank the POSIX regex expressions. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pointer to an xmlReader context. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DEPRECATED: This handler is unused and will be removed from future versions. Default old SAX v1 handler for HTML, builds the DOM tree + + + DEPRECATED, always 0. + + + DEPRECATED: Don't use. Global setting, default allocation policy for buffers, default is XML_BUFFER_ALLOC_EXACT + + + DEPRECATED: Don't use. Global setting, default buffer size. Default value is BASE_BUFFER_SIZE + + + DEPRECATED: This handler is unused and will be removed from future versions. Default SAX version1 handler for XML, builds the DOM tree + + + DEPRECATED: Don't use The default SAX Locator { getPublicId, getSystemId, getLineNumber, getColumnNumber} + + + @mem: an already allocated block of memory The variable holding the libxml free() implementation + + + + + + + + + + @size: the size requested in bytes The variable holding the libxml malloc() implementation Returns a pointer to the newly allocated block or NULL in case of error + + + @size: the size requested in bytes The variable holding the libxml malloc() implementation for atomic data (i.e. blocks not containing pointers), useful when using a garbage collecting allocator. Returns a pointer to the newly allocated block or NULL in case of error + + + @str: a zero terminated string The variable holding the libxml strdup() implementation Returns the copy of the string or NULL in case of error + + + DEPRECATED, always 0. + + + arbitrary depth limit for the XML documents that we allow to process. This is not a limitation of the parser but a safety boundary feature. It can be disabled with the XML_PARSE_HUGE parser option. 
+ + + Constant string describing the internal version of the library + + + @mem: an already allocated block of memory @size: the new size requested in bytes The variable holding the libxml realloc() implementation Returns a pointer to the newly reallocated block or NULL in case of error + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Take a block of UTF-8 chars in and try to convert it to an ASCII plus HTML entities block of chars out. + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Take a block of UTF-8 chars in and try to convert it to an ISO Latin 1 block of chars out. + + + + + + + + An attribute definition has been parsed. + + + + + + + + + + + Handle an attribute that has been read by the parser. The default handling is to convert the attribute into an DOM subtree and past it in a new xmlAttr element added to the element. + + + + + + + Called when a pcdata block has been parsed. + + + + + + + Receiving some chars from the parser. + + + + + + + A comment has been parsed. + + + + + + An element definition has been parsed. + + + + + + + + Called when the document end has been detected. + + + + + SAX2 callback when an element end has been detected by the parser. It provides the namespace information for the element. + + + + + + + + Called when the end of an element has been detected. + + + + + + An entity definition has been parsed. + + + + + + + + + + Display and format an error messages, callback. + + + + + + + Callback on external subset declaration. + + + + + + + + Display and format fatal error messages, callback. Note: so far fatalError() SAX callbacks are not used, error() get all the callbacks for errors. + + + + + + + Get an entity by name. + + + + + + Get a parameter entity by name. + + + + + + Does this document has an external subset? + + + + + Does this document has an internal subset. + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Don't use. + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. 
The HTML DTD allows a tag to implicitly close other tags. The list is kept in htmlStartClose array. This function checks if the element or one of it's children would autoclose the given tag. + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlNewParserCtxt and htmlCtxtReadFile. Create a parser context to read from a file. A non-NULL encoding overrides encoding declarations in the document. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlNewParserCtxt and htmlCtxtReadMemory. Create a parser context for an HTML in-memory document. The input buffer must not contain any terminating null bytes. + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_PUSH_ENABLED) + Create a parser context for using the HTML parser in push mode. + + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML document and return the resulting document tree. Available since 2.13.0. + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML in-memory document and build a tree. See htmlCtxtUseOptions for details. + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML from a file descriptor and build a tree. See htmlCtxtUseOptions for details. NOTE that the file descriptor will not be closed when the context is freed or reset. + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML file from the filesystem, the network or a user-defined resource loader. See htmlCtxtUseOptions for details. + + + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML document from I/O functions and source and build a tree. See htmlCtxtUseOptions for details. + + + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML in-memory document and build a tree. The input buffer must not contain any terminating null bytes. See htmlCtxtUseOptions for details. 
+ + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Reset a parser context + + + + + defined(LIBXML_HTML_ENABLED) + Applies the options to the parser context. Unset options are cleared. Available since 2.14.0. With older versions, you can use htmlCtxtUseOptions. HTML_PARSE_RECOVER No effect as of 2.14.0. HTML_PARSE_HTML5 Make the tokenizer emit a SAX callback for each token. This results in unbalanced invocations of startElement and endElement. For now, this is only usable with custom SAX callbacks. HTML_PARSE_NODEFDTD Do not default to a doctype if none was found. HTML_PARSE_NOERROR Disable error and warning reports to the error handlers. Errors are still accessible with xmlCtxtGetLastError. HTML_PARSE_NOWARNING Disable warning reports. HTML_PARSE_PEDANTIC No effect. HTML_PARSE_NOBLANKS Remove some text nodes containing only whitespace from the result document. Which nodes are removed depends on a conservative heuristic. The reindenting feature of the serialization code relies on this option to be set when parsing. Use of this option is DISCOURAGED. HTML_PARSE_NONET No effect. HTML_PARSE_NOIMPLIED Do not add implied html, head or body elements. HTML_PARSE_COMPACT Store small strings directly in the node struct to save memory. HTML_PARSE_HUGE Relax some internal limits. Available since 2.14.0. Use XML_PARSE_HUGE works with older versions. Maximum size of text nodes, tags, comments, CDATA sections normal: 10M huge: 1B Maximum size of names, system literals, pubid literals normal: 50K huge: 10M Maximum nesting depth of elements normal: 256 huge: 2048 HTML_PARSE_IGNORE_ENC Ignore the encoding in the HTML declaration. This option is mostly unneeded these days. The only effect is to enforce UTF-8 decoding of ASCII-like data. HTML_PARSE_BIG_LINES Enable reporting of line numbers larger than 65535. Available since 2.14.0. + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlCtxtSetOptions. Applies the options to the parser context. 
The following options are never cleared and can only be enabled: HTML_PARSE_NODEFDTD HTML_PARSE_NOERROR HTML_PARSE_NOWARNING HTML_PARSE_NOIMPLIED HTML_PARSE_COMPACT HTML_PARSE_HUGE HTML_PARSE_IGNORE_ENC HTML_PARSE_BIG_LINES + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: This function is a no-op. Call xmlInitParser to initialize the library. + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document. + + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document. Formatting return/spaces are added. + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document to an open FILE. + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document in memory and return the xmlChar * and it's size. It's up to the caller to free the memory. + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document in memory and return the xmlChar * and it's size. It's up to the caller to free the memory. + + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Don't use. + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Don't use. + + + + + + defined(LIBXML_HTML_ENABLED) + Take a block of UTF-8 chars in and try to convert it to an ASCII plus HTML entities block of chars out. + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Lookup the given entity in EntitiesTable TODO: the linear scan is really ugly, an hash table is really needed. + + + + + defined(LIBXML_HTML_ENABLED) + Lookup the given entity in EntitiesTable TODO: the linear scan is really ugly, an hash table is really needed. + + + + + defined(LIBXML_HTML_ENABLED) + Free all the memory used by a parser context. However the parsed document in ctxt->myDoc is not freed. 
+ + + + + defined(LIBXML_HTML_ENABLED) + Encoding definition lookup in the Meta tags + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use HTML_PARSE_NOIMPLIED Set and return the previous value for handling HTML omitted tags. + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: This is a no-op. + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. The HTML DTD allows a tag to implicitly close other tags. The list is kept in htmlStartClose array. This function checks if a tag is autoclosed by one of it's child + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. Determine if a given attribute is a boolean attribute. + + + + + defined(LIBXML_HTML_ENABLED) + Check if an attribute is of content type Script + + + + + defined(LIBXML_HTML_ENABLED) + Creates a new HTML document + + + + + + defined(LIBXML_HTML_ENABLED) + Creates a new HTML document without a DTD node if @URI and @ExternalID are NULL + + + + + + defined(LIBXML_HTML_ENABLED) + Allocate and initialize a new HTML parser context. This can be used to parse HTML documents into DOM trees with functions like xmlCtxtReadFile or xmlCtxtReadMemory. See htmlCtxtUseOptions for parser options. See xmlCtxtSetErrorHandler for advanced error handling. See htmlNewSAXParserCtxt for custom SAX parsers. + + + + defined(LIBXML_HTML_ENABLED) + Allocate and initialize a new HTML SAX parser context. If userData is NULL, the parser context will be passed as user data. Available since 2.11.0. If you want support older versions, it's best to invoke htmlNewParserCtxt and set ctxt->sax with struct assignment. Also see htmlNewParserCtxt. + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML node, recursive behaviour,children are printed too, and formatting returns are added. 
+ + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML node, recursive behaviour,children are printed too, and formatting returns are added. + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML node, recursive behaviour,children are printed too. TODO: if encoding == NULL try to save in the doc encoding + + + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML node, recursive behaviour,children are printed too. + + + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML node, recursive behaviour,children are printed too, and formatting returns/spaces are added. + + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Don't use. + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_PUSH_ENABLED) + Parse a chunk of memory in push parser mode. Assumes that the parser context was initialized with htmlCreatePushParserCtxt. The last chunk, which will often be empty, must be marked with the @terminate flag. With the default SAX callbacks, the resulting document will be available in ctxt->myDoc. This pointer will not be freed by the library. If the document isn't well-formed, ctxt->myDoc is set to NULL. + + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlReadDoc. Parse an HTML in-memory document and build a tree. This function uses deprecated global parser options. + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML document and invoke the SAX handlers. This is useful if you're only interested in custom SAX callbacks. If you want a document tree, use htmlCtxtParseDocument. + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. 
parse an HTML element, this is highly recursive this is kept for compatibility with previous code versions [39] element ::= EmptyElemTag | STag content ETag [41] Attribute ::= Name Eq AttValue + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Internal function, don't use. + + + + + + defined(LIBXML_HTML_ENABLED) + Parse an HTML file and build a tree. + + + + + + defined(LIBXML_HTML_ENABLED) + Convenience function to parse an HTML document from a zero-terminated string. See htmlCtxtReadDoc for details. + + + + + + + + defined(LIBXML_HTML_ENABLED) + Convenience function to parse an HTML document from a file descriptor. NOTE that the file descriptor will not be closed when the context is freed or reset. See htmlCtxtReadFd for details. + + + + + + + + defined(LIBXML_HTML_ENABLED) + Convenience function to parse an HTML file from the filesystem, the network or a global user-defined resource loader. See htmlCtxtReadFile for details. + + + + + + + defined(LIBXML_HTML_ENABLED) + Convenience function to parse an HTML document from I/O functions and context. See htmlCtxtReadIO for details. + + + + + + + + + + defined(LIBXML_HTML_ENABLED) + Convenience function to parse an HTML document from memory. The input buffer must not contain any terminating null bytes. See htmlCtxtReadMemory for details. + + + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlNewSAXParserCtxt and htmlCtxtReadDoc. Parse an HTML in-memory document. If sax is not NULL, use the SAX callbacks to handle parse events. If sax is NULL, fallback to the default DOM behavior and return a tree. + + + + + + + + defined(LIBXML_HTML_ENABLED) + DEPRECATED: Use htmlNewSAXParserCtxt and htmlCtxtReadFile. parse an HTML file and build a tree. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. It use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. 
+ + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document to a file. If @filename is "-" the stdout file is used. + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document to a file using a given encoding and formatting returns/spaces are added. + + + + + + + defined(LIBXML_HTML_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump an HTML document to a file using a given encoding. + + + + + + + + defined(LIBXML_HTML_ENABLED) + Sets the current encoding in the Meta tags NOTE: this will not change the document content encoding, just the META flag associated. + + + + + + defined(LIBXML_HTML_ENABLED) + Lookup the HTML tag in the ElementTable + + + + + Receiving some ignorable whitespaces from the parser. UNUSED: by default the DOM building will use characters. + + + + + + + DEPRECATED: Use xmlSetGenericErrorFunc. Set or reset (if NULL) the default handler for generic errors to the builtin error function. + + + + + Pops the top parser input from the input stack + + + + + Pushes a new parser input on top of the input stack + + + + + + Callback on internal subset declaration. + + + + + + + + Is this document tagged standalone? + + + + + Take a block of ISO Latin 1 chars in and try to convert it to an UTF-8 block of chars out. + + + + + + + + DEPRECATED: Internal function, do not use. Pops the top element name from the name stack + + + + + DEPRECATED: Internal function, do not use. Pushes a new element name on top of the name stack + + + + + + DEPRECATED: Internal function, do not use. Pops the top element node from the node stack + + + + + DEPRECATED: Internal function, do not use. Pushes a new element node on top of the node stack + + + + + + What to do when a notation declaration has been parsed. + + + + + + + + A processing instruction has been parsed. + + + + + + + Called when an entity reference is detected. 
+ + + + + + Callback: The entity loader, to control the loading of external entities, the application can either: - override this resolveEntity() callback in the SAX block - or better use the xmlSetExternalEntityLoader() function to set up it's own entity resolution routine + + + + + + + Receive the document locator at startup, actually xmlDefaultSAXLocator. Everything is available on the context, so this is useless in our case. + + + + + + Called when the document start being processed. + + + + + SAX2 callback when an element start has been detected by the parser. It provides the namespace information for the element, as well as the new namespace declarations on the element. + + + + + + + + + + + + + Called when an opening tag has been processed. + + + + + + + What to do when an unparsed entity declaration is parsed. + + + + + + + + + defined(LIBXML_XPATH_ENABLED) + Pops the top XPath object from the value stack + + + + + defined(LIBXML_XPATH_ENABLED) + Pushes a new XPath object on top of the value stack. If value is NULL, a memory error is recorded in the parser context. + + + + + + Display and format a warning messages, callback. + + + + + + + defined(LIBXML_XPTR_ENABLED) + This is the prototype for a extended link detection callback. + + + + + + + + + + + + + + + + + defined(LIBXML_XPTR_ENABLED) + This is the prototype for a extended link set detection callback. + + + + + + + + + + + + defined(LIBXML_XPTR_ENABLED) + DEPRECATED: Don't use. Get the default xlink detection routine + + + + defined(LIBXML_XPTR_ENABLED) + DEPRECATED: Don't use. Get the default xlink handler. + + + + defined(LIBXML_XPTR_ENABLED) + Check whether the given node carries the attributes needed to be a link element (or is one of the linking elements issued from the (X)HTML DtDs). This routine don't try to do full checking of the link validity but tries to detect and return the appropriate link type. 
+ + + + + + defined(LIBXML_XPTR_ENABLED) + This is the prototype for the link detection routine. It calls the default link detection callbacks upon link detection. + + + + + + defined(LIBXML_XPTR_ENABLED) + DEPRECATED: Don't use. Set the default xlink detection routine + + + + + defined(LIBXML_XPTR_ENABLED) + DEPRECATED: Don't use. Set the default xlink handlers + + + + + defined(LIBXML_XPTR_ENABLED) + This is the prototype for a simple link detection callback. + + + + + + + + + defined(LIBXML_CATALOG_ENABLED) + Add an entry in the catalog, it may overwrite existing but different entries. + + + + + + + + defined(LIBXML_CATALOG_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump the given catalog to the given file. + + + + + + defined(LIBXML_CATALOG_ENABLED) + Remove an entry from the catalog + + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an External Identifier + + + + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog local reference associated to a public ID in that catalog + + + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog resource for a system ID + + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an URI + + + + + + DEPRECATED: Internal function, don't use. Register a new attribute declaration Note that @tree becomes the ownership of the DTD + + + + + + + + + + + + + Unlink @cur and append it to the children of @parent. If @cur is a text node, it may be merged with an adjacent text node and freed. In this case the text node containing the merged content is returned. If @cur is an attribute node, it is appended to the attributes of @parent. If the attribute list contains an attribute with a name matching @elem, the old attribute is destroyed. General notes: Move operations like xmlAddChild can cause element or attribute nodes to reference namespaces that aren't declared in one of their ancestors. 
This can lead to use-after-free errors if the elements containing the declarations are freed later, especially when moving nodes from one document to another. You should consider calling xmlReconciliateNs after a move operation to normalize namespaces. Another option is to call xmlDOMWrapAdoptNode with the target parent before moving a node. For the most part, move operations don't check whether the resulting tree structure is valid. Users must make sure that parent nodes only receive children of valid types. Inserted child nodes must never be an ancestor of the parent node to avoid cycles in the tree structure. In general, only document, document fragments, elements and attributes should be used as parent nodes. When moving a node between documents and a memory allocation fails, the node's content will be corrupted and it will be unlinked. In this case, the node must be freed manually. Moving DTDs between documents isn't supported. + + + + + + Append a node list to another node. See xmlAddChild. + + + + + + Register a new entity for this document. + + + + + + + + + + Register a new entity for this document DTD external subset. + + + + + + + + + + DEPRECATED: Internal function, don't use. Register a new element declaration + + + + + + + + + DEPRECATED: This function modifies global state and is not thread-safe. Registers an alias @alias for an encoding named @name. Existing alias will be overwritten. + + + + + + Register a new entity for this document. Available since 2.13.0. + + + + + + + + + + + + Register a new id declaration + + + + + + + + Register a new id declaration Available since 2.13.0. + + + + + + Unlinks @cur and inserts it as next sibling after @prev. Unlike xmlAddChild this function does not merge text nodes. If @cur is an attribute node, it is inserted after attribute @prev. If the attribute list contains an attribute with a name matching @cur, the old attribute is destroyed. See the notes in xmlAddChild. 
+ + + + + + DEPRECATED: Internal function, don't use. Register a new notation declaration + + + + + + + + + Unlinks @cur and inserts it as previous sibling before @next. Unlike xmlAddChild this function does not merge text nodes. If @cur is an attribute node, it is inserted before attribute @next. If the attribute list contains an attribute with a name matching @cur, the old attribute is destroyed. See the notes in xmlAddChild. + + + + + + DEPRECATED, do not use. This function will be removed from the public API. Register a new ref declaration + + + + + + + + Unlinks @cur and inserts it as last sibling of @node. If @cur is a text node, it may be merged with an adjacent text node and freed. In this case the text node containing the merged content is returned. If @cur is an attribute node, it is appended to the attribute list containing @node. If the attribute list contains an attribute with a name matching @cur, the old attribute is destroyed. See the notes in xmlAddChild. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered parser output Consumes @encoder even in error case. + + + + + DEPRECATED: Use xmlNewInputFrom*. Create a buffered parser input for progressive parsing. The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + defined(LIBXML_OUTPUT_ENABLED) + Serialize text attribute values to an xml simple buffer + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Compile the automata into a Reg Exp ready for being executed. The automata should be free after this point. + + + + + defined(LIBXML_REGEXP_ENABLED) + Initial state lookup + + + + + defined(LIBXML_REGEXP_ENABLED) + Checks if an automata is determinist. + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a an ALL transition from the @from state to the target state. 
That transition is an epsilon transition allowed only when all transitions from the @from node have been activated. + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by a succession of input of value @token and whose number is between @min and @max + + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by a succession of input of value @token and @token2 and whose number is between @min and @max + + + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds an epsilon transition from the @from state to the target state which will increment the counter provided + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Create a new counter + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds an epsilon transition from the @from state to the target state which will be allowed only if the counter is within the right range. 
+ + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds an epsilon transition from the @from state to the target state + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by any value except (@token,@token2) Note that if @token2 is not NULL, then (X, NULL) won't match to follow # the semantic of XSD ##other + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by a succession of input of value @token and whose number is between @min and @max, moreover that transition can only be crossed once. + + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by a succession of input of value @token and @token2 and whose number is between @min and @max, moreover that transition can only be crossed once. 
+ + + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Create a new disconnected state in the automata + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by the value of @token + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + If @to is NULL, this creates first a new target state in the automata and then adds a transition from the @from state to the target state activated by the value of @token + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Makes that state a final state + + + + + + Function to extract the content of a buffer + + + + + Function to extract the end of the content of a buffer + + + + + Read the value of a node @cur, this can be either the text carried directly by this node if it's a TEXT node or the aggregate string of the values carried by this node child's (TEXT and ENTITY_REF). Entity references are substituted. Fills up the buffer @buf with this value + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML node, recursive behaviour,children are printed too. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called + + + + + + + + + DEPRECATED: Don't use. Remove the beginning of an XML buffer. NOTE that this routine behaviour differs from xmlBufferShrink() as it will return 0 on error instead of -1 due to size_t being used as the return type. + + + + + + Function to get the length of a buffer + + + + + Add a string range to an XML buffer. if len == -1, the length of str is recomputed. + + + + + + + Add a string range to the beginning of an XML buffer. if len == -1, the length of @str is recomputed. + + + + + + + Append a zero terminated C string to an XML buffer. + + + + + + Append a zero terminated string to an XML buffer. + + + + + + Function to extract the content of a buffer + + + + + routine to create an XML buffer. 
+ + + + routine to create an XML buffer. + + + + + + + + + + + Remove the string contained in a buffer and gie it back to the caller. The buffer is reset to an empty content. This doesn't work with immutable buffers as they can't be reset. + + + + + Dumps an XML buffer to a FILE *. + + + + + + empty a buffer. + + + + + Frees an XML buffer. It frees both the content and the structure which encapsulate it. + + + + + DEPRECATED: Don't use. Grow the available space of an XML buffer. + + + + + + Function to get the length of a buffer + + + + + DEPRECATED: Don't use. Resize a buffer to accommodate minimum size of @size. + + + + + + Sets the allocation scheme for this buffer. For libxml2 before 2.14, it is recommended to set this to XML_BUFFER_ALLOC_DOUBLE_IT. Has no effect on 2.14 or later. + + + + + + DEPRECATED: Don't use. Remove the beginning of an XML buffer. + + + + + + routine which manages and grows an output buffer. This one adds xmlChars at the end of the buffer. + + + + + + routine which manage and grows an output buffer. This one add C chars at the end of the array. + + + + + + routine which manage and grows an output buffer. This one writes a quoted or double quoted #xmlChar string, checking first if it holds quote or double-quotes internally + + + + + + Builds the QName @prefix:@ncname in @memory if there is enough space and prefix is not NULL nor empty, otherwise allocate a new string. If prefix is NULL or empty it returns ncname. + + + + + + + + See xmlBuildRelativeURISafe. + + + + + + Expresses the URI of the reference in terms relative to the base. 
Some examples of this operation include: base = "http://site1.com/docs/book1.html" URI input URI returned http://site1.com/docs/pic1.gif pic1.gif http://site2.com/docs/pic1.gif http://site2.com/docs/pic1.gif base = "docs/book1.html" URI input URI returned docs/pic1.gif pic1.gif docs/img/pic1.gif img/pic1.gif img/pic1.gif ../img/pic1.gif http://site1.com/docs/pic1.gif http://site1.com/docs/pic1.gif Available since 2.13.0. + + + + + + + Computes he final URI of the reference done by checking that the given URI is valid, and building the final URI using the base URI. This is processed according to section 5.2 of the RFC 2396 5.2. Resolving Relative References to Absolute Form + + + + + + Computes he final URI of the reference done by checking that the given URI is valid, and building the final URI using the base URI. This is processed according to section 5.2 of the RFC 2396 5.2. Resolving Relative References to Absolute Form Available since 2.13.0. + + + + + + + DEPRECATED: Don't use. This function provides the current index of the parser relative to the start of the current entity. This function is computed in bytes from the beginning starting at zero and finishing at the size in byte of the file if parsing a file. The function is of constant cost if the input is UTF-8 but can be costly if run on non-UTF-8 input. + + + + + defined(LIBXML_C14N_ENABLED) + Dumps the canonized image of given XML document into memory. For details see "Canonical XML" (http://www.w3.org/TR/xml-c14n) or "Exclusive XML Canonicalization" (http://www.w3.org/TR/xml-exc-c14n) + + + + + + + + + + defined(LIBXML_C14N_ENABLED) + Dumps the canonized image of given XML document into the file. For details see "Canonical XML" (http://www.w3.org/TR/xml-c14n) or "Exclusive XML Canonicalization" (http://www.w3.org/TR/xml-exc-c14n) + + + + + + + + + + + defined(LIBXML_C14N_ENABLED) + Dumps the canonized image of given XML document into the provided buffer. 
For details see "Canonical XML" (http://www.w3.org/TR/xml-c14n) or "Exclusive XML Canonicalization" (http://www.w3.org/TR/xml-exc-c14n) + + + + + + + + + + defined(LIBXML_C14N_ENABLED) + Dumps the canonized image of given XML document into the provided buffer. For details see "Canonical XML" (http://www.w3.org/TR/xml-c14n) or "Exclusive XML Canonicalization" (http://www.w3.org/TR/xml-exc-c14n) + + + + + + + + + + + defined(LIBXML_C14N_ENABLED) + Signature for a C14N callback on visible nodes + + + + + + + Prepares a path. If the path contains the substring "://", it is considered a Legacy Extended IRI. Characters which aren't allowed in URIs are escaped. Otherwise, the path is considered a filesystem path which is copied without modification. The caller is responsible for freeing the memory occupied by the returned string. If there is insufficient memory available, or the argument is NULL, the function returns NULL. + + + + + defined(LIBXML_CATALOG_ENABLED) + Add an entry in the catalog, it may overwrite existing but different entries. If called before any other catalog routine, allows to override the default shared catalog put in place by xmlInitializeCatalog(); + + + + + + + defined(LIBXML_CATALOG_ENABLED) + Add the new entry to the catalog list + + + + + + defined(LIBXML_CATALOG_ENABLED) + Free up all the memory associated with catalogs + + + + defined(LIBXML_CATALOG_ENABLED) + Convert all the SGML catalog entries as XML ones + + + + defined(LIBXML_CATALOG_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump all the global catalog content to the given file. + + + + + defined(LIBXML_CATALOG_ENABLED) + Free up the memory associated to the catalog list + + + + + defined(LIBXML_CATALOG_ENABLED) + DEPRECATED: Use XML_PARSE_NO_SYS_CATALOG and XML_PARSE_NO_CATALOG_PI. Used to get the user preference w.r.t. 
to what catalogs should be accepted + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog reference associated to a public ID DEPRECATED, use xmlCatalogResolvePublic() + + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog reference associated to a system ID DEPRECATED, use xmlCatalogResolveSystem() + + + + + defined(LIBXML_CATALOG_ENABLED) + Check is a catalog is empty + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an External Identifier using a document's private catalog list + + + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an URI using a document's private catalog list + + + + + + defined(LIBXML_CATALOG_ENABLED) + Remove an entry from the catalog + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an External Identifier + + + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog reference associated to a public ID + + + + + defined(LIBXML_CATALOG_ENABLED) + Try to lookup the catalog resource for a system ID + + + + + defined(LIBXML_CATALOG_ENABLED) + Do a complete resolution lookup of an URI + + + + + defined(LIBXML_CATALOG_ENABLED) + Used to set the debug level for catalog operation, 0 disable debugging, 1 enable it + + + + + defined(LIBXML_CATALOG_ENABLED) + DEPRECATED: This setting is global and not thread-safe. Allows to set the preference between public and system for deletion in XML Catalog resolution. C.f. section 4.1.1 of the spec Values accepted are XML_CATA_PREFER_PUBLIC or XML_CATA_PREFER_SYSTEM + + + + + defined(LIBXML_CATALOG_ENABLED) + DEPRECATED: Use XML_PARSE_NO_SYS_CATALOG and XML_PARSE_NO_CATALOG_PI. Used to set the user preference w.r.t. to what catalogs should be accepted + + + + + Releases an xmlCharEncodingHandler. Must be called after a handler is no longer in use. + + + + + Free a conversion context. + + + + + Convert between character encodings. 
On success, the value of @inlen after return is the number of bytes consumed and @outlen is the number of bytes produced. + + + + + + + + + vctxt: user data name: encoding name conv: pointer to xmlCharEncConverter struct If this function returns XML_ERR_OK, it must fill the @conv struct with a conversion function, and optional destructor and optional input and output conversion contexts. + + + + + + + DEPERECATED: Don't use. + + + + + + + Generic front-end for the encoding handler input function + + + + + + + Generic front-end for the encoding handler output function a first call with @in == NULL has to be made firs to initiate the output in case of non-stateless encoding needing to initiate their state or the output (like the BOM in UTF16). In case of UTF8 sequence conversion errors for the given encoder, the content will be automatically remapped to a CharRef sequence. + + + + + + + Convert characters to UTF-8. On success, the value of @inlen after return is the number of bytes consumed and @outlen is the number of bytes produced. + + + + + + + + Convert characters from UTF-8. On success, the value of @inlen after return is the number of bytes consumed and @outlen is the number of bytes produced. + + + + + + + + Does a binary search of the range table to determine if char is valid + + + + + + a strdup for char's to xmlChar's + + + + + a strndup for char's to xmlChar's + + + + + + DEPRECATED: Internal function, don't use. if stat is not available on the target machine, + + + + + DEPRECATED: Internal function, don't use. Check an input in case it was created from an HTTP stream, in that case it will handle encoding and update of the base URL in case of redirection. It also checks for HTTP errors in which case the input is cleanly freed up and an appropriate error is raised in context + + + + + + DEPRECATED: Internal function, do not use. 
Checks that the value conforms to the LanguageID production: NOTE: this is somewhat deprecated, those productions were removed from the XML Second edition. [33] LanguageID ::= Langcode ('-' Subcode)* [34] Langcode ::= ISO639Code | IanaCode | UserCode [35] ISO639Code ::= ([a-z] | [A-Z]) ([a-z] | [A-Z]) [36] IanaCode ::= ('i' | 'I') '-' ([a-z] | [A-Z])+ [37] UserCode ::= ('x' | 'X') '-' ([a-z] | [A-Z])+ [38] Subcode ::= ([a-z] | [A-Z])+ The current REC reference the successors of RFC 1766, currently 5646 http://www.rfc-editor.org/rfc/rfc5646.txt langtag = language ["-" script] ["-" region] *("-" variant) *("-" extension) ["-" privateuse] language = 2*3ALPHA ; shortest ISO 639 code ["-" extlang] ; sometimes followed by ; extended language subtags / 4ALPHA ; or reserved for future use / 5*8ALPHA ; or registered language subtag extlang = 3ALPHA ; selected ISO 639 codes *2("-" 3ALPHA) ; permanently reserved script = 4ALPHA ; ISO 15924 code region = 2ALPHA ; ISO 3166-1 code / 3DIGIT ; UN M.49 code variant = 5*8alphanum ; registered variants / (DIGIT 3alphanum) extension = singleton 1*("-" (2*8alphanum)) ; Single alphanumerics ; "x" reserved for private use singleton = DIGIT ; 0 - 9 / %x41-57 ; A - W / %x59-5A ; Y - Z / %x61-77 ; a - w / %x79-7A ; y - z it sounds right to still allow Irregular i-xxx IANA and user codes too The parser below doesn't try to cope with extension or privateuse that could be added but that's not interoperable anyway + + + + + Check whether thread-local storage could be allocated. In cross-platform code running in multithreaded environments, this function should be called once in each thread before calling other library functions to make sure that thread-local storage was allocated properly. + + + + Checks @utf for being valid UTF-8. @utf is assumed to be null-terminated. This function is not super-strict, as it will allow longer UTF-8 sequences than necessary. Note that Java is capable of producing these sequences if provoked. 
Also note, this routine checks for the 4-byte maximum size, but does not check for 0x10ffff maximum value. + + + + + Count the number of child nodes which are elements. Note that entity references are not expanded. + + + + + DEPRECATED: This function will be made private. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have call cleanup functions at all. Cleanup the memory allocated for the char encoding support, it unregisters all the encoding handlers and the aliases. + + + + DEPRECATED: This function modifies global state and is not thread-safe. Unregisters all aliases + + + + DEPRECATED: This function is a no-op. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have call cleanup functions at all. + + + + clears the entire input callback table. this includes the compiled-in I/O. + + + + DEPRECATED: This function is a no-op. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have call cleanup functions at all. + + + + defined(LIBXML_OUTPUT_ENABLED) + clears the entire output callback table. this includes the compiled-in I/O callbacks. + + + + This function is named somewhat misleadingly. It does not clean up parser state but global memory allocated by the library itself. Since 2.9.11, cleanup is performed automatically if a shared or dynamic libxml2 library is unloaded. This function should only be used to avoid false positives from memory leak checkers in static builds. WARNING: xmlCleanupParser assumes that all other threads that called libxml2 functions have terminated. No library calls must be made after calling this function. In general, THIS FUNCTION SHOULD ONLY BE CALLED RIGHT BEFORE THE WHOLE PROCESS EXITS. 
+ + + + DEPRECATED: This function is a no-op. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have call cleanup functions at all. + + + + DEPRECATED: Don't use. -- Clear (release memory and reinitialize) node info sequence + + + + + Clear (release owned resources) and reinitialize a parser context + + + + + defined(LIBXML_CATALOG_ENABLED) + Convert all the SGML catalog entries as XML ones + + + + + DEPRECATED: Internal function, don't use. Build a copy of an attribute table. + + + + + DEPRECATED: Don't use. append the char value in the array + + + + + + + DEPRECATED: Internal function, don't use. append the char value in the array + + + + + + Copy a document. If recursive, the content tree will be copied too as well as DTD, namespaces and entities. + + + + + + DEPRECATED: Internal function, don't use. Build a copy of an element content description. + + + + + + Copy a DTD. + + + + + DEPRECATED: Internal function, don't use. Build a copy of an element content description. Deprecated, use xmlCopyDocElementContent instead + + + + + DEPRECATED: Internal function, don't use. Build a copy of an element table. + + + + + Build a copy of an entity table. + + + + + DEPRECATED: Internal function, don't use. Copy an enumeration attribute node (recursive). + + + + + Save the original error to the new place. + + + + + + Copy a namespace. + + + + + Copy a namespace list. + + + + + Copy a node. Use of this function is DISCOURAGED in favor of xmlDocCopyNode. + + + + + + Copy a node list and all children. Use of this function is DISCOURAGED in favor of xmlDocCopyNodeList. + + + + + DEPRECATED: Internal function, don't use. Build a copy of a notation table. + + + + + Create a copy of the attribute. This function sets the parent pointer of the copy to @target but doesn't set the attribute on the target element. 
Users should consider to set the attribute by calling xmlAddChild afterwards or reset the parent pointer to NULL. + + + + + + Create a copy of an attribute list. This function sets the parent pointers of the copied attributes to @target but doesn't set the attributes on the target element. + + + + + + Find or create a handler matching the encoding. The following converters are looked up in order: - Built-in handler (UTF-8, UTF-16, ISO-8859-1, ASCII) - Custom implementation if provided - User-registered global handler (deprecated) - iconv if enabled - ICU if enabled The handler must be closed with xmlCharEncCloseFunc. If the encoding is UTF-8, a NULL handler and no error code will be returned. Available since 2.14.0. + + + + + + + + + Creates a parser context for an XML in-memory document. + + + + + create and initialize an empty entities hash table. This really doesn't make sense and should be deprecated + + + + DEPRECATED: Don't use. Create a parser context for an external entity Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. + + + + + + + DEPRECATED: Internal function, don't use. create and initialize an enumeration attribute node. + + + + + DEPRECATED: Use xmlNewParserCtxt and xmlCtxtReadFile. Create a parser context for a file content. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. + + + + + Create a parser context for using the XML parser with an existing I/O stream + + + + + + + + + + Create a DTD node. If a document is provided and it already has an internal subset, the existing DTD object is returned without creating a new object. If the document has no internal subset, it will be set to the created DTD. + + + + + + + + Create a parser context for an XML in-memory document. The input buffer must not contain a terminating null byte. + + + + + + defined(LIBXML_PUSH_ENABLED) + Create a parser context for using the XML parser in push mode. 
See xmlParseChunk. Passing an initial chunk is useless and deprecated. @filename is used as base URI to fetch external entities and for error reports. + + + + + + + + + Simply creates an empty xmlURI + + + + DEPRECATED: Use xmlNewParserCtxt and xmlCtxtReadFile. Create a parser context for a file or URL content. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time and for file accesses + + + + + + Handle an out-of-memory error. Available since 2.13.0. + + + + + Available since 2.14.0. + + + + + Available since 2.14.0. + + + + + Available since 2.14.0. + + + + + Get the last parsing error registered. + + + + + Get the current options of the parser context. Available since 2.14.0. + + + + + Available since 2.14.0. + + + + + Available since 2.14.0. + + + + + Get well-formedness and validation status after parsing. Also reports catastrophic errors which are not related to parsing like out-of-memory, I/O or other errors. Available since 2.14.0. + + + + + Available since 2.14.0. + + + + + Parse a well-balanced chunk of XML matching the 'content' production. Namespaces in scope of @node and entities of @node's document are recognized. When validating, the DTD of @node's document is used. Always consumes @input even in error case. Available since 2.14.0. + + + + + + + + Parse an XML document and return the resulting document tree. Takes ownership of the input object. Available since 2.13.0. + + + + + + defined(LIBXML_VALID_ENABLED) + Parse a DTD. Option XML_PARSE_DTDLOAD should be enabled in the parser context to make external entities work. Availabe since 2.14.0. + + + + + + + + Pops the top parser input from the input stack + + + + + Pushes a new parser input on top of the input stack + + + + + + Parse an XML in-memory document and build a tree. @URL is used as base to resolve external entities and for error reporting. See xmlCtxtUseOptions for details. 
+ + + + + + + + + Parse an XML document from a file descriptor and build a tree. NOTE that the file descriptor will not be closed when the context is freed or reset. @URL is used as base to resolve external entities and for error reporting. See xmlCtxtUseOptions for details. + + + + + + + + + Parse an XML file from the filesystem, the network or a user-defined resource loader. + + + + + + + + parse an XML document from I/O functions and source and build a tree. This reuses the existing @ctxt parser context @URL is used as base to resolve external entities and for error reporting. See xmlCtxtUseOptions for details. + + + + + + + + + + + Parse an XML in-memory document and build a tree. The input buffer must not contain a terminating null byte. @URL is used as base to resolve external entities and for error reporting. See xmlCtxtUseOptions for details. + + + + + + + + + + Reset a parser context + + + + + Cleanup the last global error registered. For parsing error this does not change the well-formedness result. + + + + + Reset a push parser context + + + + + + + + + Available since 2.14.0. Set the local catalogs. + + + + + + Installs a custom implementation to convert between character encodings. This bypasses legacy feature like global encoding handlers or encoding aliases. Available since 2.14.0. + + + + + + + Available since 2.14.0. Set the dictionary. This should only be done immediately after creating a parser context. + + + + + + Register a callback function that will be called on errors and warnings. If handler is NULL, the error handler will be deactivated. This is the recommended way to collect errors from the parser and takes precedence over all other error reporting mechanisms. 
These are (in order of precedence): - per-context structured handler (xmlCtxtSetErrorHandler) - per-context structured "serror" SAX handler - global structured handler (xmlSetStructuredErrorFunc) - per-context generic "error" and "warning" SAX handlers - global generic handler (xmlSetGenericErrorFunc) - print to stderr Available since 2.13.0. + + + + + + + To protect against exponential entity expansion ("billion laughs"), the size of serialized output is (roughly) limited to the input size multiplied by this factor. The default value is 5. When working with documents making heavy use of entity expansion, it can be necessary to increase the value. For security reasons, this should only be considered when processing trusted input. + + + + + + Applies the options to the parser context. Unset options are cleared. Available since 2.13.0. With older versions, you can use xmlCtxtUseOptions. XML_PARSE_RECOVER Enable "recovery" mode which allows non-wellformed documents. How this mode behaves exactly is unspecified and may change without further notice. Use of this feature is DISCOURAGED. XML_PARSE_NOENT Despite the confusing name, this option enables substitution of entities. The resulting tree won't contain any entity reference nodes. This option also enables loading of external entities (both general and parameter entities) which is dangerous. If you process untrusted data, it's recommended to set the XML_PARSE_NO_XXE option to disable loading of external entities. XML_PARSE_DTDLOAD Enables loading of an external DTD and the loading and substitution of external parameter entities. Has no effect if XML_PARSE_NO_XXE is set. XML_PARSE_DTDATTR Adds default attributes from the DTD to the result document. Implies XML_PARSE_DTDLOAD, but loading of external content can be disabled with XML_PARSE_NO_XXE. 
XML_PARSE_DTDVALID This option enables DTD validation which requires to load external DTDs and external entities (both general and parameter entities) unless XML_PARSE_NO_XXE was set. XML_PARSE_NO_XXE Disables loading of external DTDs or entities. Available since 2.13.0. XML_PARSE_NOERROR Disable error and warning reports to the error handlers. Errors are still accessible with xmlCtxtGetLastError. XML_PARSE_NOWARNING Disable warning reports. XML_PARSE_PEDANTIC Enable some pedantic warnings. XML_PARSE_NOBLANKS Remove some text nodes containing only whitespace from the result document. Which nodes are removed depends on DTD element declarations or a conservative heuristic. The reindenting feature of the serialization code relies on this option to be set when parsing. Use of this option is DISCOURAGED. XML_PARSE_SAX1 Always invoke the deprecated SAX1 startElement and endElement handlers. This option is DEPRECATED. XML_PARSE_NONET Disable network access with the builtin HTTP client. XML_PARSE_NODICT Create a document without interned strings, making all strings separate memory allocations. XML_PARSE_NSCLEAN Remove redundant namespace declarations from the result document. XML_PARSE_NOCDATA Output normal text nodes instead of CDATA nodes. XML_PARSE_COMPACT Store small strings directly in the node struct to save memory. XML_PARSE_OLD10 Use old Name productions from before XML 1.0 Fifth Edition. This options is DEPRECATED. XML_PARSE_HUGE Relax some internal limits. Maximum size of text nodes, tags, comments, processing instructions, CDATA sections, entity values normal: 10M huge: 1B Maximum size of names, system literals, pubid literals normal: 50K huge: 10M Maximum nesting depth of elements normal: 256 huge: 2048 Maximum nesting depth of entities normal: 20 huge: 40 XML_PARSE_OLDSAX Enable an unspecified legacy mode for SAX parsers. This option is DEPRECATED. XML_PARSE_IGNORE_ENC Ignore the encoding in the XML declaration. This option is mostly unneeded these days. 
The only effect is to enforce UTF-8 decoding of ASCII-like data. XML_PARSE_BIG_LINES Enable reporting of line numbers larger than 65535. XML_PARSE_NO_UNZIP Disables input decompression. Setting this option is recommended to avoid zip bombs. Available since 2.14.0. XML_PARSE_NO_SYS_CATALOG Disables the global system XML catalog. Available since 2.14.0. XML_PARSE_NO_CATALOG_PI Ignore XML catalog processing instructions. Available since 2.14.0. + + + + + + Available since 2.14.0. Set the private application data. + + + + + + Installs a custom callback to load documents, DTDs or external entities. Available since 2.14.0. + + + + + + + DEPRECATED: Use xmlCtxtSetOptions. Applies the options to the parser context. The following options are never cleared and can only be enabled: XML_PARSE_NOERROR XML_PARSE_NOWARNING XML_PARSE_NONET XML_PARSE_NSCLEAN XML_PARSE_NOCDATA XML_PARSE_COMPACT XML_PARSE_OLD10 XML_PARSE_HUGE XML_PARSE_OLDSAX XML_PARSE_IGNORE_ENC XML_PARSE_BIG_LINES + + + + + + defined(LIBXML_VALID_ENABLED) + Validate a document. Like xmlValidateDocument but uses the parser context's error handler. Option XML_PARSE_DTDLOAD should be enabled in the parser context to make external entities work. Availabe since 2.14.0. + + + + + + defined(LIBXML_VALID_ENABLED) + Validate a document against a DTD. Like xmlValidateDtd but uses the parser context's error handler. Availabe since 2.14.0. + + + + + + + DEPRECATED: Internal function, do not use. The current char value, if using UTF-8 this may actually span multiple bytes in the input buffer. Implement the end of line normalization: 2.11 End-of-Line Handling Wherever an external parsed entity or the literal entity value of an internal parsed entity contains either the literal two-character sequence "#xD#xA" or a standalone literal #xD, an XML processor must pass to the application the single character #xA. This behavior can conveniently be produced by normalizing all line breaks to #xA on input, before parsing.) 
+ + + + + + A function called to acquire namespaces (xmlNs) from the wrapper. + + + + + + + + References of out-of scope ns-decls are remapped to point to @destDoc: 1) If @destParent is given, then nsDef entries on element-nodes are used 2) If *no* @destParent is given, then @destDoc->oldNs entries are used This is the case when you have an unlinked node and just want to move it to the context of If @destParent is given, it ensures that the tree is namespace wellformed by creating additional ns-decls where needed. Note that, since prefixes of already existent ns-decls can be shadowed by this process, it could break QNames in attribute values or element content. NOTE: This function was not intensively tested. + + + + + + + + + + References of out-of scope ns-decls are remapped to point to @destDoc: 1) If @destParent is given, then nsDef entries on element-nodes are used 2) If *no* @destParent is given, then @destDoc->oldNs entries are used. This is the case when you don't know already where the cloned branch will be added to. If @destParent is given, it ensures that the tree is namespace wellformed by creating additional ns-decls where needed. Note that, since prefixes of already existent ns-decls can be shadowed by this process, it could break QNames in attribute values or element content. TODO: 1) What to do with XInclude? Currently this returns an error for XInclude. + + + + + + + + + + + + Frees the DOM-wrapper context. + + + + + Allocates and initializes a new DOM-wrapper context. + + + + Ensures that ns-references point to ns-decls hold on element-nodes. Ensures that the tree is namespace wellformed by creating additional ns-decls where needed. Note that, since prefixes of already existent ns-decls can be shadowed by this process, it could break QNames in attribute values or element content. NOTE: This function was not intensively tested. + + + + + + + Unlinks the given node from its owner. 
This will substitute ns-references to node->nsDef for ns-references to doc->oldNs, thus ensuring the removed branch to be autark wrt ns-references. NOTE: This function was not intensively tested. + + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Check the document for potential content problems, and output the errors to @output + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the attribute + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the attribute list + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the DTD + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the document, it's recursive + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information concerning the document, not recursive + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for all the entities in use by the document + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the element node, it is recursive + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the list of element node, it is recursive + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps debug information for the element node, it is not recursive + + + + + + + defined(LIBXML_DEBUG_ENABLED) + Dumps information about the string, shorten it if necessary + + + + + + DEPRECATED: This function is a no-op. Call xmlInitParser to initialize the library. Initialize the default SAX2 handler + + + + DEPRECATED: This function modifies global state and is not thread-safe. Unregisters an encoding alias @alias + + + + + DEPRECATED: don't use Registers a callback for node destruction + + + + + Signature for the deregistration callback of a discarded node + + + + + Guess the encoding of the entity using the first bytes of the entity content according to the non-normative appendix F of the XML-1.0 recommendation. + + + + + + DEPRECATED: This function is a no-op. 
Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have call cleanup functions at all. + + + + Create a new dictionary + + + + Create a new dictionary, inheriting strings from the read-only dictionary @sub. On lookup, strings are first searched in the new dictionary, then in @sub, and if not found are created in the new dictionary. + + + + + Check if a string exists in the dictionary. + + + + + + + Free the hash @dict and its contents. The userdata is deallocated with @f if provided. + + + + + Get how much memory is used by a dictionary for strings Added in 2.9.0 + + + + + Lookup a string and add it to the dictionary if it wasn't found. + + + + + + + check if a string is owned by the dictionary + + + + + + Lookup the QName @prefix:@name and add it to the dictionary if it wasn't found. + + + + + + + Increment the reference counter of a dictionary + + + + + Set a size limit for the dictionary Added in 2.9.0 + + + + + + Query the number of elements installed in the hash @dict. + + + + + Copy a node into another document. + + + + + + + Copy a node list and all children into a new document. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to an open FILE. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document in memory and return the #xmlChar * and it's size. It's up to the caller to free the memory with xmlFree(). Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump the current DOM tree into memory using the character encoding specified by the caller. Note it is up to the caller of this function to free the allocated memory with xmlFree(). 
Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document in memory and return the #xmlChar * and it's size in bytes. It's up to the caller to free the memory with xmlFree(). The resulting byte array is zero terminated, though the last 0 is not included in the returned size. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump the current DOM tree into memory using the character encoding specified by the caller. Note it is up to the caller of this function to free the allocated memory with xmlFree(). + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to an open FILE. + + + + + + + Get the root element of the document (doc->children is a list containing possibly comments, PIs, etc ...). + + + + + Set the root element of the document (doc->children is a list containing possibly comments, PIs, etc ...). @root must be an element node. It is unlinked before insertion. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Use xmlSaveTree. This will dump the content of the attribute declaration as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. This will dump the content of the attribute table as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Use xmlSaveTree. This will dump the content of the element declaration as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. This will dump the content of the element table as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + This will dump the content of the entity table as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + This will dump the content of the entity table as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. 
This will dump the content the notation declaration as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. This will dump the content of the notation table as an XML DTD definition + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML/HTML node, recursive behaviour, children are printed too. + + + + + + + Do a global encoding of a string, replacing the predefined entities and non ASCII values with their entities and CharRef counterparts. Contrary to xmlEncodeEntities, this routine is reentrant, and result must be deallocated. This escapes '<', '>', '&' and '\r'. If the document has no encoding, non-ASCII codepoints are escaped. There is some special handling for HTML documents. + + + + + + Do a global encoding of a string, replacing the predefined entities this routine is reentrant, and result must be deallocated. This escapes '<', '>', '&', '"' and '\r' chars. + + + + + + External entity loaders types. + + + + + + + DEPRECATED: Internal function, don't use. + + + + + DEPRECATED: Internal function, don't use. + + + + + DEPRECATED: Internal function, don't use. + + + + + DEPRECATED: Internal function, don't use. + + + + + + + DEPRECATED: Use xmlOpenCharEncodingHandler which has better error reporting. If the encoding is UTF-8, this will return a no-op handler that shouldn't be used. + + + + + Find the first child node which is an element. Note that entity references are not expanded. + + + + + Report a formatted error to a printf-like callback. This can result in a verbose multi-line report including additional information from the parser context. Available since 2.13.0. + + + + + + + DEPRECATED: Internal function, don't use. Deallocate the memory used by an entities hash table. + + + + + defined(LIBXML_REGEXP_ENABLED) + Free an automata + + + + + defined(LIBXML_CATALOG_ENABLED) + Free the memory allocated to a Catalog + + + + + Free a document including all children and associated DTDs. 
+ + + + + DEPRECATED: Internal function, don't use. Free an element content structure. The whole subtree is removed. + + + + + + Free a DTD structure. + + + + + DEPRECATED: Internal function, don't use. Free an element content structure. The whole subtree is removed. Deprecated, use xmlFreeDocElementContent instead + + + + + DEPRECATED: Internal function, don't use. Deallocate the memory used by an element hash table. + + + + + Deallocate the memory used by an entities hash table. + + + + + Frees the entity. + + + + + free an enumeration attribute node (recursive). + + + + + Signature for a free() implementation. + + + + + Deallocate the memory used by an ID hash table. + + + + + Free up an input stream. + + + + + Free a mutex. + + + + + Free a node including all the children. This doesn't unlink the node from the tree. Call xmlUnlinkNode first unless @cur is a root node. + + + + + Free a node list including all children. + + + + + DEPRECATED: Internal function, don't use. Deallocate the memory used by an entities hash table. + + + + + Free an xmlNs object. + + + + + Free a list of xmlNs objects. + + + + + Free all the memory used by a parser context. However the parsed document in ctxt->myDoc is not freed. + + + + + Free up the memory used by a buffered parser input + + + + + defined(LIBXML_PATTERN_ENABLED) + Free up the memory allocated by @comp + + + + + defined(LIBXML_PATTERN_ENABLED) + Free up the memory allocated by all the elements of @comp + + + + + Free an attribute including all children. + + + + + Free an attribute list including all children. + + + + + xmlRFreeMutex() is used to reclaim resources associated with a reentrant mutex. + + + + + DEPRECATED, do not use. This function will be removed from the public API. Deallocate the memory used by an Ref hash table. 
+ + + + + defined(LIBXML_PATTERN_ENABLED) + Free the stream context + + + + + defined(LIBXML_READER_ENABLED) + Deallocate all the resources associated to the reader + + + + + defined(LIBXML_WRITER_ENABLED) + Deallocate all the resources associated to the writer + + + + + Free up the xmlURI struct + + + + + defined(LIBXML_VALID_ENABLED) + Free a validation context structure. + + + + + DEPRECATED: xmlMemGet. Provides the memory access functions set currently in use The mallocAtomicFunc is specialized for atomic block allocations (i.e. of areas useful for garbage collected memory allocators + + + + + + + + + DEPRECATED: Use xmlMemSetup. Override the default memory access functions with a new set This has to be called before any other libxml routines ! The mallocAtomicFunc is specialized for atomic block allocations (i.e. of areas useful for garbage collected memory allocators Should this be blocked if there was already some allocations done ? + + + + + + + + + Signature of the function to use when there is an error and no parsing or validity context available . + + + + + + + DEPRECATED: Use xmlBufferSetAllocationScheme. Types are XML_BUFFER_ALLOC_EXACT - use exact sizes, keeps memory usage down XML_BUFFER_ALLOC_DOUBLEIT - double buffer when extra needed, improves performance XML_BUFFER_ALLOC_HYBRID - use exact sizes on small strings to keep memory usage tight in normal usage, and doubleit on large strings to avoid pathological performance. + + + + DEPRECATED: Use xmlLookupCharEncodingHandler which has better error reporting. + + + + + The "canonical" name for XML encoding. C.f. http://www.w3.org/TR/REC-xml#charencoding Section 4.3.3 Character Encoding in Entities + + + + + DEPRECATED: Use xmlGetDocCompressMode get the default compression mode used, ZLIB based. + + + + get the compression ratio for a document, ZLIB based + + + + + Do an entity lookup in the document entity hash table and + + + + + + Search the DTD for the description of this attribute on this element. 
+ + + + + + + Search the DTD for the description of this element NOTE: A NULL return value can also mean that a memory allocation failed. + + + + + + Do an entity lookup in the DTD entity hash table and + + + + + + Search the DTD for the description of this notation + + + + + + Search the DTD for the description of this qualified attribute on this element. + + + + + + + + Search the DTD for the description of this element + + + + + + + DEPRECATED: This function is not thread-safe. Lookup an encoding name for the given alias. + + + + + DEPRECATED: See xmlSetExternalEntityLoader. Get the default external entity resolver function for the application + + + + DEPRECATED + + + + Search the attribute declaring the given ID + + + + + + Get the internal subset of a document. + + + + + Find the last child of a node. + + + + + Get the last global error registered. This is per thread if compiled with thread support. + + + + Get line number of @node. Try to override the limitation of lines being store in 16 bits ints if XML_PARSE_BIG_LINES parser option was used + + + + + Search and get the value of an attribute associated to a node This does the entity substitution. This function looks in DTD attribute declaration for #FIXED or default declaration values. This function is similar to xmlGetProp except it will accept only an attribute in no namespace. NOTE: This function doesn't allow to distinguish malloc failures from missing attributes. It's more robust to use xmlNodeGetAttrValue. + + + + + + Build a structure based Path for the given node + + + + + Find all in-scope namespaces of a node. Use xmlGetNsListSafe for better error reporting. + + + + + + Find all in-scope namespaces of a node. @out returns a NULL terminated array of namespace pointers that must be freed by the caller. Available since 2.13.0. + + + + + + + Search and get the value of an attribute associated to a node This attribute has to be anchored in the namespace specified. This does the entity substitution. 
This function looks in DTD attribute declaration for #FIXED or default declaration values. NOTE: This function doesn't allow to distinguish malloc failures from missing attributes. It's more robust to use xmlNodeGetAttrValue. + + + + + + + Do an entity lookup in the internal and external subsets and + + + + + + Check whether this name is an predefined entity. + + + + + Search and get the value of an attribute associated to a node This does the entity substitution. This function looks in DTD attribute declaration for #FIXED or default declaration values. NOTE: This function acts independently of namespaces associated to the attribute. Use xmlGetNsProp() or xmlGetNoNsProp() for namespace aware processing. NOTE: This function doesn't allow to distinguish malloc failures from missing attributes. It's more robust to use xmlNodeGetAttrValue. + + + + + + DEPRECATED, do not use. This function will be removed from the public API. Find the set of references for the supplied ID. + + + + + + DEPRECATED: Internal function, do not use. xmlGetThreadId() find the current thread ID number Note that this is likely to be broken on some platforms using pthreads as the specification doesn't mandate pthread_t to be an integer type + + + + Read the first UTF8 character from @utf + + + + + + Examines if the library has been compiled with a given feature. + + + + + Search for an attribute associated to a node This attribute has to be anchored in the namespace specified. This does the entity substitution. This function looks in DTD attribute declaration for #FIXED or default declaration values. Note that a namespace of NULL indicates to use the default namespace. + + + + + + + Search an attribute associated to a node This function also looks in DTD attribute declaration for #FIXED or default declaration values. + + + + + + Add a hash table entry. If an entry with this key already exists, payload will not be updated and 0 is returned. 
This return value can't be distinguished from out-of-memory errors, so this function should be used with care. Available since 2.13.0. + + + + + + + Add a hash table entry with two strings as key. See xmlHashAdd. Available since 2.13.0. + + + + + + + + Add a hash table entry with three strings as key. See xmlHashAdd. Available since 2.13.0. + + + + + + + + + Add a hash table entry. If an entry with this key already exists, payload will not be updated and -1 is returned. This return value can't be distinguished from out-of-memory errors, so this function should be used with care. NOTE: This function doesn't allow to distinguish malloc failures from existing entries. Use xmlHashAdd instead. + + + + + + + Add a hash table entry with two strings as key. See xmlHashAddEntry. + + + + + + + + Add a hash table entry with three strings as key. See xmlHashAddEntry. + + + + + + + + + Callback to copy data from a hash. + + + + + + DEPRECATED: Leaks memory in error case. Copy the hash table using @copy to copy payloads. + + + + + + Copy the hash table using @copyFunc to copy payloads. Available since 2.13.0. + + + + + + + Create a new hash table. Set size to zero if the number of entries can't be estimated. + + + + + Create a new hash table backed by a dictionary. This can reduce resource usage considerably if most keys passed to API functions originate from this dictionary. + + + + + + Callback to free data from a hash. + + + + + + Free a hash table entry with xmlFree. + + + + + + Free the hash and its contents. The payload is deallocated with @dealloc if provided. + + + + + + Find the entry specified by @key. + + + + + + Find the payload specified by the (@key, @key2) tuple. + + + + + + + Find the payload specified by the (@key, @key2, @key3) tuple. + + + + + + + + Find the payload specified by the QName @prefix:@name or @name. + + + + + + + Find the payload specified by the QNames tuple. + + + + + + + + + Find the payload specified by the QNames tuple. 
+ + + + + + + + + + + Find the entry specified by the @key and remove it from the hash table. Payload will be freed with @dealloc. + + + + + + + Remove an entry with two strings as key. See xmlHashRemoveEntry. + + + + + + + + Remove an entry with three strings as key. See xmlHashRemoveEntry. + + + + + + + + + Scan the hash @table and apply @scan to each value. + + + + + + + Scan the hash @table and apply @scan to each value matching (@key, @key2, @key3) tuple. If one of the keys is null, the comparison is considered to match. + + + + + + + + + + Scan the hash @table and apply @scan to each value. + + + + + + + Scan the hash @table and apply @scan to each value matching (@key, @key2, @key3) tuple. If one of the keys is null, the comparison is considered to match. + + + + + + + + + + Callback when scanning data in a hash with the simple scanner. + + + + + + + Callback when scanning data in a hash with the full scanner. + + + + + + + + + Query the number of elements in the hash table. + + + + + Add a hash table entry. If an entry with this key already exists, the old payload will be freed and updated with the new value. + + + + + + + + Add a hash table entry with two strings as key. See xmlHashUpdateEntry. + + + + + + + + + Add a hash table entry with three strings as key. See xmlHashUpdateEntry. + + + + + + + + + + defined(LIBXML_HTTP_ENABLED) + DEPRECATED: Internal function, don't use. Close an HTTP I/O channel + + + + + defined(LIBXML_HTTP_ENABLED) + DEPRECATED: Internal function, don't use. check if the URI matches an HTTP one + + + + + defined(LIBXML_HTTP_ENABLED) + DEPRECATED: Internal function, don't use. open an HTTP I/O channel + + + + + defined(LIBXML_HTTP_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Support for HTTP POST has been removed. + + + + + + defined(LIBXML_HTTP_ENABLED) + DEPRECATED: Internal function, don't use. Read @len bytes to @buffer from the I/O channel. 
+ + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Use xmlCtxtParseDtd. Load and parse a DTD + + + + + + + DEPRECATED: Alias for xmlInitParser. + + + + DEPRECATED: Alias for xmlInitParser. + + + + DEPRECATED: Alias for xmlInitParser. + + + + DEPRECATED: Don't use. -- Initialize (set to initial state) node info sequence + + + + + Initialization function for the XML parser. For older versions, it's recommended to call this function once from the main thread before using the library in multithreaded programs. Since 2.14.0, there's no distinction between threads. It should be unnecessary to call this function. + + + + DEPRECATED: Internal function which will be made private in a future version. Initialize a parser context + + + + + DEPRECATED: Alias for xmlInitParser. + + + + defined(LIBXML_CATALOG_ENABLED) + Load the default system catalog. + + + + DEPRECATED: Alias for xmlInitParser. + + + + DEPRECATED: No-op. + + + + + Callback used in the I/O Input API to close the resource + + + + + Callback used in the I/O Input API to detect if the current handler can provide input functionality for this resource. + + + + + Callback used in the I/O Input API to open the resource + + + + + Callback used in the I/O Input API to read the resource + + + + + + + Use encoding handler to decode input data. Closes the handler on error. + + + + + + This function is DEPRECATED. Use xmlIsBaseChar_ch or xmlIsBaseCharQ instead + + + + + This function is DEPRECATED. Use xmlIsBlank_ch or xmlIsBlankQ instead + + + + + Checks whether this node is an empty or whitespace only (and possibly ignorable) text-node. + + + + + This function is DEPRECATED. Use xmlIsChar_ch or xmlIsCharQ instead + + + + + This function is DEPRECATED. Use xmlIsCombiningQ instead + + + + + This function is DEPRECATED. Use xmlIsDigit_ch or xmlIsDigitQ instead + + + + + This function is DEPRECATED. Use xmlIsExtender_ch or xmlIsExtenderQ instead + + + + + Determine whether an attribute is of type ID. 
In case we have DTD(s) then this is done if DTD loading has been requested. In the case of HTML documents parsed with the HTML parser, then ID detection is done systematically. + + + + + + + This function is DEPRECATED. Use xmlIsIdeographicQ instead + + + + + DEPRECATED: Internal function, don't use. Check whether the character is allowed by the production [84] Letter ::= BaseChar | Ideographic + + + + + DEPRECATED: Internal function, do not use. Check whether the current thread is the main thread. + + + + Search in the DtDs whether an element accept Mixed content (or ANY) basically if it is supposed to accept text childs + + + + + + This function is DEPRECATED. Use xmlIsPubidChar_ch or xmlIsPubidCharQ instead + + + + + DEPRECATED, do not use. This function will be removed from the public API. Determine whether an attribute is of type Ref. In case we have DTD(s) then this is simple, otherwise we use an heuristic: name Ref (upper or lowercase). + + + + + + + Try to find if the document correspond to an XHTML DTD + + + + + + DEPRECATED: Use the modern options API with XML_PARSE_NOBLANKS. Set and return the previous value for default blanks text nodes support. The 1.x version of the parser used an heuristic to try to detect ignorable white spaces. As a result the SAX callback was generating xmlSAX2IgnorableWhitespace() callbacks instead of characters() one, and when using the DOM output text nodes containing those blanks were not generated. The 2.x and later version will switch to the XML standard way and ignorableWhitespace() are only generated when running the parser in validating mode and when the current element doesn't allow CDATA or mixed content. This function is provided as a way to force the standard behavior on 1.X libs and to switch back to the old mode for compatibility when running 1.X client code on 2.X . Upgrade of 1.X code should be done by using xmlIsBlankNode() commodity function to detect the "empty" nodes generated. 
This value also affect autogeneration of indentation when saving code if blanks sections are kept, indentation is not generated. + + + + + Find the last child node which is an element. Note that entity references are not expanded. + + + + + DEPRECATED: The modern options API always enables line numbers. Set and return the previous value for enabling line numbers in elements contents. This may break on old application and is turned off by default. + + + + + See Returns. + + + + + Insert data in the ordered list at the end for this value + + + + + + Remove the all data in the list + + + + + Move all the element from the old list in the new list + + + + + + Create a new list + + + + + + Callback function used to compare 2 data. + + + + + + Callback function used to free data from a list. + + + + + Deletes the list and its associated data + + + + + Duplicate the list + + + + + Is the list empty ? + + + + + Get the last element in the list + + + + + Get the first element in the list + + + + + Insert data in the ordered list at the beginning for this value + + + + + + include all the elements of the second list in the first one and clear the second list + + + + + + Removes the last element in the list + + + + + Removes the first element in the list + + + + + add the new data at the end of the list + + + + + + add the new data at the beginning of the list + + + + + + Remove the all instance associated to data in the list + + + + + + Remove the first instance associated to data in the list + + + + + + Remove the last instance associated to data in the list + + + + + + Reverse the order of the elements in the list + + + + + Search the list in reverse order for an existing value of @data + + + + + + Walk all the element of the list in reverse order and apply the walker function to it + + + + + + + Search the list for an existing value of @data + + + + + + Get the number of elements in the list + + + + + Sort all the elements in the list + + + + + Walk all the element of the 
first from first to last and apply the walker function to it + + + + + + + Callback function used when walking a list with xmlListWalk(). + + + + + + defined(LIBXML_CATALOG_ENABLED) + Load the catalog and build the associated data structures. This can be either an XML Catalog or an SGML Catalog It will recurse in SGML CATALOG entries. On the other hand XML Catalogs are not handled recursively. + + + + + defined(LIBXML_CATALOG_ENABLED) + Load the catalog and makes its definitions effective for the default external entity loader. It will recurse in SGML CATALOG entries. this function is not thread safe, catalog initialization should preferably be done once at startup + + + + + defined(LIBXML_CATALOG_ENABLED) + Load the catalogs and makes their definitions effective for the default external entity loader. this function is not thread safe, catalog initialization should preferably be done once at startup + + + + + @URL is a filename or URL. If if contains the substring "://", it is assumed to be a Legacy Extended IRI. Otherwise, it is treated as a filesystem path. @ID is an optional XML public ID, typically from a doctype declaration. It is used for catalog lookups. If catalog lookup is enabled (default is yes) and URL or ID are found in system or local XML catalogs, URL is replaced with the result. Then the following resource loaders will be called if they were registered (in order of precedence): - the resource loader set with xmlCtxtSetResourceLoader - the global external entity loader set with xmlSetExternalEntityLoader (without catalog resolution, deprecated) - the per-thread xmlParserInputBufferCreateFilenameFunc set with xmlParserInputBufferCreateFilenameDefault (deprecated) - the default loader which will return - the result from a matching global input callback set with xmlRegisterInputCallbacks (deprecated) - a HTTP resource if support is compiled in. - a file opened from the filesystem, with automatic detection of compressed files if support is compiled in. 
+ + + + + + + defined(LIBXML_CATALOG_ENABLED) + Load an SGML super catalog. It won't expand CATALOG or DELEGATE references. This is only needed for manipulating SGML Super Catalogs like adding and removing CATALOG or DELEGATE entries. + + + + + xmlLockLibrary() is used to take out a re-entrant lock on the libxml2 library. + + + + Find or create a handler matching the encoding. The following converters are looked up in order: - Built-in handler (UTF-8, UTF-16, ISO-8859-1, ASCII) - User-registered global handler (deprecated) - iconv if enabled - ICU if enabled The handler must be closed with xmlCharEncCloseFunc. If the encoding is UTF-8, a NULL handler and no error code will be returned. Available since 2.13.0. + + + + + + DEPRECATED: don't use + + + + + + + Signature for a malloc() implementation. + + + + + DEPRECATED: don't use + + + + + + + Provides the number of memory areas currently allocated + + + + DEPRECATED: This feature was removed. + + + + + DEPRECATED: This feature was removed. + + + + + + a free() equivalent, with error checking. + + + + + Provides the memory access functions set currently in use + + + + + + + + a malloc() equivalent, with logging of the allocation info. + + + + + a realloc() equivalent, with logging of the allocation info. + + + + + + Override the default memory access functions with a new set This has to be called before any other libxml routines ! Should this be blocked if there was already some allocations done ? + + + + + + + + DEPRECATED: This feature was removed. + + + + + + + + + + + DEPRECATED: don't use + + + + + + + Provides the amount of memory currently allocated + + + + DEPRECATED: This feature was removed. + + + + a strdup() equivalent, with logging of the allocation info. + + + + + defined(LIBXML_MODULES_ENABLED) + The close operations unload the associated module and free the data associated to the module. 
+ + + + + defined(LIBXML_MODULES_ENABLED) + The free operations free the data associated to the module but does not unload the associated shared library which may still be in use. + + + + + defined(LIBXML_MODULES_ENABLED) + Opens a module/shared library given its name or path NOTE: that due to portability issues, behaviour can only be guaranteed with @name using ASCII. We cannot guarantee that an UTF-8 string would work, which is why name is a const char * and not a const xmlChar * . TODO: options are not yet implemented. + + + + + + defined(LIBXML_MODULES_ENABLED) + Lookup for a symbol address in the given module NOTE: that due to portability issues, behaviour can only be guaranteed with @name using ASCII. We cannot guarantee that an UTF-8 string would work, which is why name is a const char * and not a const xmlChar * . + + + + + + + xmlMutexLock() is used to lock a libxml2 token. + + + + + xmlMutexUnlock() is used to unlock a libxml2 token. + + + + + defined(LIBXML_HTTP_ENABLED) + Get the authentication header of an HTTP context + + + + + defined(LIBXML_HTTP_ENABLED) + Cleanup the HTTP protocol layer. + + + + defined(LIBXML_HTTP_ENABLED) + This function closes an HTTP context, it ends up the connection and free all data related to it. + + + + + defined(LIBXML_HTTP_ENABLED) + Provides the specified content length from the HTTP header. + + + + + defined(LIBXML_HTTP_ENABLED) + Provides the specified encoding if specified in the HTTP headers. + + + + + defined(LIBXML_HTTP_ENABLED) + This function try to fetch the indicated resource via HTTP GET and save it's content in the file. + + + + + + + defined(LIBXML_HTTP_ENABLED) + Initialize the HTTP protocol layer. Currently it just checks for proxy information + + + + defined(LIBXML_HTTP_ENABLED) + This function try to open a connection to the indicated resource via HTTP using the given @method, adding the given extra headers and the input buffer for the request content. 
+ + + + + + + + + + defined(LIBXML_HTTP_ENABLED) + This function try to open a connection to the indicated resource via HTTP using the given @method, adding the given extra headers and the input buffer for the request content. + + + + + + + + + + + defined(LIBXML_HTTP_ENABLED) + Provides the specified Mime-Type if specified in the HTTP headers. + + + + + defined(LIBXML_HTTP_ENABLED) + This function try to open a connection to the indicated resource via HTTP GET. + + + + + + defined(LIBXML_HTTP_ENABLED) + This function try to open a connection to the indicated resource via HTTP GET. + + + + + + + defined(LIBXML_HTTP_ENABLED) + This function tries to read @len bytes from the existing HTTP connection and saves them in @dest. This is a blocking call. + + + + + + + defined(LIBXML_HTTP_ENABLED) + Provides the specified redirection URL if available from the HTTP header. + + + + + defined(LIBXML_HTTP_ENABLED) + Get the latest HTTP return code received + + + + + defined(LIBXML_HTTP_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + This function saves the output of the HTTP transaction to a file It closes and free the context at the end + + + + + + defined(LIBXML_HTTP_ENABLED) + (Re)Initialize the HTTP Proxy context by parsing the URL and finding the protocol host port it indicates. Should be like http://myproxy/ or http://myproxy:3128/ A NULL URL cleans up proxy information. + + + + + defined(LIBXML_REGEXP_ENABLED) + Create a new automata + + + + Create a CDATA section node. + + + + + + + defined(LIBXML_CATALOG_ENABLED) + create a new Catalog. + + + + + DEPRECATED: This function modifies global state and is not thread-safe. Create and registers an xmlCharEncodingHandler. + + + + + + + This function is MISNAMED. It doesn't create a character reference but an entity reference. Create an empty entity reference node. This function doesn't attempt to look up the entity in @doc. Entity names like '&entity;' are handled as well. 
+ + + + + + Create a new child element and append it to a parent element. If @ns is NULL, the newly created element inherits the namespace of the parent. If provided, @content is expected to be a valid XML attribute value possibly containing character and entity references. Text and entity reference node will be added to the child element, see xmlNewDocNode. + + + + + + + + Use of this function is DISCOURAGED in favor of xmlNewDocComment. Create a comment node. + + + + + Creates a new XML document. If version is NULL, "1.0" is used. + + + + + Create a comment node. + + + + + + DEPRECATED: Internal function, don't use. Allocate an element content structure for the document. + + + + + + + Create a document fragment node. + + + + + Create an element node. If provided, @content is expected to be a valid XML attribute value possibly containing character and entity references. Syntax errors and references to undeclared entities are ignored silently. Only references are handled, nested elements, comments or PIs are not. See xmlNewDocRawNode for an alternative. General notes on object creation: Each node and all its children are associated with the same document. The document should be provided when creating nodes to avoid a performance penalty when adding the node to a document tree. Note that a document only owns nodes reachable from the root node. Unlinked subtrees must be freed manually. + + + + + + + + Create an element node. Like xmlNewDocNode, but the @name string will be used directly without making a copy. Takes ownership of @name which will also be freed on error. + + + + + + + + Create a processing instruction object. + + + + + + + Create an attribute object. If provided, @value is expected to be a valid XML attribute value possibly containing character and entity references. Syntax errors and references to undeclared entities are ignored silently. If you want to pass a raw string, see xmlNewProp. + + + + + + + Create an element node. 
If provided, @value should be a raw, unescaped string. + + + + + + + + Create a new text node. + + + + + + Create a new text node. + + + + + + + Create a DTD node. If a document is provided, it is an error if it already has an external subset. If the document has no external subset, it will be set to the created DTD. To create an internal subset, use xmlCreateIntSubset(). + + + + + + + + DEPRECATED: Internal function, don't use. Allocate an element content structure. Deprecated in favor of xmlNewDocElementContent + + + + + + Create a new entity, this differs from xmlAddDocEntity() that if the document is NULL or has no internal subset defined, then an unlinked entity structure will be returned, it is then the responsibility of the caller to link it to the document later or free it when not needed anymore. + + + + + + + + + + DEPRECATED: Internal function, do not use. Create a new input stream based on an xmlEntityPtr + + + + + + Create a new input stream structure encapsulating the @input into a stream suitable for the parser. + + + + + + + Creates a new parser input to read from a zero-terminated string. @url is used as base to resolve external entities and for error reporting. @fd is closed after parsing has finished. Available since 2.14.0. + + + + + + + DEPRECATED: Use xmlNewInputFromUrl. Create a new input stream based on a file or an URL. + + + + + + Creates a new parser input to read from input callbacks and cintext. @url is used as base to resolve external entities and for error reporting. @ioRead is called to read new data into a provided buffer. It must return the number of bytes written into the buffer ot a negative xmlParserErrors code on failure. @ioClose is called after parsing has finished. @ioCtxt is an opaque pointer passed to the callbacks. Available since 2.14.0. + + + + + + + + + Creates a new parser input to read from a memory area. @url is used as base to resolve external entities and for error reporting. 
If the XML_INPUT_BUF_STATIC flag is set, the memory area must stay unchanged until parsing has finished. This can avoid temporary copies. If the XML_INPUT_BUF_ZERO_TERMINATED flag is set, the memory area must contain a zero byte after the buffer at position @size. This can avoid temporary copies. Available since 2.14.0. + + + + + + + + Creates a new parser input to read from a zero-terminated string. @url is used as base to resolve external entities and for error reporting. If the XML_INPUT_BUF_STATIC flag is set, the string must stay unchanged until parsing has finished. This can avoid temporary copies. Available since 2.14.0. + + + + + + + Create a new input stream based on a file or a URL. The flag XML_INPUT_UNZIP allows decompression. The flag XML_INPUT_NETWORK allows network access. The following resource loaders will be called if they were registered (in order of precedence): - the per-thread xmlParserInputBufferCreateFilenameFunc set with xmlParserInputBufferCreateFilenameDefault (deprecated) - the default loader which will return - the result from a matching global input callback set with xmlRegisterInputCallbacks (deprecated) - a HTTP resource if support is compiled in. - a file opened from the filesystem, with automatic detection of compressed files if support is compiled in. Available since 2.14.0. + + + + + + + DEPRECATED: Use xmlNewInputFromUrl or similar functions. Create a new input stream structure. + + + + + xmlNewMutex() is used to allocate a libxml2 token struct for use in synchronizing access to data. + + + + Create an element node. Use of this function is DISCOURAGED in favor of xmlNewDocNode. + + + + + + Create an element node. Use of this function is DISCOURAGED in favor of xmlNewDocNodeEatName. Like xmlNewNode, but the @name string will be used directly without making a copy. Takes ownership of @name which will also be freed on error. + + + + + + Create a new namespace. For a default namespace, @prefix should be NULL. 
The namespace URI in @href is not checked. You should make sure to pass a valid URI. If @node is provided, it must be an element node. The namespace will be appended to the node's namespace declarations. It is an error if the node already has a definition for the prefix or default namespace. + + + + + + + Create an attribute object. If provided, @value should be a raw, unescaped string. If @node is provided, the created attribute will be appended without checking for duplicate names. It is an error if @node is not an element. + + + + + + + + Like xmlNewNsProp, but the @name string will be used directly without making a copy. Takes ownership of @name which will also be freed on error. + + + + + + + + Create a processing instruction node. Use of this function is DISCOURAGED in favor of xmlNewDocPI. + + + + + + Allocate and initialize a new parser context. + + + + Create an attribute node. If provided, @value should be a raw, unescaped string. If @node is provided, the created attribute will be appended without checking for duplicate names. It is an error if @node is not an element. + + + + + + + xmlRNewMutex() is used to allocate a reentrant mutex for use in synchronizing access to data. token_r is a re-entrant lock and thus useful for synchronizing access to data structures that may be manipulated in a recursive fashion. + + + + Create a new entity reference node, linking the result with the entity in @doc if found. Entity names like '&entity;' are handled as well. + + + + + + Allocate and initialize a new SAX parser context. If userData is NULL, the parser context will be passed as user data. Available since 2.11.0. If you want support older versions, it's best to invoke xmlNewParserCtxt and set ctxt->sax with struct assignment. + + + + + + DEPRECATED: Use xmlNewInputFromString. Create a new input stream based on a memory buffer. + + + + + + Create a text node. Use of this function is DISCOURAGED in favor of xmlNewDocText. 
+ + + + + Create a new child element and append it to a parent element. If @ns is NULL, the newly created element inherits the namespace of the parent. If @content is provided, a text node will be added to the child element, see xmlNewDocRawNode. + + + + + + + + Use of this function is DISCOURAGED in favor of xmlNewDocTextLen. + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmlTextReader structure fed with @input + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmlTextReader structure fed with the resource at @URI + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure using an xmlOutputBufferPtr NOTE: the @out parameter will be deallocated when the writer is closed (if the call succeed.) + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure with @*doc as output + + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure with @uri as output + + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure with @buf as output TODO: handle compression + + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure with @ctxt as output NOTE: the @ctxt context will be freed with the resulting writer (if the call succeeds). TODO: handle compression + + + + + + defined(LIBXML_WRITER_ENABLED) + Create a new xmlNewTextWriter structure with @doc as output starting at @node + + + + + + + defined(LIBXML_VALID_ENABLED) + Allocate a validation context structure. + + + + DEPRECATED: Internal function, do not use. Skip to the next char input char. + + + + + Find the closest following sibling which is a element. Note that entity references are not expanded. + + + + + DEPRECATED: Use XML_PARSE_NONET. A specific entity loader disabling network accesses, though still allowing local catalog accesses for resolution. + + + + + + + Append the extra substring to the node content. 
NOTE: In contrast to xmlNodeSetContent(), @content is supposed to be raw text, so unescaped XML special chars are allowed, entity references are not supported. + + + + + + Append the extra substring to the node content. NOTE: In contrast to xmlNodeSetContentLen(), @content is supposed to be raw text, so unescaped XML special chars are allowed, entity references are not supported. + + + + + + + Read the value of a node @cur, this can be either the text carried directly by this node if it's a TEXT node or the aggregate string of the values carried by this node child's (TEXT and ENTITY_REF). Entity references are substituted. Fills up the buffer @buffer with this value + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML node, recursive behaviour,children are printed too. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called. Since this is using xmlBuffer structures it is limited to 2GB and somehow deprecated, use xmlNodeDumpOutput() instead. + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML node, recursive behaviour, children are printed too. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called + + + + + + + + + + Search and get the value of an attribute associated to a node This attribute has to be anchored in the namespace specified. This does the entity substitution. The returned value must be freed by the caller. Available since 2.13.0. + + + + + + + + See xmlNodeGetBaseSafe. This function doesn't allow to distinguish memory allocation failures from a non-existing base. + + + + + + Searches for the BASE URL. The code should work on both XML and HTML document even if base mechanisms are completely different. It returns the base as defined in RFC 2396 sections 5.1.1. Base URI within Document Content and 5.1.2. 
Base URI from the Encapsulating Entity However it does not return the document base (5.1.3), use doc->URL in this case Available since 2.13.0. + + + + + + + Read the value of a node, this can be either the text carried directly by this node if it's a TEXT node or the aggregate string of the values carried by this node child's (TEXT and ENTITY_REF). Entity references are substituted. + + + + + Searches the language of a node, i.e. the values of the xml:lang attribute or the one carried by the nearest ancestor. + + + + + Searches the space preserving behaviour of a node, i.e. the values of the xml:space attribute or the one carried by the nearest ancestor. + + + + + Is this node a Text node ? + + + + + Serializes attribute children (text and entity reference nodes) into a string. If @inLine is true, entity references will be substituted. Otherwise, entity references will be kept and special characters like '&' will be escaped. + + + + + + + Serializes attribute children (text and entity reference nodes) into a string. If @inLine is true, entity references will be substituted. Otherwise, entity references will be kept and special characters like '&' as well as non-ASCII chars will be escaped. See xmlNodeListGetRawString for an alternative option. + + + + + + + Set (or reset) the base URI of a node, i.e. the value of the xml:base attribute. + + + + + + Replace the text content of a node. Sets the raw text content of text, CDATA, comment or PI nodes. For element and attribute nodes, removes all children and replaces them by parsing @content which is expected to be a valid XML attribute value possibly containing character and entity references. Syntax errors and references to undeclared entities are ignored silently. Unfortunately, there isn't an API to pass raw content directly. An inefficient work-around is to escape the content with xmlEncodeSpecialChars before passing it. 
A better trick is clearing the old content with xmlNodeSetContent(node, NULL) first and then calling xmlNodeAddContent(node, content). Unlike this function, xmlNodeAddContent accepts raw text. + + + + + + See xmlNodeSetContent. + + + + + + + Set the language of a node, i.e. the values of the xml:lang attribute. + + + + + + Set (or reset) the name of a node. + + + + + + Set (or reset) the space preserving behaviour of a node, i.e. the value of the xml:space attribute. + + + + + + Applies the 5 normalization steps to a path string--that is, RFC 2396 Section 5.2, steps 6.c through 6.g. Normalization occurs directly on the string, no new allocation is done + + + + + DEPRECATED: This never really worked. + + + + + Find or create a handler matching the encoding. The following converters are looked up in order: - Built-in handler (UTF-8, UTF-16, ISO-8859-1, ASCII) - User-registered global handler (deprecated) - iconv if enabled - ICU if enabled The handler must be closed with xmlCharEncCloseFunc. If the encoding is UTF-8, a NULL handler and no error code will be returned. Available since 2.13.0. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + flushes and close the output I/O channel and free up all the associated resources + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered output for the progressive saving to a xmlBuffer Consumes @encoder even in error case. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered output for the progressive saving to a file descriptor Consumes @encoder even in error case. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered output for the progressive saving to a FILE * buffered C I/O Consumes @encoder even in error case. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered output for the progressive saving of a file If filename is "-' then we use stdout as the output. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. 
TODO: currently if compression is set, the library only support writing to a local file. Consumes @encoder even in error case. + + + + + + + Registers a callback for URI output file handling + + + + + Signature for the function doing the lookup for a suitable output method corresponding to an URI. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a buffered output for the progressive saving to an I/O handler Consumes @encoder even in error case. + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + flushes the output I/O channel + + + + + defined(LIBXML_OUTPUT_ENABLED) + Gives a pointer to the data currently held in the output buffer + + + + + defined(LIBXML_OUTPUT_ENABLED) + Gives the length of the data currently held in the output buffer + + + + + defined(LIBXML_OUTPUT_ENABLED) + Write the content of the array in the output I/O buffer This routine handle the I18N transcoding from internal UTF-8 The buffer is lossless, i.e. will store in case of partial or delayed writes. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Write the content of the string in the output I/O buffer This routine escapes the characters and then handle the I18N transcoding from internal UTF-8 The buffer is lossless, i.e. will store in case of partial or delayed writes. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Write the content of the string in the output I/O buffer This routine handle the I18N transcoding from internal UTF-8 The buffer is lossless, i.e. will store in case of partial or delayed writes. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Callback used in the I/O Output API to close the resource + + + + + defined(LIBXML_OUTPUT_ENABLED) + Callback used in the I/O Output API to detect if the current handler can provide output functionality for this resource. 
+ + + + + defined(LIBXML_OUTPUT_ENABLED) + Callback used in the I/O Output API to open the resource + + + + + defined(LIBXML_OUTPUT_ENABLED) + Callback used in the I/O Output API to write to the resource + + + + + + + DEPRECATED: Internal function, don't use. parse a value for an attribute Note: the parser won't do substitution of entities here, this will be handled later in xmlStringGetNodeList [10] AttValue ::= '"' ([^<&"] | Reference)* '"' | "'" ([^<&'] | Reference)* "'" 3.3.3 Attribute-Value Normalization: Before the value of an attribute is passed to the application or checked for validity, the XML processor must normalize it as follows: - a character reference is processed by appending the referenced character to the attribute value - an entity reference is processed by recursively processing the replacement text of the entity - a whitespace character (#x20, #xD, #xA, #x9) is processed by appending #x20 to the normalized value, except that only a single #x20 is appended for a "#xD#xA" sequence that is part of an external parsed entity or the literal entity value of an internal parsed entity - other characters are processed by appending them to the normalized value If the declared value is not CDATA, then the XML processor must further process the normalized attribute value by discarding any leading and trailing space (#x20) characters, and by replacing sequences of space (#x20) characters by a single space (#x20) character. All attributes for which no declaration has been read should be treated by a non-validating parser as if declared CDATA. + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Internal function, don't use. parse an attribute [41] Attribute ::= Name Eq AttValue [ WFC: No External Entity References ] Attribute values cannot contain direct or indirect entity references to external entities. 
[ WFC: No < in Attribute Values ] The replacement text of any entity referred to directly or indirectly in an attribute value (other than "&lt;") must not contain a <. [ VC: Attribute Value Type ] The attribute must have been declared; the value must be of the type declared for it. [25] Eq ::= S? '=' S? With namespace: [NS 11] Attribute ::= QName Eq AttValue Also the case QName == xmlns:??? is handled independently as a namespace definition. + + + + + + DEPRECATED: Internal function, don't use. Parse an attribute list declaration for an element. Always consumes '<!'. [52] AttlistDecl ::= '<!ATTLIST' S Name AttDef* S? '>' [53] AttDef ::= S Name S AttType S DefaultDecl + + + + + DEPRECATED: Internal function, don't use. parse the Attribute list def for an element [54] AttType ::= StringType | TokenizedType | EnumeratedType [55] StringType ::= 'CDATA' [56] TokenizedType ::= 'ID' | 'IDREF' | 'IDREFS' | 'ENTITY' | 'ENTITIES' | 'NMTOKEN' | 'NMTOKENS' Validity constraints for attribute values syntax are checked in xmlValidateAttributeValue() [ VC: ID ] Values of type ID must match the Name production. A name must not appear more than once in an XML document as a value of this type; i.e., ID values must uniquely identify the elements which bear them. [ VC: One ID per Element Type ] No element type may have more than one ID attribute specified. [ VC: ID Attribute Default ] An ID attribute must have a declared default of #IMPLIED or #REQUIRED. [ VC: IDREF ] Values of type IDREF must match the Name production, and values of type IDREFS must match Names; each IDREF Name must match the value of an ID attribute on some element in the XML document; i.e. IDREF values must match the value of some ID attribute. [ VC: Entity Name ] Values of type ENTITY must match the Name production, values of type ENTITIES must match Names; each Entity Name must match the name of an unparsed entity declared in the DTD. 
[ VC: Name Token ] Values of type NMTOKEN must match the Nmtoken production; values of type NMTOKENS must match Nmtokens. + + + + + + defined(LIBXML_SAX1_ENABLED) + Parse a well-balanced chunk of an XML document called by the parser The allowed sequence for the Well Balanced Chunk is the one defined by the content production in the XML grammar: [43] content ::= (element | CharData | Reference | CDSect | PI | Comment)* + + + + + + + + + + defined(LIBXML_SAX1_ENABLED) + Parse a well-balanced chunk of an XML document The allowed sequence for the Well Balanced Chunk is the one defined by the content production in the XML grammar: [43] content ::= (element | CharData | Reference | CDSect | PI | Comment)* + + + + + + + + + + + DEPRECATED: Internal function, don't use. Parse escaped pure raw content. Always consumes '<!['. [18] CDSect ::= CDStart CData CDEnd [19] CDStart ::= '<![CDATA[' [20] Data ::= (Char* - (Char* ']]>' Char*)) [21] CDEnd ::= ']]>' + + + + + defined(LIBXML_CATALOG_ENABLED) + parse an XML file and build a tree. It's like xmlParseFile() except it bypass all catalog lookups. + + + + + DEPRECATED: Internal function, don't use. + + + + + + Compare the string to the encoding schemes already known. Note that the comparison is case insensitive accordingly to the section [XML] 4.3.3 Character Encoding in Entities. + + + + + DEPRECATED: Internal function, don't use. Parse a numeric character reference. Always consumes '&'. [66] CharRef ::= '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';' [ WFC: Legal Character ] Characters referred to using character references must match the production for Char. + + + + + defined(LIBXML_PUSH_ENABLED) + Parse a chunk of memory in push parser mode. Assumes that the parser context was initialized with xmlCreatePushParserCtxt. The last chunk, which will often be empty, must be marked with the @terminate flag. With the default SAX callbacks, the resulting document will be available in ctxt->myDoc. 
This pointer will not be freed by the library. If the document isn't well-formed, ctxt->myDoc is set to NULL. The push parser doesn't support recovery mode. + + + + + + + + DEPRECATED: Internal function, don't use. Parse an XML (SGML) comment. Always consumes '<!'. The spec says that "For compatibility, the string "--" (double-hyphen) must not occur within comments. " [15] Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->' + + + + + Parse XML element content. This is useful if you're only interested in custom SAX callbacks. If you want a node list, use xmlCtxtParseContent. + + + + + Parse an external general entity within an existing parsing context An external general parsed entity is well-formed if it matches the production labeled extParsedEnt. [78] extParsedEnt ::= TextDecl? content + + + + + + + + defined(LIBXML_VALID_ENABLED) + Load and parse an external subset. + + + + + + DEPRECATED: Internal function, don't use. Parse an attribute default declaration [60] DefaultDecl ::= '#REQUIRED' | '#IMPLIED' | (('#FIXED' S)? AttValue) [ VC: Required Attribute ] if the default declaration is the keyword #REQUIRED, then the attribute must be specified for all elements of the type in the attribute-list declaration. [ VC: Attribute Default Legal ] The declared default value must meet the lexical constraints of the declared attribute type c.f. xmlValidateAttributeDecl() [ VC: Fixed Attribute Default ] if an attribute has a default value declared with the #FIXED keyword, instances of that attribute must match the default value. [ WFC: No < in Attribute Values ] handled in xmlParseAttValue() + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadDoc. parse an XML in-memory document and build a tree. + + + + + DEPRECATED: Internal function, don't use. parse a DOCTYPE declaration [28] doctypedecl ::= '<!DOCTYPE' S Name (S ExternalID)? S? ('[' (markupdecl | PEReference | S)* ']' S?)? 
'>' [ VC: Root Element Type ] The Name in the document type declaration must match the element type of the root element. + + + + + Parse an XML document and invoke the SAX handlers. This is useful if you're only interested in custom SAX callbacks. If you want a document tree, use xmlCtxtParseDocument. + + + + + DEPRECATED: Internal function, don't use. parse an XML element [39] element ::= EmptyElemTag | STag content ETag [ WFC: Element Type Match ] The Name in an element's end-tag must match the element type in the start-tag. + + + + + DEPRECATED: Internal function, don't use. parse the declaration for a Mixed Element content The leading '(' and spaces have been skipped in xmlParseElementContentDecl [47] children ::= (choice | seq) ('?' | '*' | '+')? [48] cp ::= (Name | choice | seq) ('?' | '*' | '+')? [49] choice ::= '(' S? cp ( S? '|' S? cp )* S? ')' [50] seq ::= '(' S? cp ( S? ',' S? cp )* S? ')' [ VC: Proper Group/PE Nesting ] applies to [49] and [50] TODO Parameter-entity replacement text must be properly nested with parenthesized groups. That is to say, if either of the opening or closing parentheses in a choice, seq, or Mixed construct is contained in the replacement text for a parameter entity, both must be contained in the same replacement text. For interoperability, if a parameter-entity reference appears in a choice, seq, or Mixed construct, its replacement text should not be empty, and neither the first nor last non-blank character of the replacement text should be a connector (| or ,). + + + + + + DEPRECATED: Internal function, don't use. parse the declaration for an Element content either Mixed or Children, the cases EMPTY and ANY are handled directly in xmlParseElementDecl [46] contentspec ::= 'EMPTY' | 'ANY' | Mixed | children + + + + + + + DEPRECATED: Internal function, don't use. Parse an element declaration. Always consumes '<!'. [45] elementdecl ::= '<!ELEMENT' S Name S contentspec S? 
'>' [ VC: Unique Element Type Declaration ] No element type may be declared more than once + + + + + DEPRECATED: Internal function, don't use. parse the declaration for a Mixed Element content The leading '(' and spaces have been skipped in xmlParseElementContentDecl [51] Mixed ::= '(' S? '#PCDATA' (S? '|' S? Name)* S? ')*' | '(' S? '#PCDATA' S? ')' [ VC: Proper Group/PE Nesting ] applies to [51] too (see [49]) [ VC: No Duplicate Types ] The same name must not appear more than once in a single mixed-content declaration. + + + + + + DEPRECATED: Internal function, don't use. parse the XML encoding name [81] EncName ::= [A-Za-z] ([A-Za-z0-9._] | '-')* + + + + + DEPRECATED: Internal function, don't use. parse the XML encoding declaration [80] EncodingDecl ::= S 'encoding' Eq ('"' EncName '"' | "'" EncName "'") this setups the conversion filters. + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Internal function, don't use. parse an end of tag [42] ETag ::= '</' Name S? '>' With namespace [NS 9] ETag ::= '</' QName S? '>' + + + + + defined(LIBXML_SAX1_ENABLED) + parse an XML external entity out of context and build a tree. [78] extParsedEnt ::= TextDecl? content This correspond to a "Well Balanced" chunk + + + + + DEPRECATED: Internal function, don't use. Parse an entity declaration. Always consumes '<!'. [70] EntityDecl ::= GEDecl | PEDecl [71] GEDecl ::= '<!ENTITY' S Name S EntityDef S? '>' [72] PEDecl ::= '<!ENTITY' S '%' S Name S PEDef S? '>' [73] EntityDef ::= EntityValue | (ExternalID NDataDecl?) [74] PEDef ::= EntityValue | ExternalID [76] NDataDecl ::= S 'NDATA' S Name [ VC: Notation Declared ] The Name must match the declared name of a notation. + + + + + DEPRECATED: Internal function, don't use. + + + + + DEPRECATED: Internal function, don't use. parse a value for ENTITY declarations [9] EntityValue ::= '"' ([^%&"] | PEReference | Reference)* '"' | "'" ([^%&'] | PEReference | Reference)* "'" + + + + + + DEPRECATED: Internal function, don't use. 
parse an Enumerated attribute type. [57] EnumeratedType ::= NotationType | Enumeration [58] NotationType ::= 'NOTATION' S '(' S? Name (S? '|' S? Name)* S? ')' + + + + + + DEPRECATED: Internal function, don't use. parse an Enumeration attribute type. [59] Enumeration ::= '(' S? Nmtoken (S? '|' S? Nmtoken)* S? ')' [ VC: Enumeration ] Values of this type must match one of the Nmtoken tokens in the declaration + + + + + DEPRECATED: Internal function, don't use. parse a general parsed entity An external general parsed entity is well-formed if it matches the production labeled extParsedEnt. [78] extParsedEnt ::= TextDecl? content + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlParseCtxtExternalEntity. Parse an external general entity An external general parsed entity is well-formed if it matches the production labeled extParsedEnt. [78] extParsedEnt ::= TextDecl? content + + + + + + + + + + + DEPRECATED: Internal function, don't use. Parse an External ID or a Public ID NOTE: Productions [75] and [83] interact badly since [75] can generate 'PUBLIC' S PubidLiteral S SystemLiteral [75] ExternalID ::= 'SYSTEM' S SystemLiteral | 'PUBLIC' S PubidLiteral S SystemLiteral [83] PublicID ::= 'PUBLIC' S PubidLiteral + + + + + + + DEPRECATED: Internal function, don't use. parse Markup declarations from an external subset [30] extSubset ::= textDecl? extSubsetDecl [31] extSubsetDecl ::= (markupdecl | conditionalSect | PEReference | S) * + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadFile. parse an XML file and build a tree. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. + + + + + Parse a well-balanced chunk of an XML document within the context (DTD, namespaces, etc ...) of the given node. 
The allowed sequence for the data is a Well Balanced Chunk defined by the content production in the XML grammar: [43] content ::= (element | CharData | Reference | CDSect | PI | Comment)* This function assumes the encoding of @node's document which is typically not what you want. A better alternative is xmlCtxtParseContent. + + + + + + + + + DEPRECATED: Internal function, don't use. Parse markup declarations. Always consumes '<!' or '<?'. [29] markupdecl ::= elementdecl | AttlistDecl | EntityDecl | NotationDecl | PI | Comment [ VC: Proper Declaration/PE Nesting ] Parameter-entity replacement text must be properly nested with markup declarations. That is to say, if either the first character or the last character of a markup declaration (markupdecl above) is contained in the replacement text for a parameter-entity reference, both must be contained in the same replacement text. [ WFC: PEs in Internal Subset ] In the internal DTD subset, parameter-entity references can occur only where markup declarations can occur, not within markup declarations. (This does not apply to references that occur in external parameter entities or to the external subset.) + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadMemory. parse an XML in-memory block and build a tree. + + + + + + DEPRECATED: Internal function, don't use. parse an XML Misc* optional field. [27] Misc ::= Comment | PI | S + + + + + DEPRECATED: Internal function, don't use. parse an XML name. [4] NameChar ::= Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender [5] Name ::= (Letter | '_' | ':') (NameChar)* [6] Names ::= Name (#x20 Name)* + + + + + DEPRECATED: Internal function, don't use. parse an XML Nmtoken. [7] Nmtoken ::= (NameChar)+ [8] Nmtokens ::= Nmtoken (#x20 Nmtoken)* + + + + + DEPRECATED: Internal function, don't use. Parse a notation declaration. Always consumes '<!'. [82] NotationDecl ::= '<!NOTATION' S Name S (ExternalID | PublicID) S? 
'>' Hence there is actually 3 choices: 'PUBLIC' S PubidLiteral 'PUBLIC' S PubidLiteral S SystemLiteral and 'SYSTEM' S SystemLiteral See the NOTE on xmlParseExternalID(). + + + + + DEPRECATED: Internal function, don't use. parse an Notation attribute type. Note: the leading 'NOTATION' S part has already being parsed... [58] NotationType ::= 'NOTATION' S '(' S? Name (S? '|' S? Name)* S? ')' [ VC: Notation Attributes ] Values of this type must match one of the notation names included in the declaration; all notation names in the declaration must be declared. + + + + + DEPRECATED: Internal function, don't use. Parse a parameter entity reference. Always consumes '%'. The entity content is handled directly by pushing it's content as a new input stream. [69] PEReference ::= '%' Name ';' [ WFC: No Recursion ] A parsed entity must not contain a recursive reference to itself, either directly or indirectly. [ WFC: Entity Declared ] In a document without any DTD, a document with only an internal DTD subset which contains no parameter entity references, or a document with "standalone='yes'", ... ... The declaration of a parameter entity must precede any reference to it... [ VC: Entity Declared ] In a document with an external subset or external parameter entities with "standalone='no'", ... ... The declaration of a parameter entity must precede any reference to it... [ WFC: In DTD ] Parameter-entity references may only appear in the DTD. NOTE: misleading but this is handled. + + + + + DEPRECATED: Internal function, don't use. parse an XML Processing Instruction. [16] PI ::= '<?' PITarget (S (Char* - (Char* '?>' Char*)))? '?>' The processing is transferred to SAX once parsed. + + + + + DEPRECATED: Internal function, don't use. parse the name of a PI [17] PITarget ::= Name - (('X' | 'x') ('M' | 'm') ('L' | 'l')) + + + + + DEPRECATED: Internal function, don't use. 
parse an XML public literal [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'" + + + + + DEPRECATED: Internal function, don't use. parse and handle entity references in content, depending on the SAX interface, this may end-up in a call to character() if this is a CharRef, a predefined entity, if there is no reference() callback. or if the parser was asked to switch to that mode. Always consumes '&'. [67] Reference ::= EntityRef | CharRef + + + + + DEPRECATED: Internal function, don't use. parse the XML standalone declaration [32] SDDecl ::= S 'standalone' Eq (("'" ('yes' | 'no') "'") | ('"' ('yes' | 'no')'"')) [ VC: Standalone Document Declaration ] TODO The standalone document declaration must have the value "no" if any external markup declarations contain declarations of: - attributes with default values, if elements to which these attributes apply appear in the document without specifications of values for these attributes, or - entities (other than amp, lt, gt, apos, quot), if references to those entities appear in the document, or - attributes with values subject to normalization, where the attribute appears in the document with a value which will change as a result of normalization, or - element types with element content, if white space occurs directly within any instance of those types. + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Internal function, don't use. Parse a start tag. Always consumes '<'. [40] STag ::= '<' Name (S Attribute)* S? '>' [ WFC: Unique Att Spec ] No attribute name may appear more than once in the same start-tag or empty-element tag. [44] EmptyElemTag ::= '<' Name (S Attribute)* S? '/>' [ WFC: Unique Att Spec ] No attribute name may appear more than once in the same start-tag or empty-element tag. With namespace: [NS 8] STag ::= '<' QName (S Attribute)* S? '>' [NS 10] EmptyElement ::= '<' QName (S Attribute)* S? '/>' + + + + + DEPRECATED: Internal function, don't use. 
parse an XML Literal [11] SystemLiteral ::= ('"' [^"]* '"') | ("'" [^']* "'") + + + + + DEPRECATED: Internal function, don't use. parse an XML declaration header for external entities [77] TextDecl ::= '<?xml' VersionInfo? EncodingDecl S? '?>' + + + + + Parse an URI based on RFC 3986 URI-reference = [ absoluteURI | relativeURI ] [ "#" fragment ] + + + + + Parse an URI but allows to keep intact the original fragments. URI-reference = URI / relative-ref + + + + + + Parse an URI reference string based on RFC 3986 and fills in the appropriate fields of the @uri structure URI-reference = URI / relative-ref + + + + + + Parse an URI based on RFC 3986 URI-reference = [ absoluteURI | relativeURI ] [ "#" fragment ] Available since 2.13.0. + + + + + + DEPRECATED: Internal function, don't use. parse the XML version. [24] VersionInfo ::= S 'version' Eq (' VersionNum ' | " VersionNum ") [25] Eq ::= S? '=' S? + + + + + DEPRECATED: Internal function, don't use. parse the XML version value. [26] VersionNum ::= '1.' [0-9]+ In practice allow [0-9].[0-9]+ at that level + + + + + DEPRECATED: Internal function, don't use. parse an XML declaration header [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>' + + + + + DEPRECATED: Don't use. Insert node info record into the sorted sequence + + + + + + Display and format an error messages, gives file, line, position and extra parameters. + + + + + + + DEPRECATED: Don't use. Find the parser node info struct for a given node + + + + + + DEPRECATED: Don't use. xmlParserFindNodeInfoIndex : Find the index that the info record for the given node is or should be at in a sorted sequence + + + + + + lookup the directory for that file + + + + + DEPRECATED: Internal function, do not use. [69] PEReference ::= '%' Name ';' [ WFC: No Recursion ] A parsed entity must not contain a recursive reference to itself, either directly or indirectly. 
[ WFC: Entity Declared ] In a document without any DTD, a document with only an internal DTD subset which contains no parameter entity references, or a document with "standalone='yes'", ... ... The declaration of a parameter entity must precede any reference to it... [ VC: Entity Declared ] In a document with an external subset or external parameter entities with "standalone='no'", ... ... The declaration of a parameter entity must precede any reference to it... [ WFC: In DTD ] Parameter-entity references may only appear in the DTD. NOTE: misleading but this is handled. A PEReference may have been detected in the current input stream the handling is done accordingly to http://www.w3.org/TR/REC-xml#entproc i.e. - Included in literal in entity values - Included as Parameter Entity reference within DTDs + + + + + DEPRECATED: Use xmlNewInputFromFd. Create a buffered parser input for the progressive parsing for the input from a file descriptor The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + + DEPRECATED: Don't use. Create a buffered parser input for the progressive parsing of a FILE * buffered C I/O The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + + DEPRECATED: Use xmlNewInputFromUrl. Create a buffered parser input for the progressive parsing of a file Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. Do an encoding check if enc == XML_CHAR_ENCODING_NONE + + + + + + DEPRECATED: Use xmlCtxtSetResourceLoader or similar functions. Registers a callback for URI input file handling + + + + + Signature for the function doing the lookup for a suitable input method corresponding to an URI. + + + + + + DEPRECATED: Use xmlNewInputFromIO. 
Create a buffered parser input for the progressive parsing for the input from an I/O handler The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + + + + DEPRECATED: Use xmlNewInputFromMemory. Create a parser input buffer for parsing from a memory area. This function makes a copy of the whole input buffer. If you are sure that the contents of the buffer will remain valid until the document was parsed, you can avoid the copy by using xmlParserInputBufferCreateStatic. The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + + + DEPRECATED: Use xmlNewInputFromMemory. Create a parser input buffer for parsing from a memory area. This functions assumes that the contents of the input buffer remain valid until the document was parsed. Use xmlParserInputBufferCreateMem otherwise. The encoding argument is deprecated and should be set to XML_CHAR_ENCODING_NONE. The encoding can be changed with xmlSwitchEncoding or xmlSwitchEncodingName later on. + + + + + + + DEPRECATED: Internal function, don't use. Grow up the content of the input buffer, the old data are preserved This routine handle the I18N transcoding to internal UTF-8 This routine is used when operating the parser in normal (pull) mode + + + + + + DEPRECATED: Internal function, don't use. Push the content of the arry in the input buffer This routine handle the I18N transcoding to internal UTF-8 This is used when operating the parser in progressive (push) mode. + + + + + + + DEPRECATED: Internal function, don't use. Same as xmlParserInputBufferGrow. + + + + + + Callback for freeing some parser input allocations. + + + + + DEPRECATED: Don't use. This function increase the input for the parser. 
It tries to preserve pointers to the input buffer, and keep already read data + + + + + + DEPRECATED: This function was internal and is deprecated. + + + + + + DEPRECATED: Don't use. This function removes used input for the parser. + + + + + DEPRECATED: Use xmlFormatError. Displays current context within the input content for error tracking + + + + + DEPRECATED: Use xmlFormatError. Displays the associated file and line information for the current input + + + + + Display and format an validity error messages, gives file, line, position and extra parameters. + + + + + + + Display and format a validity warning messages, gives file, line, position and extra parameters. + + + + + + + Display and format a warning messages, gives file, line, position and extra parameters. + + + + + + + Constructs an URI expressing the existing path + + + + + defined(LIBXML_PATTERN_ENABLED) + Compile a pattern. Available since 2.13.0. + + + + + + + + + defined(LIBXML_PATTERN_ENABLED) + Check if the pattern must be looked at from the root. + + + + + defined(LIBXML_PATTERN_ENABLED) + Get a streaming context for that pattern Use xmlFreeStreamCtxt to free the context. + + + + + defined(LIBXML_PATTERN_ENABLED) + Test whether the node matches the pattern + + + + + + defined(LIBXML_PATTERN_ENABLED) + Check the maximum depth reachable by a pattern + + + + + defined(LIBXML_PATTERN_ENABLED) + Check the minimum depth reachable by a pattern, 0 mean the / or . are part of the set. + + + + + defined(LIBXML_PATTERN_ENABLED) + Check if the pattern is streamable i.e. xmlPatternGetStreamCtxt() should work. + + + + + defined(LIBXML_PATTERN_ENABLED) + Compile a pattern. + + + + + + + + DEPRECATED: Use the modern options API with XML_PARSE_PEDANTIC. Set and return the previous value for enabling pedantic warnings. + + + + + DEPRECATED: Internal function, don't use. + + + + + Clear the top input callback from the input stack. this includes the compiled-in I/O. 
+ + + + defined(LIBXML_OUTPUT_ENABLED) + Remove the top output callbacks from the output stack. This includes the compiled-in I/O. + + + + Find the closest preceding sibling which is a element. Note that entity references are not expanded. + + + + + Prints the URI in the stream @stream. + + + + + + DEPRECATED: Internal function, don't use. Push an input stream onto the stack. + + + + + + xmlRMutexLock() is used to lock a libxml2 token_r. + + + + + xmlRMutexUnlock() is used to unlock a libxml2 token_r. + + + + + Convenience function to parse an XML document from a zero-terminated string. See xmlCtxtReadDoc for details. + + + + + + + + Parse an XML from a file descriptor and build a tree. See xmlCtxtReadFd for details. NOTE that the file descriptor will not be closed when the context is freed or reset. + + + + + + + + Convenience function to parse an XML file from the filesystem, the network or a global user-define resource loader. See xmlCtxtReadFile for details. + + + + + + + Parse an XML document from I/O functions and context and build a tree. See xmlCtxtReadIO for details. + + + + + + + + + + Parse an XML in-memory document and build a tree. The input buffer must not contain a terminating null byte. See xmlCtxtReadMemory for details. + + + + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmltextReader for an XML in-memory document. The parsing flags @options are a combination of xmlParserOption. + + + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmltextReader for an XML from a file descriptor. The parsing flags @options are a combination of xmlParserOption. NOTE that the file descriptor will not be closed when the reader is closed or reset. + + + + + + + + defined(LIBXML_READER_ENABLED) + parse an XML file from the filesystem or the network. The parsing flags @options are a combination of xmlParserOption. + + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmltextReader for an XML document from I/O functions and source. 
The parsing flags @options are a combination of xmlParserOption. + + + + + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmltextReader for an XML in-memory document. The parsing flags @options are a combination of xmlParserOption. + + + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an xmltextReader to parse an XML in-memory document. The parsing flags @options are a combination of xmlParserOption. This reuses the existing @reader xmlTextReader. + + + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an xmltextReader to parse an XML from a file descriptor. NOTE that the file descriptor will not be closed when the reader is closed or reset. The parsing flags @options are a combination of xmlParserOption. This reuses the existing @reader xmlTextReader. + + + + + + + + + defined(LIBXML_READER_ENABLED) + parse an XML file from the filesystem or the network. The parsing flags @options are a combination of xmlParserOption. This reuses the existing @reader xmlTextReader. + + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an xmltextReader to parse an XML document from I/O functions and source. The parsing flags @options are a combination of xmlParserOption. This reuses the existing @reader xmlTextReader. + + + + + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an xmltextReader to parse an XML in-memory document. The parsing flags @options are a combination of xmlParserOption. This reuses the existing @reader xmlTextReader. + + + + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an xmltextReader to parse a preparsed XML document. This reuses the existing @reader xmlTextReader. + + + + + + defined(LIBXML_READER_ENABLED) + Create an xmltextReader for a preparsed document. + + + + + Signature for a realloc() implementation. + + + + + + DEPRECATED: don't use + + + + + + + + This function checks that all the namespaces declared within the given tree are properly declared. This is needed for example after Copy or Cut and then paste operations. 
The subtree may still hold pointers to namespace declarations outside the subtree or invalid/masked. As much as possible the function try to reuse the existing namespaces found in the new environment. If not possible the new namespaces are redeclared on @tree at the top of the given subtree. + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadDoc with XML_PARSE_RECOVER. parse an XML in-memory document and build a tree. In the case the document is not Well Formed, a attempt to build a tree is tried anyway + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadFile with XML_PARSE_RECOVER. parse an XML file and build a tree. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. In the case the document is not Well Formed, it attempts to build a tree anyway + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlReadMemory with XML_PARSE_RECOVER. parse an XML in-memory block and build a tree. In the case the document is not Well Formed, an attempt to build a tree is tried anyway + + + + + + defined(LIBXML_REGEXP_ENABLED) + Callback function when doing a transition in the automata + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Extract error information from the regexp execution, the parameter @string will be updated with the value pushed and not accepted, the parameter @values must point to an array of @nbval string pointers on return nbval will contain the number of possible strings in that state and the @values array will be updated with them. The string values + + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Extract information from the regexp execution, the parameter @values must point to an array of @nbval string pointers on return nbval will contain the number of possible strings in that state and the @values array will be updated with them. 
The string values + + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Push one input token in the execution context + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Push one input token in the execution context + + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Free the structures associated to a regular expression evaluation context. + + + + + defined(LIBXML_REGEXP_ENABLED) + Free a regexp + + + + + defined(LIBXML_REGEXP_ENABLED) + Build a context used for progressive evaluation of a regexp. + + + + + + + defined(LIBXML_REGEXP_ENABLED) + Parses a regular expression conforming to XML Schemas Part 2 Datatype Appendix F and builds an automata suitable for testing strings against that regular expression + + + + + defined(LIBXML_REGEXP_ENABLED) + Check if the regular expression generates the value + + + + + + defined(LIBXML_REGEXP_ENABLED) + Check if the regular expression is determinist + + + + + defined(LIBXML_REGEXP_ENABLED) + Print the content of the compiled regular expression + + + + + + DEPRECATED: This function modifies global state and is not thread-safe. Register the char encoding handler. + + + + + Registers the default compiled-in I/O handlers. + + + + defined(LIBXML_OUTPUT_ENABLED) + Registers the default compiled-in I/O handlers. + + + + defined(LIBXML_OUTPUT_ENABLED) && defined(LIBXML_HTTP_ENABLED) + DEPRECATED: Support for HTTP POST has been removed. + + + + DEPRECATED: Use xmlCtxtSetResourceLoader or similar functions. Register a new set of I/O callback for handling parser input. + + + + + + + + DEPRECATED: don't use Registers a callback for node creation + + + + + Signature for the registration callback of a created node + + + + + defined(LIBXML_OUTPUT_ENABLED) + Register a new set of I/O callback for handling output. + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: This function will be made private. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. 
In most cases, you don't have call cleanup functions at all. Cleanup the default Schemas type library associated to RelaxNG + + + + defined(LIBXML_SCHEMAS_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump a RelaxNG structure back + + + + + + defined(LIBXML_SCHEMAS_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump the transformed RelaxNG tree. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Deallocate a RelaxNG structure. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Free the resources associated to the schema parser context + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Free the resources associated to the schema validation context + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the callback information used to handle errors for a validation context + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the error and warning callback information + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Initialize the default type libraries. + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML RelaxNGs parser context for that document. Note: since the process of compiling a RelaxNG schemas modifies the document, the @doc parameter is duplicated internally. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML RelaxNGs parse context for that memory buffer expected to contain an XML RelaxNGs file. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML RelaxNGs parse context for that file/resource expected to contain an XML RelaxNGs file. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML RelaxNGs validation context based on the given schema + + + + + defined(LIBXML_SCHEMAS_ENABLED) + parse a schema definition resource and build an internal XML Schema structure which can be used to validate instances. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: Use xmlRelaxNGSetParserStructuredErrors. 
Set the callback functions used to handle errors for a validation context + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Set the callback functions used to handle errors for a parsing context + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Set the callback function used to load external resources. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: Use xmlRelaxNGSetValidStructuredErrors. Set the error and warning callback information + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Set the structured error callback + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Validate a document tree in memory. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Validate a full subtree when xmlRelaxNGValidatePushElement() returned 0 and the content of the node has been expanded. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Pop the element end from the RelaxNG validation stack. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + check the CData parsed for validation in the current stack + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Push a new element start on the RelaxNG validation stack. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Signature of an error callback from a Relax-NG validation + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Signature of a warning callback from a Relax-NG validation + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Semi private function used to pass information to a parser context which are a combination of xmlRelaxNGParserFlag . + + + + + + Remove the given attribute from the ID table maintained internally. + + + + + + Unlink and free an attribute including all children. Note this doesn't work for namespace declarations. The attribute must have a non-NULL parent pointer. + + + + + DEPRECATED, do not use. This function will be removed from the public API. Remove the given attribute from the Ref table maintained internally. + + + + + + Unlink the old node. If @cur is provided, it is unlinked and inserted in place of @old. 
It is an error if @old has no parent. Unlike xmlAddChild, this function doesn't merge text nodes or delete duplicate attributes. See the notes in xmlAddChild. + + + + + + Cleanup the error. + + + + + Cleanup the last global error registered. For parsing error this does not change the well-formedness result. + + + + + + + + + + + + + + An attribute definition has been parsed + + + + + + + + + + + called when a pcdata block has been parsed + + + + + + + receiving some chars from the parser. + + + + + + + A xmlSAX2Comment has been parsed. + + + + + + An element definition has been parsed + + + + + + + + called when the document end has been detected. + + + + + DEPRECATED: Don't call this function directly. called when the end of an element has been detected. Used for HTML and SAX1. + + + + + + SAX2 callback when an element end has been detected by the parser. It provides the namespace information for the element. + + + + + + + + An entity definition has been parsed + + + + + + + + + + Callback on external subset declaration. + + + + + + + + Provide the column number of the current parsing point. + + + + + Get an entity by name + + + + + + Provide the line number of the current parsing point. + + + + + Get a parameter entity by name + + + + + + Provides the public ID e.g. "-//SGMLSOURCE//DTD DEMO//EN" + + + + + Provides the system ID, basically URL or filename e.g. http://www.sgmlsource.com/dtds/memo.dtd + + + + + Does this document has an external subset + + + + + Does this document has an internal subset + + + + + receiving some ignorable whitespaces from the parser. UNUSED: by default the DOM building will use xmlSAX2Characters + + + + + + + Initialize the default XML SAX2 handler + + + + + + defined(LIBXML_HTML_ENABLED) + Initialize the default HTML SAX2 handler + + + + + Callback on internal subset declaration. + + + + + + + + Is this document tagged standalone ? + + + + + What to do when a notation declaration has been parsed. 
+ + + + + + + + A processing instruction has been parsed. + + + + + + + called when an entity xmlSAX2Reference is detected. + + + + + + This is only used to load DTDs. The preferred way to install custom resolvers is xmlCtxtSetResourceLoader. + + + + + + + Receive the document locator at startup, actually xmlDefaultSAXLocator Everything is available on the context, so this is useless in our case. + + + + + + called when the document start being processed. + + + + + DEPRECATED: Don't call this function directly. Called when an opening tag has been processed. Used for HTML and SAX1. + + + + + + + SAX2 callback when an element start has been detected by the parser. It provides the namespace information for the element, as well as the new namespace declarations on the element. + + + + + + + + + + + + + What to do when an unparsed entity declaration is parsed + + + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use parser option XML_PARSE_SAX1. Has no effect. + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Use xmlCtxtParseDtd. Load and parse an external subset. + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadDoc. parse an XML in-memory document and build a tree. It use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Don't use. parse an XML external entity out of context and build a tree. It use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. [78] extParsedEnt ::= TextDecl? content This correspond to a "Well Balanced" chunk + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadFile. parse an XML file and build a tree. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. 
It use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadFile. parse an XML file and build a tree. Automatic support for ZLIB/Compress compressed document is provided by default if found at compile-time. It use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. User data (void *) is stored within the parser context in the context's _private member, so it is available nearly everywhere in libxml + + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadMemory. parse an XML in-memory block and use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. + + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadMemory. parse an XML in-memory block and use the given SAX function block to handle the parsing callback. If sax is NULL, fallback to the default DOM tree building routines. User data (void *) is stored within the parser context in the context's _private member, so it is available nearly everywhere in libxml + + + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadFile. parse an XML file and call the given SAX handler routines. Automatic support for ZLIB/Compress compressed document is provided + + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Use xmlNewSAXParserCtxt and xmlCtxtReadMemory. parse an XML in-memory buffer and call the given SAX handler routines. + + + + + + + + Initialize the default XML SAX handler according to the version + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Close a document saving context, i.e. make sure that all bytes have been output and free the associated data. 
+ + + + + defined(LIBXML_OUTPUT_ENABLED) + Save a full document to a saving context TODO: The function is not fully implemented yet as it does not return the byte count but 0 instead + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to a file. Will use compression if compiled in and enabled. If @filename is "-" the stdout file is used. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document, converting it to the given encoding + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to an I/O buffer. Warning ! This call xmlOutputBufferClose() on buf which is not available after this call. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Close a document saving context, i.e. make sure that all bytes have been output and free the associated data. Available since 2.13.0. + + + + + defined(LIBXML_OUTPUT_ENABLED) + Flush a document saving context, i.e. make sure that all bytes have been output. + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to a file. Will use compression if compiled in and enabled. If @filename is "-" the stdout file is used. If @format is set then the document will be indented on output. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to a file or an URL. + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Dump an XML document to an I/O buffer. Warning ! This call xmlOutputBufferClose() on buf which is not available after this call. + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. Has no effect. + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Don't use. Set a custom escaping function to be used for text in element content + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Sets the indent string. Available since 2.14.0. 
+ + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a document saving context serializing to a buffer with the encoding and the options given + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a document saving context serializing to a file descriptor with the encoding and the options given. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a document saving context serializing to a filename or possibly to an URL (but this is less reliable) with the encoding and the options given. + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Create a document saving context serializing to a file descriptor with the encoding and the options given + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + Save a subtree starting at the node parameter to a saving context TODO: The function is not fully implemented yet as it does not return the byte count but 0 instead + + + + + + Save the URI as an escaped string + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Checks and computes the values of facets. + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: This function will be made private. Call xmlCleanupParser to free global state but see the warnings there. xmlCleanupParser should be only called once at program exit. In most cases, you don't have to call cleanup functions at all. Cleanup the default XML Schemas type library + + + + defined(LIBXML_SCHEMAS_ENABLED) + Removes and normalize white spaces in the string + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Compare 2 values + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Compare 2 values + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Copies the precomputed value. This duplicates any string within. + + + + + defined(LIBXML_SCHEMAS_ENABLED) && defined(LIBXML_OUTPUT_ENABLED) + Dump a Schema structure. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Deallocate a Schema structure. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Deallocate a Schema Facet structure. 
+ + + + + defined(LIBXML_SCHEMAS_ENABLED) + Free the resources associated to the schema parser context + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Deallocate a Schema Type structure. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Free the resources associated to the schema validation context + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Cleanup the default XML Schemas type library + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Deallocates a wildcard structure. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Lookup function + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Gives you the type struct for a built-in type by its type id. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the canonical lexical representation of the value. The caller has to FREE the returned retValue. WARNING: Some value types are not supported yet, resulting in a @retValue of "???". TODO: XML Schema 1.0 does not define canonical representations for: duration, gYearMonth, gYear, gMonthDay, gMonth, gDay, anyURI, QName, NOTATION. This will be fixed in XML Schema 1.1. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the canonical representation of the value. The caller has to free the returned @retValue. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Extract the value of a facet + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the callback information used to handle errors for a parser context + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Lookup a type in the default XML Schemas type library + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Accessor for the type of a value + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the error and warning callback information + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Initialize the default XML Schemas type library + + + + defined(LIBXML_SCHEMAS_ENABLED) + Evaluates if a specific facet can be used in conjunction with a type. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check if any error was detected during validation. 
+ + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML Schemas parse context for that document. NB. The document may be modified during the parsing process. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Allocate a new Facet structure. + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML Schemas parse context for that memory buffer expected to contain an XML Schemas file. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Allocate a new NOTATION value. The given values are consumed and freed with the struct. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML Schemas parse context for that file/resource expected to contain an XML Schemas file. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Allocate a new QName value. The given values are consumed and freed with the struct. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Allocate a new simple type value. The type can be of XML_SCHEMAS_STRING. WARNING: This one is intended to be expanded for other string based types. We need this for anySimpleType as well. The given value is consumed and freed with the struct. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Create an XML Schemas validation context based on the given schema. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + parse a schema definition resource and build an internal XML Schema structure which can be used to validate instances. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Plug a SAX based validation layer in a SAX parsing event flow. The original @saxptr and @dataptr data are replaced by new pointers but the calls to the original will be maintained. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Unplug a SAX based validation layer in a SAX parsing event flow. The original pointers used in the call are restored. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: Use xmlSchemaSetParserStructuredErrors. 
Set the callback functions used to handle errors for a validation context + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Set the structured error callback + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Register a callback function that will be called to load documents or external entities. Available since 2.14.0. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + DEPRECATED: Use xmlSchemaSetValidStructuredErrors. Set the error and warning callback information + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Sets the options to be used during the validation. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Set the structured error callback + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check that a value conforms to the lexical space of the predefined type. if true a value is computed and returned in @val. + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check that a value conforms to the lexical space of the predefined type. if true a value is computed and returned in @val. This one does apply any normalization to the value. + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Get the validation context options. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + allow access to the parser context of the schema validation context + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Validate a document tree in memory. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check a value against a facet condition + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check a value against a facet condition. This takes value normalization according to the specified whitespace types into account. Note that @value needs to be the *normalized* value if the facet is of type "pattern". + + + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Do a schemas validation of the given resource, it will use the SAX streamable validation internally. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Checka a value against a "length", "minLength" and "maxLength" facet; sets @length to the computed length of @value. 
+ + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Checka a value against a "length", "minLength" and "maxLength" facet; sets @length to the computed length of @value. + + + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Checks the value of a list simple type against a facet. + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Validate a branch of a tree, starting with the given @elem. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Check that a value conforms to the lexical space of the predefined type. if true a value is computed and returned in @val. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Workaround to provide file error reporting information when this is not provided by current APIs + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Allows to set a locator function to the validation context, which will be used to provide file and line information since those are not provided as part of the SAX validation flow Setting @f to NULL disable the locator. + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Validate an input based on a flow of SAX event from the parser and forward the events to the @sax handler with the provided @user_data the user provided @sax handler must be a SAX2 one. + + + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Signature of an error callback from an XSD validation + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + A schemas validation locator, a callback called by the validator. This is used when file or node information are not available to find out what file and line number are affected + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Signature of a warning callback from an XSD validation + + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Appends a next sibling to a list of computed values. + + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Accessor for the boolean value of a computed value. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Accessor for the string value of a computed value. 
+ + + + + defined(LIBXML_SCHEMAS_ENABLED) + Accessor for the next sibling of a list of computed values. + + + + + defined(LIBXML_SCHEMAS_ENABLED) + Replaces 0xd, 0x9 and 0xa with a space. + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Deallocate a Schematron structure. + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Free the resources associated to the schema parser context + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Free the resources associated to the schema validation context + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Create an XML Schematrons parse context for that document. NB. The document may be modified during the parsing process. + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Create an XML Schematrons parse context for that memory buffer expected to contain an XML Schematrons file. + + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Create an XML Schematrons parse context for that file/resource expected to contain an XML Schematrons file. + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Create an XML Schematrons validation context based on the given schema. + + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + parse a schema definition resource and build an internal XML Schema structure which can be used to validate instances. + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Set the structured error callback + + + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Validate a tree instance against the schematron + + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Signature of an error callback from a Schematron validation + + + + + + + defined(LIBXML_SCHEMATRON_ENABLED) + Signature of a warning callback from a Schematron validation + + + + + + + Search a Ns registered under a given name space for a document. recurse on the parents until it finds the defined namespace or return NULL otherwise. @nameSpace can be NULL, this is a search for the default namespace. We don't allow to cross entities boundaries. 
If you don't declare the namespace within those you will be in troubles !!! A warning is generated to cover this case. + + + + + + + Search a Ns aliasing a given URI. Recurse on the parents until it finds the defined namespace or return NULL otherwise. + + + + + + + DEPRECATED: Use xmlBufferSetAllocationScheme. Set the buffer allocation method. Types are XML_BUFFER_ALLOC_EXACT - use exact sizes, keeps memory usage down XML_BUFFER_ALLOC_DOUBLEIT - double buffer when extra needed, improves performance + + + + + DEPRECATED: Use xmlSetDocCompressMode set the default compression mode used, ZLIB based Correct values: 0 (uncompressed) to 9 (max compression) + + + + + set the compression ratio for a document, ZLIB based Correct values: 0 (uncompressed) to 9 (max compression) + + + + + + DEPRECATED: This is a global setting and not thread-safe. Use xmlCtxtSetResourceLoader or similar functions. Changes the default external entity resolver function for the application. + + + + + DEPRECATED: See xmlSetStructuredErrorFunc for alternatives. Set the global "generic" handler and context for error messages. The generic error handler will only receive fragments of error messages which should be concatenated or printed to a stream. If handler is NULL, use the built-in default handler which prints to stderr. Since this is a global setting, it's a good idea to reset the error handler to its default value after collecting the errors you're interested in. For multi-threaded applications, this must be set separately for each thread. + + + + + + Associate all subtrees in @list with a new document. Internal function, see xmlSetTreeDoc. + + + + + + Set the namespace of an element or attribute node. Passing a NULL namespace unsets the namespace. + + + + + + Set (or reset) an attribute carried by a node. The ns structure must be in scope, this is not checked + + + + + + + + Set (or reset) an attribute carried by a node. 
If @name has a prefix, then the corresponding namespace-binding will be used, if in scope; it is an error it there's no such ns-binding for the prefix in scope. + + + + + + + DEPRECATED: Use a per-context error handler. It's recommended to use the per-context error handlers instead: - xmlCtxtSetErrorHandler (since 2.13.0) - xmlTextReaderSetStructuredErrorHandler - xmlXPathSetErrorHandler (since 2.13.0) - xmlXIncludeSetErrorHandler (since 2.13.0) - xmlSchemaSetParserStructuredErrors - xmlSchemaSetValidStructuredErrors - xmlRelaxNGSetParserStructuredErrors - xmlRelaxNGSetValidStructuredErrors Set the global "structured" handler and context for error messages. If handler is NULL, the error handler is deactivated. The structured error handler takes precedence over "generic" handlers, even per-context generic handlers. Since this is a global setting, it's a good idea to deactivate the error handler after collecting the errors you're interested in. For multi-threaded applications, this must be set separately for each thread. + + + + + + This is an internal function which shouldn't be used. It is invoked by functions like xmlAddChild, xmlAddSibling or xmlReplaceNode. @tree must be the root node of an unlinked subtree. Associate all nodes in a tree with a new document. Also copy strings from the old document's dictionary and remove ID attributes from the old ID table. + + + + + + defined(LIBXML_SAX1_ENABLED) + DEPRECATED: Don't use. Setup the parser context to parse a new buffer; Clears any prior contents from the parser context. The buffer parameter must not be NULL, but the filename parameter can be + + + + + + + DEPRECATED: Internal function, do not use. Skip whitespace in the input stream. + + + + + DEPRECATED: Internal function, don't use. This will dump the content of the element content definition Intended just for the debug routine + + + + + + + + DEPRECATED: Don't use. parse an UTF8 encoded XML qualified name string [NS 5] QName ::= (Prefix ':')? 
LocalPart [NS 6] Prefix ::= NCName [NS 7] LocalPart ::= NCName + + + + + + + DEPRECATED: This function doesn't report malloc failures. parse an XML qualified name string [NS 5] QName ::= (Prefix ':')? LocalPart [NS 6] Prefix ::= NCName [NS 7] LocalPart ::= NCName + + + + + + parse an XML qualified name string,i + + + + + + defined(LIBXML_OUTPUT_ENABLED) + DEPRECATED: Internal function, don't use. Deprecated, unsafe, use xmlSnprintfElementContent + + + + + + + Blocks further parser processing + + + + + Check if both strings are equal of have same content. Should be a bit more readable and faster than xmlStrcmp() + + + + + + Formats @msg and places result into @buf. + + + + + + + + Check if a QName is Equal to a given string + + + + + + + Formats @msg and places result into @buf. + + + + + + + + a strcasecmp for xmlChar's + + + + + + a case-ignoring strstr for xmlChar's + + + + + + a strcat for array of xmlChar's. Since they are supposed to be encoded in UTF-8 or an encoding with 8bit based chars, we assume a termination mark of '0'. + + + + + + a strchr for xmlChar's + + + + + + a strcmp for xmlChar's + + + + + + a strdup for array of xmlChar's. Since they are supposed to be encoded in UTF-8 or an encoding with 8bit based chars, we assume a termination mark of '0'. + + + + + Signature for an strdup() implementation. + + + + + defined(LIBXML_PATTERN_ENABLED) + push one level from the stream. + + + + + defined(LIBXML_PATTERN_ENABLED) + Push new data onto the stream. NOTE: if the call xmlPatterncompile() indicated a dictionary, then strings for name and ns will be expected to come from the dictionary. Both @name and @ns being NULL means the / i.e. the root of the document. This can also act as a reset. Otherwise the function will act as if it has been given an element-node. + + + + + + + defined(LIBXML_PATTERN_ENABLED) + Push new attribute data onto the stream. 
NOTE: if the call xmlPatterncompile() indicated a dictionary, then strings for name and ns will be expected to come from the dictionary. Both @name and @ns being NULL means the / i.e. the root of the document. This can also act as a reset. Otherwise the function will act as if it has been given an attribute-node. + + + + + + + defined(LIBXML_PATTERN_ENABLED) + Push new data onto the stream. NOTE: if the call xmlPatterncompile() indicated a dictionary, then strings for name and ns will be expected to come from the dictionary. Both @name and @ns being NULL means the / i.e. the root of the document. This can also act as a reset. Different from xmlStreamPush() this function can be fed with nodes of type: element-, attribute-, text-, cdata-section-, comment- and processing-instruction-node. + + + + + + + + defined(LIBXML_PATTERN_ENABLED) + Query if the streaming pattern additionally needs to be fed with text-, cdata-section-, comment- and processing-instruction-nodes. If the result is 0 then only element-nodes and attribute-nodes need to be pushed. + + + + + DEPRECATED: Internal function, do not use. The current char value, if using UTF-8 this may actually span multiple bytes in the input buffer. + + + + + + + DEPRECATED: Internal function, don't use. + + + + + + + + + + DEPRECATED: Use xmlNodeSetContent. Parse an attribute value and build a node list containing only text and entity reference nodes. The resulting nodes will be associated with the document if provided. The document is also used to look up entities. The input is not validated. Syntax errors or references to undeclared entities will be ignored silently with unspecified results. + + + + + + DEPRECATED: Internal function, don't use. + + + + + + + + + + + DEPRECATED: Use xmlNodeSetContentLen. See xmlStringGetNodeList. + + + + + + + length of a xmlChar's string + + + + + a strncasecmp for xmlChar's + + + + + + + a strncat for array of xmlChar's, it will extend @cur with the len first bytes of @add. 
Note that if @len < 0 then this is an API error and NULL will be returned. + + + + + + + same as xmlStrncat, but creates a new string. The original two strings are not freed. If @len is < 0 then the length will be calculated automatically. + + + + + + + a strncmp for xmlChar's + + + + + + + a strndup for array of xmlChar's + + + + + + a strstr for xmlChar's + + + + + + Extract a substring of a given string + + + + + + + Signature of the function to use when there is an error and the module handles the new error reporting mechanism. + + + + + + DEPRECATED: Use the modern options API with XML_PARSE_NOENT. Set and return the previous value for default entity support. Initially the parser always keep entity references instead of substituting entity values in the output. This function has to be used to change the default parser behavior SAX::substituteEntities() has to be used for changing that on a file by file basis. + + + + + Use encoding specified by enum to decode input data. This overrides the encoding found in the XML declaration. This function can also be used to override the encoding of chunks passed to xmlParseChunk. + + + + + + Use specified encoding to decode input data. This overrides the encoding found in the XML declaration. This function can also be used to override the encoding of chunks passed to xmlParseChunk. Available since 2.13.0. + + + + + + DEPRECATED: Internal function, don't use. Use encoding handler to decode input data. + + + + + + + Use encoding handler to decode input data. This function can be used to enforce the encoding of chunks passed to xmlParseChunk. + + + + + + Concat the given string at the end of the existing node content. If @len is -1, the string length will be calculated. + + + + + + + Merge the second text node into the first. The second node is unlinked and freed. 
+ + + + + + defined(LIBXML_READER_ENABLED) + Provides the number of attributes of the current node + + + + + defined(LIBXML_READER_ENABLED) + The base URI of the node. + + + + + defined(LIBXML_READER_ENABLED) + DEPRECATED: The returned value is mostly random and useless. It reflects the parser reading ahead and is in no way related to the current node. This function provides the current index of the parser used by the reader, relative to the start of the current entity. This function actually just wraps a call to xmlBytesConsumed() for the parser context associated with the reader. See xmlBytesConsumed() for more information. + + + + + defined(LIBXML_READER_ENABLED) + This method releases any resources allocated by the current instance changes the state to Closed and close any underlying input. + + + + + defined(LIBXML_READER_ENABLED) + The base URI of the node. + + + + + defined(LIBXML_READER_ENABLED) + Determine the encoding of the document being read. + + + + + defined(LIBXML_READER_ENABLED) + The local name of the node. + + + + + defined(LIBXML_READER_ENABLED) + The qualified name of the node, equal to Prefix :LocalName. + + + + + defined(LIBXML_READER_ENABLED) + The URI defining the namespace associated with the node. + + + + + defined(LIBXML_READER_ENABLED) + A shorthand reference to the namespace associated with the node. + + + + + defined(LIBXML_READER_ENABLED) + Get an interned string from the reader, allows for example to speedup string name comparisons + + + + + + defined(LIBXML_READER_ENABLED) + Provides the text value of the node if present + + + + + defined(LIBXML_READER_ENABLED) + The xml:lang scope within which the node resides. + + + + + defined(LIBXML_READER_ENABLED) + Determine the XML version of the document being read. + + + + + defined(LIBXML_READER_ENABLED) + Hacking interface allowing to get the xmlDocPtr corresponding to the current document being accessed by the xmlTextReader. 
NOTE: as a result of this call, the reader will not destroy the associated XML document and calling xmlFreeDoc() on the result is needed once the reader parsing has finished. + + + + + defined(LIBXML_READER_ENABLED) + Hacking interface allowing to get the xmlNodePtr corresponding to the current node being accessed by the xmlTextReader. This is dangerous because the underlying node may be destroyed on the next Reads. + + + + + defined(LIBXML_READER_ENABLED) + The depth of the node in the tree. + + + + + defined(LIBXML_READER_ENABLED) + Signature of an error callback from a reader parser + + + + + + + + defined(LIBXML_READER_ENABLED) + Reads the contents of the current node and the full subtree. It then makes the subtree available until the next xmlTextReaderRead() call + + + + + defined(LIBXML_READER_ENABLED) + Provides the value of the attribute with the specified qualified name. + + + + + + defined(LIBXML_READER_ENABLED) + Provides the value of the attribute with the specified index relative to the containing element. + + + + + + defined(LIBXML_READER_ENABLED) + Provides the value of the specified attribute + + + + + + + defined(LIBXML_READER_ENABLED) + Retrieve the error callback function and user argument. + + + + + + + defined(LIBXML_READER_ENABLED) + Available since 2.13.0. + + + + + defined(LIBXML_READER_ENABLED) + Provide the column number of the current parsing point. + + + + + defined(LIBXML_READER_ENABLED) + Provide the line number of the current parsing point. + + + + + defined(LIBXML_READER_ENABLED) + Read the parser internal property. + + + + + + defined(LIBXML_READER_ENABLED) + Method to get the remainder of the buffered XML. this method stops the parser, set its state to End Of File and return the input stream with what is left that the parser did not use. The implementation is not good, the parser certainly progressed past what's left in reader->input, and there is an allocation problem. Best would be to rewrite it differently. 
+ + + + + defined(LIBXML_READER_ENABLED) + Whether the node has attributes. + + + + + defined(LIBXML_READER_ENABLED) + Whether the node can have a text value. + + + + + defined(LIBXML_READER_ENABLED) + Whether an Attribute node was generated from the default value defined in the DTD or schema. + + + + + defined(LIBXML_READER_ENABLED) + Check if the current node is empty + + + + + defined(LIBXML_READER_ENABLED) + Determine whether the current node is a namespace declaration rather than a regular attribute. + + + + + defined(LIBXML_READER_ENABLED) + Retrieve the validity status from the parser context + + + + + defined(LIBXML_READER_ENABLED) + The local name of the node. + + + + + defined(LIBXML_READER_ENABLED) + Obtain the base URI for the given locator. + + + + + defined(LIBXML_READER_ENABLED) + Obtain the line number for the given locator. + + + + + defined(LIBXML_READER_ENABLED) + Resolves a namespace prefix in the scope of the current element. + + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the attribute with the specified qualified name. + + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the attribute with the specified index relative to the containing element. + + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the attribute with the specified local name and namespace URI. + + + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the node that contains the current Attribute node. + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the first attribute associated with the current node. + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the next attribute associated with the current node. + + + + + defined(LIBXML_READER_ENABLED) + The qualified name of the node, equal to Prefix :LocalName. 
+ + + + + defined(LIBXML_READER_ENABLED) + The URI defining the namespace associated with the node. + + + + + defined(LIBXML_READER_ENABLED) + Skip to the node following the current one in document order while avoiding the subtree if any. + + + + + defined(LIBXML_READER_ENABLED) + Skip to the node following the current one in document order while avoiding the subtree if any. Currently implemented only for Readers built on a document + + + + + defined(LIBXML_READER_ENABLED) + Get the node type of the current node Reference: http://www.gnu.org/software/dotgnu/pnetlib-doc/System/Xml/XmlNodeType.html + + + + + defined(LIBXML_READER_ENABLED) + The value indicating whether to normalize white space and attribute values. Since attribute value and end of line normalizations are a MUST in the XML specification only the value true is accepted. The broken behaviour of accepting out of range character entities like &#0; is of course not supported either. + + + + + defined(LIBXML_READER_ENABLED) + A shorthand reference to the namespace associated with the node. + + + + + defined(LIBXML_READER_ENABLED) + This tells the XML Reader to preserve the current node. The caller must also use xmlTextReaderCurrentDoc() to keep an handle on the resulting document once parsing has finished + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_PATTERN_ENABLED) + This tells the XML Reader to preserve all nodes matched by the pattern. The caller must also use xmlTextReaderCurrentDoc() to keep an handle on the resulting document once parsing has finished + + + + + + + defined(LIBXML_READER_ENABLED) + The quotation mark character used to enclose the value of an attribute. + + + + + defined(LIBXML_READER_ENABLED) + Moves the position of the current instance to the next node in the stream, exposing its properties. + + + + + defined(LIBXML_READER_ENABLED) + Parses an attribute value into one or more Text and EntityReference nodes. 
+ + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_WRITER_ENABLED) + Reads the contents of the current node, including child nodes and markup. + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_WRITER_ENABLED) + Reads the contents of the current node, including child nodes and markup. + + + + + defined(LIBXML_READER_ENABLED) + Gets the read state of the reader. + + + + + defined(LIBXML_READER_ENABLED) + Reads the contents of an element or a text node as a string. + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use RelaxNG to validate the document as it is processed. Activation is only possible before the first Read(). if @schema is NULL, then RelaxNG validation is deactivated. @ The @schema should not be freed until the reader is deallocated or its use has been deactivated. + + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use RelaxNG schema to validate the document as it is processed. Activation is only possible before the first Read(). If @rng is NULL, then RelaxNG schema validation is deactivated. + + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use RelaxNG schema context to validate the document as it is processed. Activation is only possible before the first Read(). If @ctxt is NULL, then RelaxNG schema validation is deactivated. + + + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use W3C XSD schema to validate the document as it is processed. Activation is only possible before the first Read(). If @xsd is NULL, then XML Schema validation is deactivated. + + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use W3C XSD schema context to validate the document as it is processed. Activation is only possible before the first Read(). If @ctxt is NULL, then XML Schema validation is deactivated. + + + + + + + defined(LIBXML_READER_ENABLED) + DEPRECATED: Use xmlTextReaderSetStructuredErrorHandler. 
Register a callback function that will be called on error and warnings. If @f is NULL, the default error and warning handlers are restored. + + + + + + + defined(LIBXML_READER_ENABLED) + Set the maximum amplification factor. See xmlCtxtSetMaxAmplification. + + + + + + defined(LIBXML_READER_ENABLED) + Change the parser processing behaviour by changing some of its internal properties. Note that some properties can only be changed before any read has been done. + + + + + + + defined(LIBXML_READER_ENABLED) + Register a callback function that will be called to load external resources like entities. Available since 2.14.0. + + + + + + + defined(LIBXML_READER_ENABLED) && defined(LIBXML_SCHEMAS_ENABLED) + Use XSD Schema to validate the document as it is processed. Activation is only possible before the first Read(). if @schema is NULL, then Schema validation is deactivated. The @schema should not be freed until the reader is deallocated or its use has been deactivated. + + + + + + defined(LIBXML_READER_ENABLED) + Register a callback function that will be called on error and warnings. If @f is NULL, the default error and warning handlers are restored. + + + + + + + defined(LIBXML_READER_ENABLED) + Setup an XML reader with new options + + + + + + + + + defined(LIBXML_READER_ENABLED) + Determine the standalone status of the document being read. + + + + + defined(LIBXML_READER_ENABLED) + Provides the text value of the node if present + + + + + defined(LIBXML_READER_ENABLED) + The xml:lang scope within which the node resides. + + + + + defined(LIBXML_WRITER_ENABLED) + Flushes and closes the output buffer. Available since 2.13.0. + + + + + defined(LIBXML_WRITER_ENABLED) + End the current xml element. + + + + + defined(LIBXML_WRITER_ENABLED) + End an xml CDATA section. + + + + + defined(LIBXML_WRITER_ENABLED) + End the current xml comment. + + + + + defined(LIBXML_WRITER_ENABLED) + End an xml DTD. + + + + + defined(LIBXML_WRITER_ENABLED) + End an xml DTD attribute list. 
+ + + + + defined(LIBXML_WRITER_ENABLED) + End an xml DTD element. + + + + + defined(LIBXML_WRITER_ENABLED) + End an xml DTD entity. + + + + + defined(LIBXML_WRITER_ENABLED) + End an xml document. All open elements are closed, and the content is flushed to the output. + + + + + defined(LIBXML_WRITER_ENABLED) + End the current xml element. + + + + + defined(LIBXML_WRITER_ENABLED) + End the current xml PI. + + + + + defined(LIBXML_WRITER_ENABLED) + Flush the output buffer. + + + + + defined(LIBXML_WRITER_ENABLED) + End the current xml element. Writes an end tag even if the element is empty + + + + + defined(LIBXML_WRITER_ENABLED) + Set indentation output. indent = 0 do not indentation. indent > 0 do indentation. + + + + + + defined(LIBXML_WRITER_ENABLED) + Set string indentation. + + + + + + defined(LIBXML_WRITER_ENABLED) + Set the character used for quoting attributes. + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml attribute. + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml attribute with namespace support. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml CDATA section. + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml comment. + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml DTD. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml DTD ATTLIST. + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml DTD element. + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml DTD ATTLIST. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Start a new xml document + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml element. + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml element with namespace support. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Start an xml PI. + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml attribute. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml attribute. + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an base64 encoded xml text. 
+ + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a BinHex encoded xml text. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml CDATA. + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml comment. + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD. + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD ATTLIST. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD element. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD entity. + + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD external entity. The entity must have been started with xmlTextWriterStartDTDEntity + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write the contents of a DTD external entity. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD internal entity. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD entity. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml element. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml element with namespace support. + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml attribute. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml attribute.with namespace support + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml CDATA. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml comment. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD with a formatted markup declarations part. + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD ATTLIST. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD element. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD internal entity. + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml element. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml element with namespace support. + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted PI. 
+ + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted raw xml text. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml text. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml PI. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a raw xml text. + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml text. TODO: what about entities and special chars?? + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml text. + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml attribute. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml attribute.with namespace support + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml CDATA. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write an xml comment. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a DTD with a formatted markup declarations part. + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD ATTLIST. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD element. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted DTD internal entity. + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml element. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml element with namespace support. + + + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml PI. + + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted raw xml text. + + + + + + + defined(LIBXML_WRITER_ENABLED) + Write a formatted xml text. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + + + + + + + + + + + + + + + + + + + + + + + defined(LIBXML_OUTPUT_ENABLED) + + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of AegeanNumbers UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of AlphabeticPresentationForms UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Arabic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of ArabicPresentationForms-A UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of ArabicPresentationForms-B UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Armenian UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Arrows UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of BasicLatin UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Bengali UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of the UCS Block + + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of BlockElements UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Bopomofo UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of BopomofoExtended UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of BoxDrawing UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of BraillePatterns UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Buhid UCS Block 
+ + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of ByzantineMusicalSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKCompatibility UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKCompatibilityForms UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKCompatibilityIdeographs UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKCompatibilityIdeographsSupplement UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKRadicalsSupplement UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKSymbolsandPunctuation UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKUnifiedIdeographs UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKUnifiedIdeographsExtensionA UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CJKUnifiedIdeographsExtensionB UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of the UCS Category + + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of C UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Cc UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Cf UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Co UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Cs UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of L UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Ll UCS Category + + + + + 
defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Lm UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Lo UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Lt UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Lu UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of M UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Mc UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Me UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Mn UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of N UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Nd UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Nl UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of No UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of P UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Pc UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Pd UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Pe UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Pf UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Pi UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Po UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Ps UCS Category + + + + + 
defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of S UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Sc UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Sk UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Sm UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of So UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Z UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Zl UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Zp UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Zs UCS Category + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Cherokee UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CombiningDiacriticalMarks UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CombiningDiacriticalMarksforSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CombiningHalfMarks UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CombiningMarksforSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of ControlPictures UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CurrencySymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CypriotSyllabary UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Cyrillic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of CyrillicSupplement UCS 
Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Deseret UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Devanagari UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Dingbats UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of EnclosedAlphanumerics UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of EnclosedCJKLettersandMonths UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Ethiopic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of GeneralPunctuation UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of GeometricShapes UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Georgian UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Gothic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Greek UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of GreekExtended UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of GreekandCoptic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Gujarati UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Gurmukhi UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of HalfwidthandFullwidthForms UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of HangulCompatibilityJamo UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of HangulJamo UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the 
character is part of HangulSyllables UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Hanunoo UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Hebrew UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of HighPrivateUseSurrogates UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of HighSurrogates UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Hiragana UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of IPAExtensions UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of IdeographicDescriptionCharacters UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Kanbun UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of KangxiRadicals UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Kannada UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Katakana UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of KatakanaPhoneticExtensions UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Khmer UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of KhmerSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Lao UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Latin-1Supplement UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LatinExtended-A UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LatinExtendedAdditional UCS Block + + + + + 
defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LatinExtended-B UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LetterlikeSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Limbu UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LinearBIdeograms UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LinearBSyllabary UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of LowSurrogates UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Malayalam UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MathematicalAlphanumericSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MathematicalOperators UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MiscellaneousMathematicalSymbols-A UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MiscellaneousMathematicalSymbols-B UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MiscellaneousSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MiscellaneousSymbolsandArrows UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MiscellaneousTechnical UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Mongolian UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of MusicalSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Myanmar UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of NumberForms UCS 
Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Ogham UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of OldItalic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of OpticalCharacterRecognition UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Oriya UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Osmanya UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of PhoneticExtensions UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of PrivateUse UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of PrivateUseArea UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Runic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Shavian UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Sinhala UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SmallFormVariants UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SpacingModifierLetters UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Specials UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SuperscriptsandSubscripts UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SupplementalArrows-A UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SupplementalArrows-B UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SupplementalMathematicalOperators UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) 
+ Check whether the character is part of SupplementaryPrivateUseArea-A UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of SupplementaryPrivateUseArea-B UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Syriac UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Tagalog UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Tagbanwa UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Tags UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of TaiLe UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of TaiXuanJingSymbols UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Tamil UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Telugu UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Thaana UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Thai UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Tibetan UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of Ugaritic UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of UnifiedCanadianAboriginalSyllabics UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of VariationSelectors UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of VariationSelectorsSupplement UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of YiRadicals UCS Block + + + + + defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of YiSyllables UCS Block + + + + + 
defined(LIBXML_UNICODE_ENABLED) + Check whether the character is part of YijingHexagramSymbols UCS Block + + + + + Escaping routine, does not do validity checks ! It will try to escape the chars needing this, but this is heuristic based it's impossible to be sure. + + + + + This routine escapes a string to hex, ignoring unreserved characters a-z, A-Z, 0-9, "-._~", a few sub-delims "!*'()", the gen-delim "@" (why?) and the characters in the exception list. + + + + + + Unescaping routine, but does not check that the string is an URI. The output is a direct unsigned char translation of %XX values (no encoding) Note that the length of the result can only be smaller or same size as the input string. + + + + + + + compares the two UCS4 values + + + + + + calculates the internal size of a UTF8 character + + + + + compute the length of an UTF8 string, it doesn't do a full UTF8 checking of the content of the string. + + + + + a function to provide the relative location of a UTF8 char + + + + + + a strndup for array of UTF8's + + + + + + a function to provide the equivalent of fetching a character from a string array + + + + + + storage size of an UTF8 string the behaviour is not guaranteed if the input string is not UTF-8 + + + + + + Create a substring from a given UTF-8 string Note: positions are given in units of UTF-8 chars + + + + + + + Unlink a node from its tree. The node is not freed. Unless it is reinserted, it must be managed manually and freed eventually by calling xmlFreeNode. + + + + + xmlUnlockLibrary() is used to release a re-entrant lock on the libxml2 library. + + + + Remove an attribute carried by a node. + + + + + + + Remove an attribute carried by a node. This handles only attributes in no namespace. + + + + + + defined(LIBXML_VALID_ENABLED) && defined(LIBXML_REGEXP_ENABLED) + DEPRECATED: Internal function, don't use. 
(Re)Build the automata associated to the content model of this element + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Does the validation related extra step of the normalization of attribute values: If the declared value is not CDATA, then the XML processor must further process the normalized attribute value by discarding any leading and trailing space (#x20) characters, and by replacing sequences of space (#x20) characters by single space (#x20) character. Also check VC: Standalone Document Declaration in P32, and update ctxt->valid accordingly + + + + + + + + + defined(LIBXML_VALID_ENABLED) + Build/extend a list of potential children allowed by the content tree + + + + + + + + defined(LIBXML_VALID_ENABLED) + This function returns the list of authorized children to insert within an existing tree while respecting the validity constraints forced by the Dtd. The insertion point is defined using @prev and @next in the following ways: to insert before 'node': xmlValidGetValidElements(node->prev, node, ... to insert next 'node': xmlValidGetValidElements(node, node->next, ... to replace 'node': xmlValidGetValidElements(node->prev, node->next, ... to prepend a child to 'node': xmlValidGetValidElements(NULL, node->childs, to append a child to 'node': xmlValidGetValidElements(node->last, NULL, ... pointers to the element names are inserted at the beginning of the array and do not need to be freed. + + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Does the validation related extra step of the normalization of attribute values: If the declared value is not CDATA, then the XML processor must further process the normalized attribute value by discarding any leading and trailing space (#x20) characters, and by replacing sequences of space (#x20) characters by single space (#x20) character. + + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. 
Try to validate a single attribute definition basically it does the following checks as described by the XML-1.0 recommendation: - [ VC: Attribute Default Legal ] - [ VC: Enumeration ] - [ VC: ID Attribute Default ] The ID/IDREF uniqueness and matching are done separately + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Validate that the given attribute value match the proper production [ VC: ID ] Values of type ID must match the Name production.... [ VC: IDREF ] Values of type IDREF must match the Name production, and values of type IDREFS must match Names ... [ VC: Entity Name ] Values of type ENTITY must match the Name production, values of type ENTITIES must match Names ... [ VC: Name Token ] Values of type NMTOKEN must match the Nmtoken production; values of type NMTOKENS must match Nmtokens. + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: This function can't report malloc or other failures. Use xmlCtxtValidateDocument. Try to validate the document instance basically it does the all the checks described by the XML Rec i.e. validates the internal and external subset (if present) and validate the document tree. + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Does the final step for the document validation once all the incremental validation steps have been completed basically it does the following checks described by the XML Rec Check all the IDREF/IDREFS attributes definition for validity + + + + + + defined(LIBXML_VALID_ENABLED) + Try to validate the document against the dtd instance Basically it does check all the definitions in the DtD. Note the the internal subset (if present) is de-coupled (i.e. not used), which could give problems if ID or IDREF is present. + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. 
Does the final step for the dtds validation once all the subsets have been parsed basically it does the following checks described by the XML Rec - check that ENTITY and ENTITIES type attributes default or possible values matches one of the defined entities. - check that NOTATION type attributes default or possible values matches one of the defined notations. + + + + + + defined(LIBXML_VALID_ENABLED) + Try to validate the subtree under an element + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Try to validate a single element definition basically it does the following checks as described by the XML-1.0 recommendation: - [ VC: One ID per Element Type ] - [ VC: No Duplicate Types ] - [ VC: Unique Element Type Declaration ] + + + + + + + Check that a value conforms to the lexical space of NCName + + + + + + Check that a value conforms to the lexical space of NMToken + + + + + + Check that a value conforms to the lexical space of Name + + + + + + defined(LIBXML_VALID_ENABLED) + Validate that the given value match Name production + + + + + defined(LIBXML_VALID_ENABLED) + Validate that the given value match Names production + + + + + defined(LIBXML_VALID_ENABLED) + Validate that the given value match Nmtoken production [ VC: Name Token ] + + + + + defined(LIBXML_VALID_ENABLED) + Validate that the given value match Nmtokens production [ VC: Name Token ] + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Try to validate a single notation definition basically it does the following checks as described by the XML-1.0 recommendation: - it seems that no validity constraint exists on notation declarations But this function get called anyway ... + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Validate that the given name match a notation declaration. - [ VC: Notation Declared ] + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. 
Try to validate a single attribute for an element basically it does the following checks as described by the XML-1.0 recommendation: - [ VC: Attribute Value Type ] - [ VC: Fixed Attribute Default ] - [ VC: Entity Name ] - [ VC: Name Token ] - [ VC: ID ] - [ VC: IDREF ] - [ VC: Entity Name ] - [ VC: Notation Attributes ] The ID/IDREF uniqueness and matching are done separately + + + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Try to validate a single element and it's attributes, basically it does the following checks as described by the XML-1.0 recommendation: - [ VC: Element Valid ] - [ VC: Required Attribute ] Then call xmlValidateOneAttribute() for each attribute present. The ID/IDREF checkings are done separately + + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. Try to validate a single namespace declaration for an element basically it does the following checks as described by the XML-1.0 recommendation: - [ VC: Attribute Value Type ] - [ VC: Fixed Attribute Default ] - [ VC: Entity Name ] - [ VC: Name Token ] - [ VC: ID ] - [ VC: IDREF ] - [ VC: Entity Name ] - [ VC: Notation Attributes ] The ID/IDREF uniqueness and matching are done separately + + + + + + + + + + defined(LIBXML_VALID_ENABLED) && defined(LIBXML_REGEXP_ENABLED) + DEPRECATED: Internal function, don't use. Pop the element end from the validation stack. + + + + + + + + defined(LIBXML_VALID_ENABLED) && defined(LIBXML_REGEXP_ENABLED) + DEPRECATED: Internal function, don't use. check the CData parsed for validation in the current stack + + + + + + + defined(LIBXML_VALID_ENABLED) && defined(LIBXML_REGEXP_ENABLED) + DEPRECATED: Internal function, don't use. Push a new element start on the validation stack. + + + + + + + + Check that a value conforms to the lexical space of QName + + + + + + defined(LIBXML_VALID_ENABLED) + DEPRECATED: Internal function, don't use. 
Try to validate a the root element basically it does the following check as described by the XML-1.0 recommendation: - [ VC: Root Element Type ] it doesn't try to recurse or apply other check to the element + + + + + + Callback called when a validity error is found. This is a message oriented function similar to an *printf function. + + + + + + + Callback called when a validity warning is found. This is a message oriented function similar to an *printf function. + + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Free an XInclude context + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Available since 2.13.0. + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Creates a new XInclude context + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution on the XML document @doc + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution on the XML document @doc + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution on the XML document @doc + + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution for the given subtree reusing the information and data coming from the given context. + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution for the given subtree + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution for the given subtree + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Implement the XInclude substitution on the XML node @tree + + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Register a callback function that will be called on errors and warnings. If handler is NULL, the error handler will be deactivated. Available since 2.13.0. + + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Set the flags used for further processing of XML resources. + + + + + + defined(LIBXML_XINCLUDE_ENABLED) + Register a callback function that will be called to load included documents. Available since 2.14.0. 
+ + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the add operation on XPath objects: The numeric operators convert their operands to numbers as if by calling the number function. + + + + + defined(LIBXML_XPATH_ENABLED) + An axis traversal function. To traverse an axis, the engine calls the first time with cur == NULL and repeat until the function returns NULL indicating the end of the axis traversal. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the boolean() XPath function boolean boolean(object) The boolean function converts its argument to a boolean as follows: - a number is true if and only if it is neither positive or negative zero nor NaN - a node-set is true if and only if it is non-empty - a string is true if and only if its length is non-zero + + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a boolean to its number value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a boolean to its string value. + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a node-set to its boolean value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a node-set to its number value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a node-set to its string value. + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a node to its number value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a node to its string value. + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a number to its boolean value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a number to its string value. 
+ + + + + defined(LIBXML_XPATH_ENABLED) + Converts a string to its boolean value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts a string to its number value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an XPath object to its boolean value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an XPath object to its number value + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an existing object to its string() equivalent + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the ceiling() XPath function number ceiling(number) The ceiling function returns the smallest (closest to negative infinity) number that is not less than the argument and that is an integer. + + + + + + defined(LIBXML_XPATH_ENABLED) + Compare two nodes w.r.t document order + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the compare operation on XPath objects: @arg1 < @arg2 (1, 1, ... @arg1 <= @arg2 (1, 0, ... @arg1 > @arg2 (0, 1, ... @arg1 >= @arg2 (0, 0, ... When neither object to be compared is a node-set and the operator is <=, <, >=, >, then the objects are compared by converted both objects to numbers and comparing the numbers according to IEEE 754. The < comparison will be true if and only if the first number is less than the second number. The <= comparison will be true if and only if the first number is less than or equal to the second number. The > comparison will be true if and only if the first number is greater than the second number. The >= comparison will be true if and only if the first number is greater than or equal to the second number. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Compile an XPath expression + + + + + defined(LIBXML_XPATH_ENABLED) + Evaluate the Precompiled XPath expression in the given context. + + + + + + defined(LIBXML_XPATH_ENABLED) + Applies the XPath boolean() function on the result of the given compiled expression. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the concat() XPath function string concat(string, string, string*) The concat function returns the concatenation of its arguments. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the contains() XPath function boolean contains(string, string) The contains function returns true if the first argument string contains the second argument string, and otherwise returns false. + + + + + + defined(LIBXML_XPATH_ENABLED) + Creates/frees an object cache on the XPath context. If activates XPath objects (xmlXPathObject) will be cached internally to be reused. @options: 0: This will set the XPath object caching: @value: This will set the maximum number of XPath objects to be cached per slot There are two slots for node-set and misc objects. Use <0 for the default number (100). Other values for @options have currently no effect. + + + + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an existing object to its boolean() equivalent + + + + + defined(LIBXML_XPATH_ENABLED) + A conversion function is associated to a type and used to cast the new type to primitive values. + + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an existing object to its number() equivalent + + + + + defined(LIBXML_XPATH_ENABLED) + Converts an existing object to its string() equivalent + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the count() XPath function number count(node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Compile an XPath expression + + + + + + defined(LIBXML_XPATH_ENABLED) && defined(LIBXML_DEBUG_ENABLED) + Dumps the tree of the compiled XPath expression. 
+ + + + + + + defined(LIBXML_XPATH_ENABLED) && defined(LIBXML_DEBUG_ENABLED) + Dump the content of the object for debugging purposes + + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets difference() function: node-set set:difference (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets distinct() function: node-set set:distinct (node-set) @nodes is sorted by document order, then #exslSetsDistinctSorted is called with the sorted node-set + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets distinct() function: node-set set:distinct (node-set) + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the div operation on XPath objects @arg1 / @arg2: The numeric operators convert their operands to numbers as if by calling the number function. + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the equal operation on XPath objects content: @arg1 == @arg2 + + + + + defined(LIBXML_XPATH_ENABLED) + Handle an XPath error + + + + + + defined(LIBXML_XPATH_ENABLED) + Evaluate the XPath Location Path in the given context. + + + + + + defined(LIBXML_XPATH_ENABLED) + DEPRECATED: Internal function, don't use. Parse and evaluate an XPath expression in the given context, then push the result on the context stack + + + + + defined(LIBXML_XPATH_ENABLED) + Alias for xmlXPathEval(). + + + + + + defined(LIBXML_XPATH_ENABLED) + An XPath evaluation function, the parameters are on the XPath context stack. + + + + + + defined(LIBXML_XPATH_ENABLED) + Evaluate a predicate result for the current node. A PredicateExpr is evaluated by evaluating the Expr and converting the result to a boolean. If the result is a number, the result will be converted to true if the number is equal to the position of the context node in the context node list (as returned by the position function) and will be converted to false otherwise; if the result is not a number, then the result will be converted as if by a call to the boolean function. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + Evaluate a predicate result for the current node. A PredicateExpr is evaluated by evaluating the Expr and converting the result to a boolean. If the result is a number, the result will be converted to true if the number is equal to the position of the context node in the context node list (as returned by the position function) and will be converted to false otherwise; if the result is not a number, then the result will be converted as if by a call to the boolean function. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the false() XPath function boolean false() + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the floor() XPath function number floor(number) The floor function returns the largest (closest to positive infinity) number that is not greater than the argument and that is an integer. + + + + + + defined(LIBXML_XPATH_ENABLED) + Free up the memory allocated by @comp + + + + + defined(LIBXML_XPATH_ENABLED) + Free up an xmlXPathContext + + + + + defined(LIBXML_XPATH_ENABLED) + Free the NodeSet compound (not the actual nodes !). + + + + + defined(LIBXML_XPATH_ENABLED) + Free up the xmlXPathObjectPtr @obj but don't deallocate the objects in the list contrary to xmlXPathFreeObject(). + + + + + defined(LIBXML_XPATH_ENABLED) + Free up an xmlXPathObjectPtr object. + + + + + defined(LIBXML_XPATH_ENABLED) + Free up an xmlXPathParserContext + + + + + defined(LIBXML_XPATH_ENABLED) + Prototype for callbacks used to plug function lookup in the XPath engine. + + + + + + + defined(LIBXML_XPATH_ENABLED) + An XPath function. The arguments (if any) are popped out from the context stack and the result is pushed on the stack. + + + + + + defined(LIBXML_XPATH_ENABLED) + Search in the Function array of the context for the given function. + + + + + + defined(LIBXML_XPATH_ENABLED) + Search in the Function array of the context for the given function. 
+ + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets has-same-nodes function: boolean set:has-same-node(node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the id() XPath function node-set id(object) The id function selects elements by their unique ID (see [5.2.1 Unique IDs]). When the argument to id is of type node-set, then the result is the union of the result of applying id to the string value of each of the nodes in the argument node-set. When the argument to id is of any other type, the argument is converted to a string as if by a call to the string function; the string is split into a whitespace-separated list of tokens (whitespace is any sequence of characters matching the production S); the result is a node-set containing the elements in the same document as the context node that have a unique ID equal to any of the tokens in the list. + + + + + + defined(LIBXML_XPATH_ENABLED) + DEPRECATED: Alias for xmlInitParser. + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets intersection() function: node-set set:intersection (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Checks whether a double is an infinity. + + + + + defined(LIBXML_XPATH_ENABLED) + Checks whether a double is a NaN. + + + + + defined(LIBXML_XPATH_ENABLED) + Is the name given a NodeType one. [38] NodeType ::= 'comment' | 'text' | 'processing-instruction' | 'node' + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the lang() XPath function boolean lang(string) The lang function returns true or false depending on whether the language of the context node as specified by xml:lang attributes is the same as or is a sublanguage of the language specified by the argument string. 
The language of the context node is determined by the value of the xml:lang attribute on the context node, or, if the context node has no xml:lang attribute, by the value of the xml:lang attribute on the nearest ancestor of the context node that has an xml:lang attribute. If there is no such attribute, then lang + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the last() XPath function number last() The last function returns the number of nodes in the context node list. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets leading() function: node-set set:leading (node-set, node-set) @nodes1 and @nodes2 are sorted by document order, then #exslSetsLeadingSorted is called. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets leading() function: node-set set:leading (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the local-name() XPath function string local-name(node-set?) The local-name function returns a string containing the local part of the name of the node in the argument node-set that is first in document order. If the node-set is empty or the first node has no name, an empty string is returned. If the argument is omitted it defaults to the context node. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the mod operation on XPath objects: @arg1 / @arg2 The numeric operators convert their operands to numbers as if by calling the number function. + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the multiply operation on XPath objects: The numeric operators convert their operands to numbers as if by calling the number function. + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the namespace-uri() XPath function string namespace-uri(node-set?) The namespace-uri function returns a string containing the namespace URI of the expanded name of the node in the argument node-set that is first in document order. 
If the node-set is empty, the first node has no name, or the expanded name has no namespace URI, an empty string is returned. If the argument is omitted it defaults to the context node. + + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type boolean and of value @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type string and of value @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathContext + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type double and of value @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type NodeSet and initialize it with the single Node @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type NodeSet and initialize it with the Nodeset @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathParserContext + + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type string and of value @val + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlXPathObjectPtr of type Value Tree (XSLT) and initialize it with the tree root @val + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "ancestor" direction the ancestor axis contains the ancestors of the context node; the ancestors of the context node consist of the parent of context node and the parent's parent and so on; the nodes are ordered in reverse document order; thus the parent is the first node on the axis, and the parent's parent is the second node on the axis + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "ancestor-or-self" direction he ancestor-or-self axis contains the context node and ancestors of the context node in reverse document order; thus the context node is the first node on the axis, and the context node's parent the second; parent here is defined the same as with the parent axis. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "attribute" direction TODO: support DTD inherited default attributes + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "child" direction The child axis contains the children of the context node in document order. + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "descendant" direction the descendant axis contains the descendants of the context node in document order; a descendant is a child or a child of a child and so on. + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "descendant-or-self" direction the descendant-or-self axis contains the context node and the descendants of the context node in document order; thus the context node is the first node on the axis, and the first child of the context node is the second node on the axis + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "following" direction The following axis contains all nodes in the same document as the context node that are after the context node in document order, excluding any descendants and excluding attribute nodes and namespace nodes; the nodes are ordered in document order + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "following-sibling" direction The following-sibling axis contains the following siblings of the context node in document order. + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "namespace" direction the namespace axis contains the namespace nodes of the context node; the order of nodes on this axis is implementation-defined; the axis will be empty unless the context node is an element We keep the XML namespace node at the end of the list. + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "parent" direction The parent axis contains the parent of the context node, if there is one. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "preceding" direction the preceding axis contains all nodes in the same document as the context node that are before the context node in document order, excluding any ancestors and excluding attribute nodes and namespace nodes; the nodes are ordered in reverse document order + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "preceding-sibling" direction The preceding-sibling axis contains the preceding siblings of the context node in reverse document order; the first preceding sibling is first on the axis; the sibling preceding that node is the second on the axis and so on. + + + + + + defined(LIBXML_XPATH_ENABLED) + Traversal function for the "self" direction The self axis contains just the context node itself + + + + + + defined(LIBXML_XPATH_ENABLED) + Evaluate the XPath Location Path in the given context. The node 'node' is set as the context node. The context node is not restored. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets leading() function: node-set set:leading (node-set, node-set) @nodes is sorted by document order, then #exslSetsNodeLeadingSorted is called. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets leading() function: node-set set:leading (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + add a new xmlNodePtr to an existing NodeSet + + + + + + defined(LIBXML_XPATH_ENABLED) + add a new namespace node to an existing NodeSet + + + + + + + defined(LIBXML_XPATH_ENABLED) + add a new xmlNodePtr to an existing NodeSet, optimized version when we are sure the node is not already in the set. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + checks whether @cur contains @val + + + + + + defined(LIBXML_XPATH_ENABLED) + Create a new xmlNodeSetPtr of type double and of value @val + + + + + defined(LIBXML_XPATH_ENABLED) + Removes an xmlNodePtr from an existing NodeSet + + + + + + defined(LIBXML_XPATH_ENABLED) + Namespace nodes in libxml don't match the XPath semantic. In a node set the namespace nodes are duplicated and the next pointer is set to the parent node in the XPath semantic. Check if such a node needs to be freed + + + + + defined(LIBXML_XPATH_ENABLED) + Merges two nodesets, all nodes from @val2 are added to @val1 if @val1 is NULL, a new set is created and copied from @val2 + + + + + + defined(LIBXML_XPATH_ENABLED) + Removes an entry from an existing NodeSet list. + + + + + + defined(LIBXML_XPATH_ENABLED) + Sort the node set in document order + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets trailing() function: node-set set:trailing (node-set, node-set) @nodes is sorted by document order, then #xmlXPathNodeTrailingSorted is called. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets trailing() function: node-set set:trailing (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the normalize-space() XPath function string normalize-space(string?) The normalize-space function returns the argument string with white space normalized by stripping leading and trailing whitespace and replacing sequences of whitespace characters by a single space. Whitespace characters are the same allowed by the S production in XML. If the argument is omitted, it defaults to the context node converted to a string, in other words the value of the context node. 
+ + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the equal operation on XPath objects content: @arg1 == @arg2 + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the not() XPath function boolean not(boolean) The not function returns true if its argument is false, and false otherwise. + + + + + + defined(LIBXML_XPATH_ENABLED) + Search in the namespace declaration array of the context for the given namespace name associated to the given prefix + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the number() XPath function number number(object?) + + + + + + defined(LIBXML_XPATH_ENABLED) + allocate a new copy of a given object + + + + + defined(LIBXML_XPATH_ENABLED) + Call this routine to speed up XPath computation on static documents. This stamps all the element nodes with the document order Like for line information, the order is kept in the element->content field, the value stored is actually - the node number (starting at -1) to be able to differentiate from line numbers. + + + + + defined(LIBXML_XPATH_ENABLED) + parse an XML namespace non qualified name. [NS 3] NCName ::= (Letter | '_') (NCNameChar)* [NS 4] NCNameChar ::= Letter | Digit | '.' | '-' | '_' | CombiningChar | Extender + + + + + defined(LIBXML_XPATH_ENABLED) + parse an XML name [4] NameChar ::= Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender [5] Name ::= (Letter | '_' | ':') (NameChar)* + + + + + defined(LIBXML_XPATH_ENABLED) + Pops a boolean from the stack, handling conversion if needed. Check error with #xmlXPathCheckError. + + + + + defined(LIBXML_XPATH_ENABLED) + Pops an external object from the stack, handling conversion if needed. Check error with #xmlXPathCheckError. + + + + + defined(LIBXML_XPATH_ENABLED) + Pops a node-set from the stack, handling conversion if needed. Check error with #xmlXPathCheckError. + + + + + defined(LIBXML_XPATH_ENABLED) + Pops a number from the stack, handling conversion if needed. Check error with #xmlXPathCheckError. 
+ + + + + defined(LIBXML_XPATH_ENABLED) + Pops a string from the stack, handling conversion if needed. Check error with #xmlXPathCheckError. + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the position() XPath function number position() The position function returns the position of the context node in the context node list. The first position is 1, and so the last position will be equal to last(). + + + + + + defined(LIBXML_XPATH_ENABLED) + DEPRECATED: No-op since 2.14.0. Registers all default XPath functions in this context + + + + + defined(LIBXML_XPATH_ENABLED) + Register a new function. If @f is NULL it unregisters the function + + + + + + + defined(LIBXML_XPATH_ENABLED) + Registers an external mechanism to do function lookup. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Register a new function. If @f is NULL it unregisters the function + + + + + + + + defined(LIBXML_XPATH_ENABLED) + Register a new namespace. If @ns_uri is NULL it unregisters the namespace + + + + + + + defined(LIBXML_XPATH_ENABLED) + Register a new variable value. If @value is NULL it unregisters the variable + + + + + + + defined(LIBXML_XPATH_ENABLED) + register an external mechanism to do variable lookup + + + + + + + defined(LIBXML_XPATH_ENABLED) + Register a new variable value. If @value is NULL it unregisters the variable + + + + + + + + defined(LIBXML_XPATH_ENABLED) + Cleanup the XPath context data associated to registered functions + + + + + defined(LIBXML_XPATH_ENABLED) + Cleanup the XPath context data associated to registered variables + + + + + defined(LIBXML_XPATH_ENABLED) + Cleanup the XPath context data associated to registered variables + + + + + defined(LIBXML_XPATH_ENABLED) + Initialize the context to the root of the document + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the round() XPath function number round(number) The round function returns the number that is closest to the argument and that is an integer. 
If there are two such numbers, then the one that is closest to positive infinity is returned. + + + + + + defined(LIBXML_XPATH_ENABLED) + Sets 'node' as the context node. The node must be in the same document as that associated with the context. + + + + + + defined(LIBXML_XPATH_ENABLED) + Register a callback function that will be called on errors and warnings. If handler is NULL, the error handler will be deactivated. Available since 2.13.0. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the starts-with() XPath function boolean starts-with(string, string) The starts-with function returns true if the first argument string starts with the second argument string, and otherwise returns false. + + + + + + defined(LIBXML_XPATH_ENABLED) + [30a] Float ::= Number ('e' Digits?)? [30] Number ::= Digits ('.' Digits?)? | '.' Digits [31] Digits ::= [0-9]+ Compile a Number in the string In complement of the Number expression, this function also handles negative values : '-' Number. + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the string() XPath function string string(object?) The string function converts an object to a string as follows: - A node-set is converted to a string by returning the value of the node in the node-set that is first in document order. If the node-set is empty, an empty string is returned. 
- A number is converted to a string as follows + NaN is converted to the string NaN + positive zero is converted to the string 0 + negative zero is converted to the string 0 + positive infinity is converted to the string Infinity + negative infinity is converted to the string -Infinity + if the number is an integer, the number is represented in decimal form as a Number with no decimal point and no leading zeros, preceded by a minus sign (-) if the number is negative + otherwise, the number is represented in decimal form as a Number including a decimal point with at least one digit before the decimal point and at least one digit after the decimal point, preceded by a minus sign (-) if the number is negative; there must be no leading zeros before the decimal point apart possibly from the one required digit immediately before the decimal point; beyond the one required digit after the decimal point there must be as many, but only as many, more digits as are needed to uniquely distinguish the number from all other IEEE 754 numeric values. - The boolean false value is converted to the string false. The boolean true value is converted to the string true. If the argument is omitted, it defaults to a node-set with the context node as its only member. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the string-length() XPath function number string-length(string?) The string-length returns the number of characters in the string (see [3.6 Strings]). If the argument is omitted, it defaults to the context node converted to a string, in other words the value of the context node. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the subtraction operation on XPath objects: The numeric operators convert their operands to numbers as if by calling the number function. 
+ + + + + defined(LIBXML_XPATH_ENABLED) + Implement the substring-after() XPath function string substring-after(string, string) The substring-after function returns the substring of the first argument string that follows the first occurrence of the second argument string in the first argument string, or the empty string if the first argument string does not contain the second argument string. For example, substring-after("1999/04/01","/") returns 04/01, and substring-after("1999/04/01","19") returns 99/04/01. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the substring-before() XPath function string substring-before(string, string) The substring-before function returns the substring of the first argument string that precedes the first occurrence of the second argument string in the first argument string, or the empty string if the first argument string does not contain the second argument string. For example, substring-before("1999/04/01","/") returns 1999. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the substring() XPath function string substring(string, number, number?) The substring function returns the substring of the first argument starting at the position specified in the second argument with length specified in the third argument. For example, substring("12345",2,3) returns "234". If the third argument is not specified, it returns the substring starting at the position specified in the second argument and continuing to the end of the string. For example, substring("12345",2) returns "2345". More precisely, each character in the string (see [3.6 Strings]) is considered to have a numeric position: the position of the first character is 1, the position of the second character is 2 and so on. 
The returned substring contains those characters for which the position of the character is greater than or equal to the second argument and, if the third argument is specified, less than the sum of the second and third arguments; the comparisons and addition used for the above follow the standard IEEE 754 rules. Thus: - substring("12345", 1.5, 2.6) returns "234" - substring("12345", 0, 3) returns "12" - substring("12345", 0 div 0, 3) returns "" - substring("12345", 1, 0 div 0) returns "" - substring("12345", -42, 1 div 0) returns "12345" - substring("12345", -1 div 0, 1 div 0) returns "" + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the sum() XPath function number sum(node-set) The sum function returns the sum of the values of the nodes in the argument node-set. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets trailing() function: node-set set:trailing (node-set, node-set) @nodes1 and @nodes2 are sorted by document order, then #xmlXPathTrailingSorted is called. + + + + + + defined(LIBXML_XPATH_ENABLED) + Implements the EXSLT - Sets trailing() function: node-set set:trailing (node-set, node-set) + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the translate() XPath function string translate(string, string, string) The translate function returns the first argument string with occurrences of characters in the second argument string replaced by the character at the corresponding position in the third argument string. For example, translate("bar","abc","ABC") returns the string BAr. If there is a character in the second argument string with no character at a corresponding position in the third argument string (because the second argument string is longer than the third argument string), then occurrences of that character in the first argument string are removed. 
For example, translate("--aaa--","abc-","ABC") + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the true() XPath function boolean true() + + + + + + defined(LIBXML_XPATH_ENABLED) + Implement the unary - operation on an XPath object The numeric operators convert their operands to numbers as if by calling the number function. + + + + + defined(LIBXML_XPATH_ENABLED) + Search in the Variable array of the context for the given variable value. + + + + + + defined(LIBXML_XPATH_ENABLED) + Prototype for callbacks used to plug variable lookup in the XPath engine. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Search in the Variable array of the context for the given variable value. + + + + + + + defined(LIBXML_XPATH_ENABLED) + Wraps a string into an XPath object. + + + + + defined(LIBXML_XPATH_ENABLED) + Wraps the @val data into an XPath object. + + + + + defined(LIBXML_XPATH_ENABLED) + Wrap the Nodeset @val in a new xmlXPathObjectPtr + + + + + defined(LIBXML_XPATH_ENABLED) + Wraps the @val string into an XPath object. + + + + + defined(LIBXML_XPATH_ENABLED) + Formats an error message. + + + + + + + + defined(LIBXML_XPTR_ENABLED) + Evaluate the XPath Location Path in the given context. 
+ + + + + + defined(LIBXML_XPTR_ENABLED) + Create a new XPointer context + + + + + + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/doc/meson.build b/local-test-libxml2-delta-02/afc-libxml2/doc/meson.build new file mode 100644 index 0000000000000000000000000000000000000000..1ae3eccd3732ef6143c31525e4aedd112c569b8f --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/doc/meson.build @@ -0,0 +1,12 @@ + +install_man(files(['xml2-config.1', 'xmlcatalog.1', 'xmllint.1'])) + + +tutorial_files = files( + 'xmlcatalog.html', + 'xmllint.html', +) + +install_data(tutorial_files, install_dir: dir_doc) + +subdir('devhelp') diff --git a/local-test-libxml2-delta-02/afc-libxml2/doc/xmllint.1 b/local-test-libxml2-delta-02/afc-libxml2/doc/xmllint.1 new file mode 100644 index 0000000000000000000000000000000000000000..2e69cffb620c353007fe73e203ea90702df97a69 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/doc/xmllint.1 @@ -0,0 +1,686 @@ +'\" t +.\" Title: xmllint +.\" Author: John Fleck +.\" Generator: DocBook XSL Stylesheets vsnapshot +.\" Date: 12/26/2024 +.\" Manual: xmllint Manual +.\" Source: libxml2 +.\" Language: English +.\" +.TH "XMLLINT" "1" "12/26/2024" "libxml2" "xmllint Manual" +.\" ----------------------------------------------------------------- +.\" * Define some portability stuff +.\" ----------------------------------------------------------------- +.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.\" http://bugs.debian.org/507673 +.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html +.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.\" ----------------------------------------------------------------- +.\" * set default formatting +.\" ----------------------------------------------------------------- +.\" disable hyphenation +.nh +.\" disable justification (adjust text to left margin only) +.ad l +.\" 
----------------------------------------------------------------- +.\" * MAIN CONTENT STARTS HERE * +.\" ----------------------------------------------------------------- +.SH "NAME" +xmllint \- command line XML tool +.SH "SYNOPSIS" +.HP \w'\fBxmllint\fR\ 'u +\fBxmllint\fR [\fB\-\-version\fR | \fB\-\-debug\fR | \fB\-\-quiet\fR | \fB\-\-shell\fR | \fB\-\-xpath\ "\fR\fB\fIXPath_expression\fR\fR\fB"\fR | \fB\-\-debugent\fR | \fB\-\-copy\fR | \fB\-\-recover\fR | \fB\-\-huge\fR | \fB\-\-nocompact\fR | \fB\-\-nodefdtd\fR | \fB\-\-nodict\fR | \fB\-\-noenc\fR | \fB\-\-noent\fR | \fB\-\-nofixup\-base\-uris\fR | \fB\-\-noout\fR | \fB\-\-nonet\fR | \fB\-\-path\ "\fR\fB\fIPATH(S)\fR\fR\fB"\fR | \fB\-\-load\-trace\fR | \fB\-\-htmlout\fR | \fB\-\-nowrap\fR | \fB\-\-valid\fR | \fB\-\-postvalid\fR | \fB\-\-dtdvalid\ \fR\fB\fIURL\fR\fR | \fB\-\-dtdvalidfpi\ \fR\fB\fIFPI\fR\fR | \fB\-\-timing\fR | \fB\-\-output\ \fR\fB\fIFILE\fR\fR | \fB\-\-repeat\fR | \fB\-\-insert\fR | \fB\-\-compress\fR | \fB\-\-html\fR | \fB\-\-xmlout\fR | \fB\-\-push\fR | \fB\-\-memory\fR | \fB\-\-max\-ampl\ \fR\fB\fIINTEGER\fR\fR | \fB\-\-maxmem\ \fR\fB\fINBBYTES\fR\fR | \fB\-\-nowarning\fR | \fB\-\-noblanks\fR | \fB\-\-nocdata\fR | \fB\-\-format\fR | \fB\-\-pretty\ \fR\fB\fIINTEGER\fR\fR | \fB\-\-encode\ \fR\fB\fIENCODING\fR\fR | \fB\-\-dropdtd\fR | \fB\-\-nsclean\fR | \fB\-\-testIO\fR | \fB\-\-catalogs\fR | \fB\-\-nocatalogs\fR | \fB\-\-auto\fR | \fB\-\-xinclude\fR | \fB\-\-noxincludenode\fR | \fB\-\-loaddtd\fR | \fB\-\-dtdattr\fR | \fB\-\-stream\fR | \fB\-\-walker\fR | \fB\-\-pattern\ \fR\fB\fIPATTERNVALUE\fR\fR | \fB\-\-relaxng\ \fR\fB\fISCHEMA\fR\fR | \fB\-\-schema\ \fR\fB\fISCHEMA\fR\fR | \fB\-\-schematron\ \fR\fB\fISCHEMA\fR\fR | \fB\-\-c14n\fR | \fB\-\-c14n11\fR | \fB\-\-exc\-c14n\fR | \fB\-\-pedantic\fR | \fB\-\-sax\fR | \fB\-\-sax1\fR | \fB\-\-oldxml10\fR] {\fIXML\-FILE(S)\fR... 
| \-} +.SH "DESCRIPTION" +.PP +The +\fBxmllint\fR +program parses one or more +XML +files, specified on the command line as +\fIXML\-FILE\fR +(or the standard input if the filename provided is +\fB\-\fR +)\&. It prints various types of output, depending upon the options selected\&. It is useful for detecting errors both in +XML +code and in the +XML +parser itself\&. +.PP +\fBxmllint\fR +is included in +\fBlibxml\fR(3)\&. +.SH "OPTIONS" +.PP +\fBxmllint\fR +accepts the following options (in alphabetical order): +.PP +\fB\-\-auto\fR +.RS 4 +Generate a small document for testing purposes\&. +.RE +.PP +\fB\-\-catalogs\fR +.RS 4 +Use the +SGML +catalog(s) from +\fBSGML_CATALOG_FILES\fR\&. Otherwise +XML +catalogs starting from +/etc/xml/catalog +or, more specifically, +${sysconfdir}/xml/catalog +are used by default\&. +.RE +.PP +\fB\-\-compress\fR +.RS 4 +Turn on +\fBgzip\fR(1) +compression of output\&. +.RE +.PP +\fB\-\-copy\fR +.RS 4 +Test the internal copy implementation\&. +.RE +.PP +\fB\-\-c14n\fR, \fB\-\-c14n11\fR, \fB\-\-exc\-c14n\fR +.RS 4 +Use the W3C +XML +Canonicalisation (C14N) to serialize the result of parsing to +stdout\&. It keeps comments in the result\&. +.RE +.PP +\fB\-\-dtdvalid \fR\fB\fIURL\fR\fR +.RS 4 +Use the +DTD +specified by an +\fIURL\fR +for validation\&. +.RE +.PP +\fB\-\-dtdvalidfpi \fR\fB\fIFPI\fR\fR +.RS 4 +Use the +DTD +specified by a Formal Public Identifier +\fIFPI\fR +for validation, note that this will require a catalog exporting that Formal Public Identifier to work\&. +.RE +.PP +\fB\-\-debug\fR +.RS 4 +Parse a file and output an annotated tree of the in\-memory version of the document\&. +.RE +.PP +\fB\-\-debugent\fR +.RS 4 +Debug the entities defined in the document\&. +.RE +.PP +\fB\-\-dropdtd\fR +.RS 4 +Remove +DTD +from output\&. +.RE +.PP +\fB\-\-dtdattr\fR +.RS 4 +Fetch external +DTD +and populate the tree with inherited attributes\&. +.RE +.PP +\fB\-\-encode \fR\fB\fIENCODING\fR\fR +.RS 4 +Output in the given encoding\&. 
+ Note that this works for full document not fragments or result from XPath queries\&. +.RE +.PP +\fB\-\-format\fR +.RS 4 +Reformat and reindent the output\&. The +\fBXMLLINT_INDENT\fR +environment variable controls the indentation\&. The default value is two spaces (" ")\&. +.RE +.PP +\fB\-\-html\fR +.RS 4 +Use the +HTML +parser\&. +.RE +.PP +\fB\-\-htmlout\fR +.RS 4 +Output results as an +HTML +file\&. This causes +\fBxmllint\fR +to output the necessary +HTML +tags surrounding the result tree output so the results can be displayed/viewed in a browser\&. +.RE +.PP +\fB\-\-huge\fR +.RS 4 +Ignore some hardcoded parser limits\&. +.RE +.PP +\fB\-\-insert\fR +.RS 4 +Test for valid insertions\&. +.RE +.PP +\fB\-\-loaddtd\fR +.RS 4 +Fetch an external +DTD\&. +.RE +.PP +\fB\-\-load\-trace\fR +.RS 4 +Display all the documents loaded during the processing to +stderr\&. +.RE +.PP +\fB\-\-max\-ampl \fR\fB\fIINTEGER\fR\fR +.RS 4 +Set the maximum amplification factor which protects against exponential entity expansion ("billion laughs")\&. The default value is 5\&. Documents making heavy use of entity expansion may require a higher value\&. +.RE +.PP +\fB\-\-maxmem \fR\fB\fINNBYTES\fR\fR +.RS 4 +Test the parser memory support\&. +\fINNBYTES\fR +is the maximum number of bytes the library is allowed to allocate\&. This can also be used to make sure batch processing of +XML +files will not exhaust the virtual memory of the server running them\&. +.RE +.PP +\fB\-\-memory\fR +.RS 4 +Parse from memory\&. +.RE +.PP +\fB\-\-noblanks\fR +.RS 4 +Drop ignorable blank spaces\&. +.RE +.PP +\fB\-\-nocatalogs\fR +.RS 4 +Do not use any catalogs\&. +.RE +.PP +\fB\-\-nocdata\fR +.RS 4 +Substitute CDATA section by equivalent text nodes\&. +.RE +.PP +\fB\-\-nocompact\fR +.RS 4 +Do not generate compact text nodes (parser option XML_PARSE_COMPACT)\&. Only for debugging\&. +.RE +.PP +\fB\-\-nodefdtd\fR +.RS 4 +Do not set default HTML doctype (parser option HTML_PARSE_NODEFDTD)\&. 
+.RE +.PP +\fB\-\-nodict\fR +.RS 4 +Don\*(Aqt use dictionaries (parser option XML_PARSE_NODICT)\&. Only for debugging\&. +.RE +.PP +\fB\-\-noenc\fR +.RS 4 +Ignore encoding declaration (parser option XML_PARSE_IGNORE_ENC)\&. +.RE +.PP +\fB\-\-noent\fR +.RS 4 +Substitute entity values for entity references\&. By default, +\fBxmllint\fR +leaves entity references in place\&. +.RE +.PP +\fB\-\-nofixup\-base\-uris\fR +.RS 4 +Don\*(Aqt fix xml:base URIs when processing XIncludes (parser option XML_PARSE_NOBASEFIX)\&. +.RE +.PP +\fB\-\-nonet\fR +.RS 4 +Do not use the Internet to fetch +DTDs or entities\&. +.RE +.PP +\fB\-\-noout\fR +.RS 4 +Suppress output\&. By default, +\fBxmllint\fR +outputs the result tree\&. +.RE +.PP +\fB\-\-nowarning\fR +.RS 4 +Do not emit warnings from the parser and/or validator\&. +.RE +.PP +\fB\-\-nowrap\fR +.RS 4 +Do not output +HTML +doc wrapper\&. +.RE +.PP +\fB\-\-noxincludenode\fR +.RS 4 +Do XInclude processing but do not generate XInclude start and end nodes\&. +.RE +.PP +\fB\-\-nsclean\fR +.RS 4 +Remove redundant namespace declarations\&. +.RE +.PP +\fB\-\-oldxml10\fR +.RS 4 +Use deprecated parsing rules before XML 1\&.0, 5th edition\&. +.RE +.PP +\fB\-\-output \fR\fB\fIFILE\fR\fR +.RS 4 +Define a file path where +\fBxmllint\fR +will save the result of parsing\&. Usually the programs build a tree and save it on +stdout, with this option the result +XML +instance will be saved onto a file\&. +.RE +.PP +\fB\-\-path "\fR\fB\fIPATH(S)\fR\fR\fB"\fR +.RS 4 +Use the (space\- or colon\-separated) list of filesystem paths specified by +\fIPATHS\fR +to load +DTDs or entities\&. Enclose space\-separated lists by quotation marks\&. +.RE +.PP +\fB\-\-pattern \fR\fB\fIPATTERNVALUE\fR\fR +.RS 4 +Used to exercise the pattern recognition engine, which can be used with the reader interface to the parser\&. It allows to select some nodes in the document based on an XPath (subset) expression\&. Used for debugging\&. 
+.RE +.PP +\fB\-\-pedantic\fR +.RS 4 +Enable additional warnings\&. +.RE +.PP +\fB\-\-postvalid\fR +.RS 4 +Validate after parsing has completed\&. +.RE +.PP +\fB\-\-pretty \fR\fB\fIINTEGER\fR\fR +.RS 4 +Value 0 means no formatting, 1 means XML_SAVE_FORMAT (same as \-\-format), 2 means XML_SAVE_WSNONSIG\&. +.RE +.PP +\fB\-\-push\fR +.RS 4 +Use the push mode of the parser\&. +.RE +.PP +\fB\-\-quiet\fR +.RS 4 +Don\*(Aqt print informational messages to stderr\&. +.RE +.PP +\fB\-\-recover\fR +.RS 4 +Output any parsable portions of an invalid document\&. +.RE +.PP +\fB\-\-relaxng \fR\fB\fISCHEMA\fR\fR +.RS 4 +Use RelaxNG file named +\fISCHEMA\fR +for validation\&. +.RE +.PP +\fB\-\-repeat\fR +.RS 4 +Repeat 100 times, for timing or profiling\&. +.RE +.PP +\fB\-\-sax\fR +.RS 4 +Print SAX callbacks (only for debugging)\&. +.RE +.PP +\fB\-\-sax1\fR +.RS 4 +Use deprecated SAX1 interface (only for debugging)\&. +.RE +.PP +\fB\-\-schema \fR\fB\fISCHEMA\fR\fR +.RS 4 +Use a W3C +XML +Schema file named +\fISCHEMA\fR +for validation\&. +.RE +.PP +\fB\-\-schematron \fR\fB\fISCHEMA\fR\fR +.RS 4 +Use a Schematron file named +\fISCHEMA\fR +for validation\&. +.RE +.PP +\fB\-\-shell\fR +.RS 4 +Run a navigating shell\&. Details on available commands in shell mode are below (see +the section called \(lqSHELL COMMANDS\(rq)\&. +.RE +.PP +\fB\-\-xpath "\fR\fB\fIXPath_expression\fR\fR\fB"\fR +.RS 4 +Run an XPath expression given as argument and print the result\&. In case of a nodeset result, each node in the node set is serialized in full in the output\&. In case of an empty node set the "XPath set is empty" result will be shown and exit code 11 will be returned\&. This feature is EXPERIMENTAL\&. Implementation details can change without further notice\&. +.RE +.PP +\fB\-\-stream\fR +.RS 4 +Use streaming +API +\- useful when used in combination with +\fB\-\-relaxng\fR +or +\fB\-\-valid\fR +options for validation of files that are too large to be held in memory\&. 
+.RE +.PP +\fB\-\-testIO\fR +.RS 4 +Test user input/output support\&. +.RE +.PP +\fB\-\-timing\fR +.RS 4 +Output information about the time it takes +\fBxmllint\fR +to perform the various steps\&. +.RE +.PP +\fB\-\-valid\fR +.RS 4 +Determine if the document is a valid instance of the included Document Type Definition (DTD)\&. A +DTD +to be validated against also can be specified at the command line using the +\fB\-\-dtdvalid\fR +option\&. By default, +\fBxmllint\fR +also checks to determine if the document is well\-formed\&. +.RE +.PP +\fB\-\-version\fR +.RS 4 +Display the version of +\fBlibxml\fR(3) +used\&. +.RE +.PP +\fB\-\-walker\fR +.RS 4 +Test the walker module, which is a reader interface but for a document tree, instead of using the reader +API +on an unparsed document it works on an existing in\-memory tree\&. Used for debugging\&. +.RE +.PP +\fB\-\-xinclude\fR +.RS 4 +Do XInclude processing\&. +.RE +.PP +\fB\-\-xmlout\fR +.RS 4 +Used in conjunction with +\fB\-\-html\fR\&. Usually when +HTML +is parsed the document is saved with the +HTML +serializer\&. But with this option the resulting document is saved with the +XML +serializer\&. This is primarily used to generate +XHTML +from +HTML +input\&. +.RE +.SH "SHELL COMMANDS" +.PP +\fBxmllint\fR +offers an interactive shell mode invoked with the +\fB\-\-shell\fR +command\&. Available commands in shell mode include (in alphabetical order): +.PP +\fBbase\fR +.RS 4 +Display +XML +base of the node\&. +.RE +.PP +\fBbye\fR +.RS 4 +Leave the shell\&. +.RE +.PP +\fBcat \fR\fB\fINODE\fR\fR +.RS 4 +Display the given node or the current one\&. +.RE +.PP +\fBcd \fR\fB\fIPATH\fR\fR +.RS 4 +Change the current node to the given path (if unique) or root if no argument is given\&. +.RE +.PP +\fBdir \fR\fB\fIPATH\fR\fR +.RS 4 +Dumps information about the node (namespace, attributes, content)\&. +.RE +.PP +\fBdu \fR\fB\fIPATH\fR\fR +.RS 4 +Show the structure of the subtree under the given path or the current node\&. 
+.RE +.PP +\fBexit\fR +.RS 4 +Leave the shell\&. +.RE +.PP +\fBhelp\fR +.RS 4 +Show this help\&. +.RE +.PP +\fBload \fR\fB\fIFILENAME\fR\fR +.RS 4 +Load a new document with the given filename\&. +.RE +.PP +\fBls \fR\fB\fIPATH\fR\fR +.RS 4 +List contents of the given path or the current directory\&. +.RE +.PP +\fBpwd\fR +.RS 4 +Display the path to the current node\&. +.RE +.PP +\fBquit\fR +.RS 4 +Leave the shell\&. +.RE +.PP +\fBsave \fR\fB\fIFILENAME\fR\fR +.RS 4 +Save the current document to the given filename or to the original name\&. +.RE +.PP +\fBvalidate\fR +.RS 4 +Check the document for errors\&. +.RE +.PP +\fBwrite \fR\fB\fIFILENAME\fR\fR +.RS 4 +Write the current node to the given filename\&. +.RE +.SH "ENVIRONMENT" +.PP +\fBSGML_CATALOG_FILES\fR +.RS 4 +SGML +catalog behavior can be changed by redirecting queries to the user\*(Aqs own set of catalogs\&. This can be done by setting the +\fBSGML_CATALOG_FILES\fR +environment variable to a list of catalogs\&. An empty one should deactivate loading the default catalog\&. +.RE +.PP +\fBXML_CATALOG_FILES\fR +.RS 4 +XML +catalog behavior can be changed by redirecting queries to the user\*(Aqs own set of catalogs\&. This can be done by setting the +\fBXML_CATALOG_FILES\fR +environment variable to a space\-separated list of catalogs\&. Use percent\-encoding to escape spaces or other characters\&. An empty variable should deactivate loading the default catalog\&. +.RE +.PP +\fBXML_DEBUG_CATALOG\fR +.RS 4 +Setting the environment variable +\fBXML_DEBUG_CATALOG\fR +to +\fInon\-zero\fR +using the +\fBexport\fR +command outputs debugging information related to catalog operations\&. +.RE +.PP +\fBXMLLINT_INDENT\fR +.RS 4 +Setting the environment variable +\fBXMLLINT_INDENT\fR +controls the indentation\&. The default value is two spaces " "\&. +.RE +.SH "DIAGNOSTICS" +.PP +\fBxmllint\fR +return codes provide information that can be used when calling it from scripts\&. 
+.PP +\fB0\fR +.RS 4 +No error +.RE +.PP +\fB1\fR +.RS 4 +Unclassified +.RE +.PP +\fB2\fR +.RS 4 +Error in +DTD +.RE +.PP +\fB3\fR +.RS 4 +Validation error +.RE +.PP +\fB4\fR +.RS 4 +Validation error +.RE +.PP +\fB5\fR +.RS 4 +Error in schema compilation +.RE +.PP +\fB6\fR +.RS 4 +Error writing output +.RE +.PP +\fB7\fR +.RS 4 +Error in pattern (generated when +\fB\-\-pattern\fR +option is used) +.RE +.PP +\fB9\fR +.RS 4 +Out of memory error +.RE +.PP +\fB10\fR +.RS 4 +XPath evaluation error +.RE +.PP +\fB11\fR +.RS 4 +XPath result is empty +.RE +.SH "SEE ALSO" +.PP +\fBlibxml\fR(3) +.PP +More information can be found at +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +.sp -1 +.IP \(bu 2.3 +.\} +\fBlibxml\fR(3) +web page +\m[blue]\fB\%https://gitlab.gnome.org/GNOME/libxml2\fR\m[] +.RE +.sp +.SH "AUTHORS" +.PP +\fBJohn Fleck\fR <\&jfleck@inkstain\&.net\&> +.RS 4 +Author. +.RE +.PP +\fBZiying Sherwin\fR <\&sherwin@nlm\&.nih\&.gov\&> +.RS 4 +Author. +.RE +.PP +\fBHeiko Rupp\fR <\&hwr@pilhuhn\&.de\&> +.RS 4 +Author. 
+.RE +.SH "COPYRIGHT" +.br +Copyright \(co 2001, 2004 +.br diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/att11 b/local-test-libxml2-delta-02/afc-libxml2/test/att11 new file mode 100644 index 0000000000000000000000000000000000000000..32faaf30c1f5586f9e6c43ef5549ffe9edf0484c --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/att11 @@ -0,0 +1,13 @@ + + + + + +]> + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/att7 b/local-test-libxml2-delta-02/afc-libxml2/test/att7 new file mode 100644 index 0000000000000000000000000000000000000000..cd31c3d24dea84156c7db303da53e55926ce102b --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/att7 @@ -0,0 +1,10 @@ + + + +"> +]> + + + &test.ent; + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/att9 b/local-test-libxml2-delta-02/afc-libxml2/test/att9 new file mode 100644 index 0000000000000000000000000000000000000000..f06b531a98e7c83c6f54a9381bf427de33946fdc --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/att9 @@ -0,0 +1,5 @@ + + +]> + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/attrib.xml b/local-test-libxml2-delta-02/afc-libxml2/test/attrib.xml new file mode 100644 index 0000000000000000000000000000000000000000..5be33fa45b3a343fd29564bf762b4e802d6e0bd1 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/attrib.xml @@ -0,0 +1 @@ + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/boundaries1.xml b/local-test-libxml2-delta-02/afc-libxml2/test/boundaries1.xml new file mode 100644 index 0000000000000000000000000000000000000000..c4301cd3883b7f1610e081686a37ac5775268d5b --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/boundaries1.xml @@ -0,0 +1,16 @@ + +"> + '> + ]> --> +] > + + c1 --> + +text&a;text + + + c2 --> + + + c3 --> diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/cdata-3-byte-UTF-8.xml b/local-test-libxml2-delta-02/afc-libxml2/test/cdata-3-byte-UTF-8.xml new file mode 100644 index 
0000000000000000000000000000000000000000..b959a1278d515066001bcb4fad0a13039b6712a1 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/cdata-3-byte-UTF-8.xml @@ -0,0 +1,7 @@ + + + +

+

+

+
diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/comment2.xml b/local-test-libxml2-delta-02/afc-libxml2/test/comment2.xml new file mode 100644 index 0000000000000000000000000000000000000000..9e122ecf0fb4d904a7afcee30e37a8863fd06ff8 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/comment2.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dav1 b/local-test-libxml2-delta-02/afc-libxml2/test/dav1 new file mode 100644 index 0000000000000000000000000000000000000000..cbfd4c428d61fb9a3bd484b75fe11fd171490501 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dav1 @@ -0,0 +1,25 @@ + + + + + + Box type A + + + J.J. Dingleheimerschmidt + + + HTTP/1.1 200 OK + + + + + + + HTTP/1.1 403 Forbidden + The user does not have access to the DingALing property. + + + There has been an access violation error. + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dav12 b/local-test-libxml2-delta-02/afc-libxml2/test/dav12 new file mode 100644 index 0000000000000000000000000000000000000000..d8d03fe93082a69c01e9f8c90a18d6a008e3f7bd --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dav12 @@ -0,0 +1,2 @@ + +http://www.ics.uci.edu/~ejw/contact.html diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dav13 b/local-test-libxml2-delta-02/afc-libxml2/test/dav13 new file mode 100644 index 0000000000000000000000000000000000000000..f44ae382789dc63a29fae2ad3422d79ff5adbc57 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dav13 @@ -0,0 +1,16 @@ + + + + + http://webdav.sb.aol.com/workspace/webdav/proposal.doc + + + http://webdav.sb.aol.com/workspace/webdav/ + + HTTP/1.1 202 Accepted + + + http://foo.bar/blah + HTTP/1.1 403 Forbidden + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dav19 b/local-test-libxml2-delta-02/afc-libxml2/test/dav19 new file mode 100644 index 0000000000000000000000000000000000000000..9535ffcf280b37b7584b141df229229dff6d2ee7 --- /dev/null +++ 
b/local-test-libxml2-delta-02/afc-libxml2/test/dav19 @@ -0,0 +1,18 @@ + + + + + + + Write + Exclusive + + + Write + Shared + + + + HTTP/1.1 200 OK + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dav7 b/local-test-libxml2-delta-02/afc-libxml2/test/dav7 new file mode 100644 index 0000000000000000000000000000000000000000..ec4a9525d8e6f0e30f6dcec2fb445ce9afafdbcf --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dav7 @@ -0,0 +1,16 @@ + + + + http://www.foo.bar/container/resource1 + http://www.foo.bar/container/resource2 + HTTP/1.1 200 OK + + + http://www.foo.bar/container/ + HTTP/1.1 420 Method Failure + + + http://www.foo.bar/container/resource3 + HTTP/1.1 412 Precondition Failed + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/def-xml-attr.xml b/local-test-libxml2-delta-02/afc-libxml2/test/def-xml-attr.xml new file mode 100644 index 0000000000000000000000000000000000000000..531a854ab3bb226a5ac6a440798d86ccad22f47c --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/def-xml-attr.xml @@ -0,0 +1,7 @@ + + +]> + + Ja + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/defattr2.xml b/local-test-libxml2-delta-02/afc-libxml2/test/defattr2.xml new file mode 100644 index 0000000000000000000000000000000000000000..ab507092917f1808530fb63b153c18ea7b3c3e37 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/defattr2.xml @@ -0,0 +1,8 @@ + + +]> + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dtd11 b/local-test-libxml2-delta-02/afc-libxml2/test/dtd11 new file mode 100644 index 0000000000000000000000000000000000000000..bdd512be5a502b3daa4a7a7ca216e06a8e2aa9fe --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dtd11 @@ -0,0 +1,5 @@ + + +]> + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dtd12 b/local-test-libxml2-delta-02/afc-libxml2/test/dtd12 new file mode 100644 index 0000000000000000000000000000000000000000..a0fbf229495c08e87cd4cc790d4ca4e7a0b6b7e4 --- /dev/null +++ 
b/local-test-libxml2-delta-02/afc-libxml2/test/dtd12 @@ -0,0 +1,5 @@ + + +]> +&WhatHeSaid; diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dtd13 b/local-test-libxml2-delta-02/afc-libxml2/test/dtd13 new file mode 100644 index 0000000000000000000000000000000000000000..d18d00c9cc23e5b0a39cf66c72e132c007d0e84d --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dtd13 @@ -0,0 +1,6 @@ + + +]> + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/dtd7 b/local-test-libxml2-delta-02/afc-libxml2/test/dtd7 new file mode 100644 index 0000000000000000000000000000000000000000..b151c21c30ebbe5e7ae3352e3a64c72b77eb6386 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/dtd7 @@ -0,0 +1,6 @@ + + + +]> +This is a valid document diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/ent1 b/local-test-libxml2-delta-02/afc-libxml2/test/ent1 new file mode 100644 index 0000000000000000000000000000000000000000..3e24756fb379fb153d776f9d1c33f4d689d9ea36 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/ent1 @@ -0,0 +1,7 @@ + + +]> + + &xml; + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/ent13 b/local-test-libxml2-delta-02/afc-libxml2/test/ent13 new file mode 100644 index 0000000000000000000000000000000000000000..f5ebd89121981eca6c2cec38334c1b5cb16ccdc9 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/ent13 @@ -0,0 +1,6 @@ + + +]> +a + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/ent9 b/local-test-libxml2-delta-02/afc-libxml2/test/ent9 new file mode 100644 index 0000000000000000000000000000000000000000..5db63babbeb41fc432650f24bc0434ae60f3b9ff --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/ent9 @@ -0,0 +1,61 @@ +,,,"> +]> + + &test1; +

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+

WE need lot of garbage now to trigger the problem

+ &test1; +
+ diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/japancrlf.xml b/local-test-libxml2-delta-02/afc-libxml2/test/japancrlf.xml new file mode 100644 index 0000000000000000000000000000000000000000..480cb2d44f7e9b4b470518965763413e3fc8e538 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/japancrlf.xml @@ -0,0 +1,6 @@ +<入力メッセージ + xmlns="http://schemas.cordys.com/webapps/1.0/bpm/c8c8b82a-0ac0-3d19-01e2-bda74af9b826"> + + \ No newline at end of file diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/ns2 b/local-test-libxml2-delta-02/afc-libxml2/test/ns2 new file mode 100644 index 0000000000000000000000000000000000000000..80aaf9452ad481876603d811ab3fc1befde565fd --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/ns2 @@ -0,0 +1,3 @@ + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/svg2 b/local-test-libxml2-delta-02/afc-libxml2/test/svg2 new file mode 100644 index 0000000000000000000000000000000000000000..aa1adac0ee7b506a35585fdc57a8c3e9f41c9b49 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/svg2 @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + Java Font definition:Dialog 0 + + + Java Font definition:Helvetica 0 + + + + this is text + + + + Java Font definition:Dialog 0 + + + Java Font definition:Helvetica 700 + + + + sadfsadfsad + + + + + + + + + + + Java Font definition:Dialog 700 + + + Java Font definition:Dialog 700 + + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/svg3 b/local-test-libxml2-delta-02/afc-libxml2/test/svg3 new file mode 100644 index 0000000000000000000000000000000000000000..9b0de5a6f928c92bdccf2d65a73cc6db6490ad94 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/svg3 @@ -0,0 +1,722 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/utf16bom.xml b/local-test-libxml2-delta-02/afc-libxml2/test/utf16bom.xml new file mode 100644 index 0000000000000000000000000000000000000000..1916dc1ee83b74ade66fd747d5f58a8ee414b08d Binary files /dev/null and b/local-test-libxml2-delta-02/afc-libxml2/test/utf16bom.xml differ diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/wap.xml b/local-test-libxml2-delta-02/afc-libxml2/test/wap.xml new file mode 100644 index 0000000000000000000000000000000000000000..27955093dc0bd626828850693d03b967df1b71d1 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/wap.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + +

If automatic testing failed, select Failed + + + + + .

+
+ +
diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/xhtml1 b/local-test-libxml2-delta-02/afc-libxml2/test/xhtml1 new file mode 100644 index 0000000000000000000000000000000000000000..70e3a343eaf175209bf57ddba8f9ccf619b5a1a0 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/xhtml1 @@ -0,0 +1,34 @@ + + + + + + Virtual Library + + + + +

Moved to example.org.

+ + + foo + +

+ +

coucou

+

salut

+ +

test

+ +
+
Internet Engineering Task Force
+
An organization which establishes technical standards for the Internet
+
+ + + + diff --git a/local-test-libxml2-delta-02/afc-libxml2/test/xml2 b/local-test-libxml2-delta-02/afc-libxml2/test/xml2 new file mode 100644 index 0000000000000000000000000000000000000000..922314beb936cf5e39c895ba6168ea6ff6993228 --- /dev/null +++ b/local-test-libxml2-delta-02/afc-libxml2/test/xml2 @@ -0,0 +1,8 @@ + + + +' > +%xx; +]> +This sample shows a &tricky; method. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..580cd305b56be53270602277b76a21ea664e84b4 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/Dockerfile @@ -0,0 +1,26 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +# Copy/Run this now to make the cache more resilient. 
+COPY fuzzbench_install_dependencies /usr/local/bin +RUN fuzzbench_install_dependencies + +ENV OSS_FUZZ_ON_DEMAND=1 + +COPY fuzzbench_build fuzzbench_run_fuzzer fuzzbench_measure /usr/local/bin/ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build new file mode 100644 index 0000000000000000000000000000000000000000..7231fe755ff0c02f9ff6dba1e5235505095e7aa6 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build @@ -0,0 +1,28 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# TODO(metzman): Do this in a docket image so we don't need to waste time +# reinstalling. +PYTHONPATH=$FUZZBENCH_PATH python3 -B -u -c "from fuzzers.$FUZZING_ENGINE import fuzzer; fuzzer.build()" + +if [ "$FUZZING_ENGINE" = "coverage" ]; then + cd $OUT + mkdir -p filestore/oss-fuzz-on-demand/coverage-binaries + # We expect an error regarding leading slashes. Just assume this step succeeds. + # TODO(metzman): Fix this when I get a chance. 
+ tar -czvf filestore/oss-fuzz-on-demand/coverage-binaries/coverage-build-$PROJECT.tar.gz * /src /work || exit 0 +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies new file mode 100644 index 0000000000000000000000000000000000000000..94c0b8764805a90e348eed66d70816c723e42297 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies @@ -0,0 +1,22 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +apt-get update && apt-get install -y gcc gfortran python-dev libopenblas-dev liblapack-dev cython libpq-dev +wget -O /tmp/requirements.txt https://raw.githubusercontent.com/google/fuzzbench/master/requirements.txt +pip3 install pip --upgrade +CFLAGS= CXXFLAGS= pip3 install -r /tmp/requirements.txt +rm /tmp/requirements.txt diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_measure b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_measure new file mode 100644 index 0000000000000000000000000000000000000000..ba9a413a0b59557a4e8d029aa413967bd700041c --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_measure @@ -0,0 +1,32 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# TODO(metzman): Make these configurable. +export DB_PATH=$OUT/experiment.db +export SNAPSHOT_PERIOD=30 +export EXPERIMENT_FILESTORE=$OUT/filestore +export MAX_TOTAL_TIME=120 +export EXPERIMENT=oss-fuzz-on-demand + +rm -f $DB_PATH + +# FUZZER=mopt BENCHMARK=skcms + +export SQL_DATABASE_URL=sqlite:///$DB_PATH + +cd $FUZZBENCH_PATH +PYTHONPATH=. 
python3 -B experiment/measurer/standalone.py $MAX_TOTAL_TIME diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_run_fuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_run_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..008fdbb05cc390c3af022c6d2204a3014abcc8b9 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_run_fuzzer @@ -0,0 +1,41 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +export RUNNER_NICENESS="-5" +export EXPERIMENT_FILESTORE=$OUT/filestore +export EXPERIMENT=oss-fuzz-on-demand +export OSS_FUZZ_ON_DEMAND=1 +export OUTPUT_CORPUS_DIR=/output-corpus +export SEED_CORPUS_DIR=/input-corpus +mkdir $SEED_CORPUS_DIR +rm -rf $OUTPUT_CORPUS_DIR +mkdir $OUTPUT_CORPUS_DIR +export FUZZER=$FUZZING_ENGINE +# TODO(metzman): Make this configurable. +export MAX_TOTAL_TIME=120 +export SNAPSHOT_PERIOD=30 +export TRIAL_ID=1 +export FORCE_LOCAL=1 + +# BENCHMARK, FUZZ_TARGET +cd $OUT + +# Prevent permissions issues with pyc files and docker. 
+cp -r $FUZZBENCH_PATH /tmp/fuzzbench + +PYTHONPATH=/tmp/fuzzbench nice -n $RUNNER_NICENESS python3 -B -u /tmp/fuzzbench/experiment/runner.py +cat $EXPERIMENT_FILESTORE/$EXPERIMENT/experiment-folders/$BENCHMARK-$FUZZER/trial-$TRIAL_ID/results/fuzzer-log.txt diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..89333395375de51f083062a9c0c9d9985e8296f2 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +# Set up Golang environment variables (copied from /root/.bash_profile). +ENV GOPATH /root/go + +# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc). +# $GOPATH/bin is for the binaries from the dependencies installed via "go get". +ENV PATH $PATH:/root/.go/bin:$GOPATH/bin + +COPY gosigfuzz.c $GOPATH/gosigfuzz/ + +RUN install_go.sh + +# TODO(jonathanmetzman): Install this file using install_go.sh. 
+COPY ossfuzz_coverage_runner.go \ + $GOPATH/ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/gosigfuzz.c b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/gosigfuzz.c new file mode 100644 index 0000000000000000000000000000000000000000..d613ce64b78fcbce7180f48587ac5f8085ae402f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/gosigfuzz.c @@ -0,0 +1,47 @@ +/* + * Copyright 2023 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +#include +#include + +static void fixSignalHandler(int signum) { + struct sigaction new_action; + struct sigaction old_action; + sigemptyset (&new_action.sa_mask); + sigaction (signum, NULL, &old_action); + new_action.sa_flags = old_action.sa_flags | SA_ONSTACK; + new_action.sa_sigaction = old_action.sa_sigaction; + new_action.sa_handler = old_action.sa_handler; + sigaction (signum, &new_action, NULL); +} + +static void FixStackSignalHandler() { + fixSignalHandler(SIGSEGV); + fixSignalHandler(SIGABRT); + fixSignalHandler(SIGALRM); + fixSignalHandler(SIGINT); + fixSignalHandler(SIGTERM); + fixSignalHandler(SIGBUS); + fixSignalHandler(SIGFPE); + fixSignalHandler(SIGXFSZ); + fixSignalHandler(SIGUSR1); + fixSignalHandler(SIGUSR2); +} + +int LLVMFuzzerInitialize(int *argc, char ***argv) { + FixStackSignalHandler(); + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go new file mode 100644 index 0000000000000000000000000000000000000000..ab2504888105835a80f6c0d1c0192bd724e2d52e --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mypackagebeingfuzzed + +import ( + "io/fs" + "io/ioutil" + "os" + "path/filepath" + "runtime/pprof" + "testing" +) + +func TestFuzzCorpus(t *testing.T) { + dir := os.Getenv("FUZZ_CORPUS_DIR") + if dir == "" { + t.Logf("No fuzzing corpus directory set") + return + } + filename := "" + defer func() { + if r := recover(); r != nil { + t.Error("Fuzz panicked in "+filename, r) + } + }() + profname := os.Getenv("FUZZ_PROFILE_NAME") + if profname != "" { + f, err := os.Create(profname + ".cpu.prof") + if err != nil { + t.Logf("error creating profile file %s\n", err) + } else { + _ = pprof.StartCPUProfile(f) + } + } + _, err := ioutil.ReadDir(dir) + if err != nil { + t.Logf("Not fuzzing corpus directory %s", err) + return + } + // recurse for regressions subdirectory + err = filepath.Walk(dir, func(fname string, info fs.FileInfo, err error) error { + if info.IsDir() { + return nil + } + data, err := ioutil.ReadFile(fname) + if err != nil { + t.Error("Failed to read corpus file", err) + return err + } + filename = fname + FuzzFunction(data) + return nil + }) + if err != nil { + t.Error("Failed to run corpus", err) + } + if profname != "" { + pprof.StopCPUProfile() + f, err := os.Create(profname + ".heap.prof") + if err != nil { + t.Logf("error creating heap profile file %s\n", err) + } + if err = pprof.WriteHeapProfile(f); err != nil { + t.Logf("error writing heap profile file %s\n", err) + } + f.Close() + } +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-javascript/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-javascript/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..1f5b27da6c1607355ce2296bda4fd15ec2d8b129 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-javascript/Dockerfile @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +RUN install_javascript.sh diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-jvm/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-jvm/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..6ae5da8afd452877d486cd9c91aaca9cf2c655d8 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-jvm/Dockerfile @@ -0,0 +1,68 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} AS base + +ENV JAVA_HOME /usr/lib/jvm/java-17-openjdk-amd64 +ENV JAVA_15_HOME /usr/lib/jvm/java-15-openjdk-amd64 +ENV JVM_LD_LIBRARY_PATH $JAVA_HOME/lib/server +ENV PATH $PATH:$JAVA_HOME/bin +ENV JAZZER_API_PATH "/usr/local/lib/jazzer_api_deploy.jar" +ENV JAZZER_JUNIT_PATH "/usr/local/bin/jazzer_junit.jar" + +RUN install_java.sh + +RUN chmod 777 /usr/local/bin && chmod 777 /usr/local/lib + +FROM base AS builder +RUN useradd -m jazzer_user + +USER jazzer_user + +WORKDIR $SRC + +RUN git clone https://github.com/aixcc-finals/jazzer-aixcc jazzer && \ + cd jazzer && \ + git checkout 43791565a765b854b537d878c9cab757ff1f2140 + +WORKDIR $SRC/jazzer + +RUN echo "build --java_runtime_version=local_jdk_17" >> .bazelrc \ + && echo "build --cxxopt=-stdlib=libc++" >> .bazelrc \ + && echo "build --linkopt=-lc++" >> .bazelrc + +RUN bazel build \ + //src/main/java/com/code_intelligence/jazzer:jazzer_standalone_deploy.jar \ + //deploy:jazzer-api \ + //deploy:jazzer-junit \ + //launcher:jazzer + +RUN cp $(bazel cquery --output=files //src/main/java/com/code_intelligence/jazzer:jazzer_standalone_deploy.jar) /usr/local/bin/jazzer_agent_deploy.jar && \ + cp $(bazel cquery --output=files //launcher:jazzer) /usr/local/bin/jazzer_driver && \ + cp $(bazel cquery --output=files //deploy:jazzer-api) $JAZZER_API_PATH && \ + cp $(bazel cquery --output=files //deploy:jazzer-junit) $JAZZER_JUNIT_PATH + +FROM base AS final + +COPY --from=builder /usr/local/bin/jazzer_agent_deploy.jar /usr/local/bin/jazzer_agent_deploy.jar +COPY --from=builder /usr/local/bin/jazzer_driver /usr/local/bin/jazzer_driver +COPY --from=builder $JAZZER_API_PATH $JAZZER_API_PATH +COPY --from=builder $JAZZER_JUNIT_PATH $JAZZER_JUNIT_PATH + +RUN chmod 755 /usr/local/bin && chmod 755 /usr/local/lib + +WORKDIR $SRC diff --git 
a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..df40041305a859953acd669a74095fb2d9a7b249 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile @@ -0,0 +1,20 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +RUN install_python.sh diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..b478d07b516a7d7bbb064db949e0bf7acee08c47 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/Dockerfile @@ -0,0 +1,56 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +RUN git clone https://github.com/trailofbits/ruzzy.git $SRC/ruzzy + +RUN install_ruby.sh +ENV PATH="$PATH:/usr/local/rvm/rubies/ruby-3.3.1/bin" + +RUN gem update --system 3.5.11 + +# Install ruzzy +WORKDIR $SRC/ruzzy + +# The MAKE variable allows overwriting the make command at runtime. This forces the +# Ruby C extension to respect ENV variables when compiling, like CC, CFLAGS, etc. +ENV MAKE="make --environment-overrides V=1" + +RUN CC="clang" \ +CXX="clang++" \ +LDSHARED="clang -shared" \ +LDSHAREDXX="clang++ -shared" \ +gem build + +RUN MAKE="make --environment-overrides V=1" \ +CC="clang" \ +CXX="clang++" \ +LDSHARED="clang -shared" \ +LDSHAREDXX="clang++ -shared" \ +CXXFLAGS="-fPIC" \ +CFLAGS="-fPIC" \ +RUZZY_DEBUG=1 gem install --install-dir /install/ruzzy --development --verbose ruzzy-*.gem + + +ENV LDSHARED="$CC -shared" +ENV LDSHAREDXX="$CXX -shared" + +ENV GEM_HOME="$OUT/fuzz-gem" +ENV GEM_PATH="/install/ruzzy" + +COPY ruzzy-build /usr/bin/ruzzy-build diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/ruzzy-build b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/ruzzy-build new file mode 100644 index 0000000000000000000000000000000000000000..459e909905d57466667a49ed4e5291798e1715f1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-ruby/ruzzy-build @@ -0,0 +1,28 @@ +#!/usr/bin/env 
bash -e +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +fuzz_target=$(basename "$1") +echo "BASENAME: $fuzz_target ---" +harness_sh=${fuzz_target::-3} + +cp $1 $OUT/$fuzz_target + +echo """#!/usr/bin/env bash + +ruzzy $fuzz_target +""" > $OUT/$harness_sh +chmod +x $OUT/$harness_sh diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-rust/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-rust/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..899adc9cff849fa96b422bba682c300ec04af5c0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-rust/Dockerfile @@ -0,0 +1,37 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +ENV CARGO_HOME=/rust +ENV RUSTUP_HOME=/rust/rustup +ENV PATH=$PATH:/rust/bin +# Set up custom environment variable for source code copy for coverage reports +ENV OSSFUZZ_RUSTPATH /rust + +# Force rustup to ignore `rust-toolchain` and `rust-toolchain.toml` files by +# manually specifying what toolchain to use. Note that this environment variable +# is additionally used by `install_rust.sh` as the toolchain to install. +# cf https://rust-lang.github.io/rustup/overrides.html +ENV RUSTUP_TOOLCHAIN nightly-2024-07-12 + +# Configure the linker used by default for x86_64 linux to be `clang` instead of +# rustc's default of `cc` which is able to find custom-built libraries like +# `libc++` by default more easily. +ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER clang + +RUN install_rust.sh diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..e558984c194daff35064cba3c5118c76f3e5c582 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile @@ -0,0 +1,22 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +RUN install_swift.sh + +COPY precompile_swift /usr/local/bin/ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/precompile_swift b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/precompile_swift new file mode 100644 index 0000000000000000000000000000000000000000..ab855a620191656e572d6a68a869e26f020b0dc5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder-swift/precompile_swift @@ -0,0 +1,33 @@ +#!/bin/bash -eu +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +cp /usr/local/bin/llvm-symbolizer-swift $OUT/llvm-symbolizer + +export SWIFTFLAGS="-Xswiftc -parse-as-library -Xswiftc -static-stdlib --static-swift-stdlib" +if [ "$SANITIZER" = "coverage" ] +then + export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -profile-generate -Xswiftc -profile-coverage-mapping -Xswiftc -sanitize=fuzzer" +else + export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -sanitize=fuzzer,$SANITIZER --sanitize=$SANITIZER" + for f in $CFLAGS; do + export SWIFTFLAGS="$SWIFTFLAGS -Xcc=$f" + done + + for f in $CXXFLAGS; do + export SWIFTFLAGS="$SWIFTFLAGS -Xcxx=$f" + done +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..59b24d7a6cd9c4102df408d45dd71def7711c6f3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/Dockerfile @@ -0,0 +1,213 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-clang:${IMG_TAG} + +COPY install_deps.sh / +RUN /install_deps.sh && rm /install_deps.sh + +# Build and install latest Python 3.10. 
+ENV PYTHON_VERSION 3.10.14 +RUN PYTHON_DEPS="\ + zlib1g-dev \ + libncurses5-dev \ + libgdbm-dev \ + libnss3-dev \ + libssl-dev \ + libsqlite3-dev \ + libreadline-dev \ + libffi-dev \ + libbz2-dev \ + liblzma-dev" && \ + unset CFLAGS CXXFLAGS && \ + apt-get install -y $PYTHON_DEPS && \ + cd /tmp && \ + curl -O https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz && \ + tar -xvf Python-$PYTHON_VERSION.tar.xz && \ + cd Python-$PYTHON_VERSION && \ + ./configure --enable-optimizations --enable-shared && \ + make -j$(nproc) install && \ + ldconfig && \ + ln -s /usr/local/bin/python3 /usr/local/bin/python && \ + cd .. && \ + rm -r /tmp/Python-$PYTHON_VERSION.tar.xz /tmp/Python-$PYTHON_VERSION && \ + rm -rf /usr/local/lib/python${PYTHON_VERSION%.*}/test && \ + python3 -m ensurepip && \ + python3 -m pip install --upgrade pip && \ + apt-get remove -y $PYTHON_DEPS # https://github.com/google/oss-fuzz/issues/3888 + + +ENV CCACHE_VERSION 4.10.2 +RUN cd /tmp && curl -OL https://github.com/ccache/ccache/releases/download/v$CCACHE_VERSION/ccache-$CCACHE_VERSION.tar.xz && \ + tar -xvf ccache-$CCACHE_VERSION.tar.xz && cd ccache-$CCACHE_VERSION && \ + mkdir build && cd build && \ + export LDFLAGS='-lpthread' && \ + cmake -D CMAKE_BUILD_TYPE=Release .. && \ + make -j && make install && \ + rm -rf /tmp/ccache-$CCACHE_VERSION /tmp/ccache-$CCACHE_VERSION.tar.xz + +# Install six for Bazel rules. +RUN unset CFLAGS CXXFLAGS && pip3 install -v --no-cache-dir \ + six==1.15.0 && rm -rf /tmp/* + +# Install Bazel through Bazelisk, which automatically fetches the latest Bazel version. +ENV BAZELISK_VERSION 1.9.0 +RUN curl -L https://github.com/bazelbuild/bazelisk/releases/download/v$BAZELISK_VERSION/bazelisk-linux-amd64 -o /usr/local/bin/bazel && \ + chmod +x /usr/local/bin/bazel + +# Default build flags for various sanitizers. 
+ENV SANITIZER_FLAGS_address "-fsanitize=address -fsanitize-address-use-after-scope" +ENV SANITIZER_FLAGS_hwaddress "-fsanitize=hwaddress -fuse-ld=lld -Wno-unused-command-line-argument" + +# Set of '-fsanitize' flags matches '-fno-sanitize-recover' + 'unsigned-integer-overflow'. +ENV SANITIZER_FLAGS_undefined "-fsanitize=array-bounds,bool,builtin,enum,function,integer-divide-by-zero,null,object-size,return,returns-nonnull-attribute,shift,signed-integer-overflow,unsigned-integer-overflow,unreachable,vla-bound,vptr -fno-sanitize-recover=array-bounds,bool,builtin,enum,function,integer-divide-by-zero,null,object-size,return,returns-nonnull-attribute,shift,signed-integer-overflow,unreachable,vla-bound,vptr" + +# Don't include "function" since it is unsupported on aarch64. +ENV SANITIZER_FLAGS_undefined_aarch64 "-fsanitize=array-bounds,bool,builtin,enum,integer-divide-by-zero,null,object-size,return,returns-nonnull-attribute,shift,signed-integer-overflow,unsigned-integer-overflow,unreachable,vla-bound,vptr -fno-sanitize-recover=array-bounds,bool,builtin,enum,integer-divide-by-zero,null,object-size,return,returns-nonnull-attribute,shift,signed-integer-overflow,unreachable,vla-bound,vptr" + +ENV SANITIZER_FLAGS_memory "-fsanitize=memory -fsanitize-memory-track-origins" + +ENV SANITIZER_FLAGS_thread "-fsanitize=thread" + +ENV SANITIZER_FLAGS_introspector "-O0 -flto -fno-inline-functions -fuse-ld=gold -Wno-unused-command-line-argument" + +# Do not use any sanitizers in the coverage build. +ENV SANITIZER_FLAGS_coverage "" + +# We use unsigned-integer-overflow as an additional coverage signal and have to +# suppress error messages. See https://github.com/google/oss-fuzz/issues/910. +ENV UBSAN_OPTIONS="silence_unsigned_overflow=1" + +# To suppress warnings from binaries running during compilation. +ENV DFSAN_OPTIONS='warn_unimplemented=0' + +# Default build flags for coverage feedback. 
+ENV COVERAGE_FLAGS="-fsanitize=fuzzer-no-link" + +# Use '-Wno-unused-command-line-argument' to suppress "warning: -ldl: 'linker' input unused" +# messages which are treated as errors by some projects. +ENV COVERAGE_FLAGS_coverage "-fprofile-instr-generate -fcoverage-mapping -pthread -Wl,--no-as-needed -Wl,-ldl -Wl,-lm -Wno-unused-command-line-argument" + +# Default sanitizer, fuzzing engine and architecture to use. +ENV SANITIZER="address" +ENV FUZZING_ENGINE="libfuzzer" +ENV ARCHITECTURE="x86_64" + +# DEPRECATED - NEW CODE SHOULD NOT USE THIS. OLD CODE SHOULD STOP. Please use +# LIB_FUZZING_ENGINE instead. +# Path to fuzzing engine library to support some old users of +# LIB_FUZZING_ENGINE. +ENV LIB_FUZZING_ENGINE_DEPRECATED="/usr/lib/libFuzzingEngine.a" + +# Argument passed to compiler to link against fuzzing engine. +# Defaults to the path, but is "-fsanitize=fuzzer" in libFuzzer builds. +ENV LIB_FUZZING_ENGINE="/usr/lib/libFuzzingEngine.a" + +# TODO: remove after tpm2 catchup. +ENV FUZZER_LDFLAGS "" + +WORKDIR $SRC + +RUN git clone https://github.com/AFLplusplus/AFLplusplus.git aflplusplus && \ + cd aflplusplus && \ + git checkout daaefcddc063b356018c29027494a00bcfc3e240 && \ + wget --no-check-certificate -O oss.sh https://raw.githubusercontent.com/vanhauser-thc/binary_blobs/master/oss.sh && \ + rm -rf .git && \ + chmod 755 oss.sh + +# Do precompiles before copying other scripts for better cache efficiency. 
+COPY precompile_afl /usr/local/bin/ +RUN precompile_afl + +RUN cd $SRC && \ + curl -L -O https://github.com/google/honggfuzz/archive/oss-fuzz.tar.gz && \ + mkdir honggfuzz && \ + cd honggfuzz && \ + tar -xz --strip-components=1 -f $SRC/oss-fuzz.tar.gz && \ + rm -rf examples $SRC/oss-fuzz.tar.gz + + +COPY precompile_honggfuzz /usr/local/bin/ +RUN precompile_honggfuzz + +RUN cd $SRC && \ + git clone https://github.com/google/fuzztest && \ + cd fuzztest && \ + git checkout a37d133f714395cabc20dd930969a889495c9f53 && \ + rm -rf .git + +ENV CENTIPEDE_BIN_DIR=$SRC/fuzztest/bazel-bin +COPY precompile_centipede /usr/local/bin/ +RUN precompile_centipede + +COPY sanitizers /usr/local/lib/sanitizers + +COPY bazel_build_fuzz_tests \ + cargo \ + compile \ + compile_afl \ + compile_centipede \ + compile_honggfuzz \ + compile_fuzztests.sh \ + compile_go_fuzzer \ + compile_javascript_fuzzer \ + compile_libfuzzer \ + compile_native_go_fuzzer \ + compile_python_fuzzer \ + debug_afl \ + # Go, JavaScript, Java, Python, Rust, and Swift installation scripts. + install_go.sh \ + install_javascript.sh \ + install_java.sh \ + install_python.sh \ + install_ruby.sh \ + install_rust.sh \ + install_swift.sh \ + python_coverage_helper.py \ + bash_parser.py \ + srcmap \ + write_labels.py \ + /usr/local/bin/ + +# TODO: Build this as part of a multi-stage build. 
+ADD https://commondatastorage.googleapis.com/clusterfuzz-builds/jcc/clang-jcc /usr/local/bin/ +ADD https://commondatastorage.googleapis.com/clusterfuzz-builds/jcc/clang++-jcc /usr/local/bin +ADD https://commondatastorage.googleapis.com/clusterfuzz-builds/jcc/clang-jcc2 /usr/local/bin/ +ADD https://commondatastorage.googleapis.com/clusterfuzz-builds/jcc/clang++-jcc2 /usr/local/bin +RUN chmod +x /usr/local/bin/clang-jcc /usr/local/bin/clang++-jcc /usr/local/bin/clang-jcc2 /usr/local/bin/clang++-jcc2 + +COPY llvmsymbol.diff $SRC +COPY detect_repo.py /opt/cifuzz/ +COPY bazel.bazelrc /root/.bazelrc + +# Set up ccache binary and cache directory. +# /ccache/bin will contain the compiler wrappers, and /ccache/cache will +# contain the actual cache, which can be saved. +# To use this, set PATH=/ccache/bin:$PATH. +RUN mkdir -p /ccache/bin && mkdir -p /ccache/cache && \ + ln -s /usr/local/bin/ccache /ccache/bin/clang && \ + ln -s /usr/local/bin/ccache /ccache/bin/clang++ && \ + ln -s /usr/local/bin/ccache /ccache/bin/clang-jcc && \ + ln -s /usr/local/bin/ccache /ccache/bin/clang++-jcc +ENV CCACHE_DIR /ccache/cache + +# Don't check that the compiler is the same, so we can switch between jcc and +# clang under the hood and re-use the same build cache. +ENV CCACHE_COMPILERCHECK none +ENV CCACHE_COMPILERTYPE clang + +CMD ["compile"] diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4e94bb0888c92f094cfefea3b9bf70d41bb8db22 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/README.md @@ -0,0 +1,83 @@ +# base-builder +> Abstract base image for project builders. + +Every project image supports multiple commands that can be invoked through docker after the image is built: + +
+docker run --rm -ti gcr.io/oss-fuzz/$project <command> <arguments...>
+
+ +# Supported Commands + +| Command | Description | +|---------|-------------| +| `compile` (default) | build all fuzz targets +| `/bin/bash` | drop into shell, execute `compile` script to start build. + +# Build Configuration + +A single build image can build same set of fuzzers in many configurations. +The configuration is picked through one or more environment variables. + +| Env Variable | Description +| ------------- | -------- +| `$SANITIZER ("address")` | Specifies predefined sanitizer configuration to use. `address` or `memory` or `undefined`. +| `$SANITIZER_FLAGS` | Specify compiler sanitizer flags directly. Overrides `$SANITIZER`. +| `$COVERAGE_FLAGS` | Specify compiler flags to use for fuzzer feedback coverage. +| `$BUILD_UID` | User id to use while building fuzzers. + +## Examples + +- *building sqlite3 fuzzer with UBSan (`SANITIZER=undefined`):* + + +
+docker run --rm -ti -e SANITIZER=undefined gcr.io/oss-fuzz/sqlite3
+
+ + +# Image Files Layout + +| Location|Env| Description | +|---------| -------- | ---------- | +| `/out/` | `$OUT` | Directory to store build artifacts (fuzz targets, dictionaries, options files, seed corpus archives). | +| `/src/` | `$SRC` | Directory to checkout source files | +| `/work/`| `$WORK` | Directory for storing intermediate files | +| `/usr/lib/libFuzzingEngine.a` | `$LIB_FUZZING_ENGINE` | Location of prebuilt fuzzing engine library (e.g. libFuzzer) that needs to be linked with all fuzz targets. + +While files layout is fixed within a container, the environment variables are +provided to be able to write retargetable scripts. + + +## Compiler Flags + +You *must* use special compiler flags to build your project and fuzz targets. +These flags are provided in following environment variables: + +| Env Variable | Description +| ------------- | -------- +| `$CC` | The C compiler binary. +| `$CXX`, `$CCC` | The C++ compiler binary. +| `$CFLAGS` | C compiler flags. +| `$CXXFLAGS` | C++ compiler flags. + +Most well-crafted build scripts will automatically use these variables. If not, +pass them manually to the build tool. + + +# Child Image Interface + +## Sources + +Child image has to checkout all sources that it needs to compile fuzz targets into +`$SRC` directory. When the image is executed, a directory could be mounted on top +of these with local checkouts using +`docker run -v $HOME/my_project:/src/my_project ...`. 
+ +## Other Required Files + +Following files have to be added by child images: + +| File Location | Description | +| ------------- | ----------- | +| `$SRC/build.sh` | build script to build the project and its fuzz targets | diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..1d816992d009774c01a438023beb20c15162b7ea --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py @@ -0,0 +1,235 @@ +#!/usr/bin/python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +from glob import glob + +import bashlex + + +def find_all_bash_scripts_in_src(): + """Finds all bash scripts that exist in SRC/. This is used to idenfiy scripts + that may be needed for reading during the AST parsing. 
This is the case + when a given build script calls another build script, then we need to + read those.""" + all_local_scripts = [ + y for x in os.walk('/src/') for y in glob(os.path.join(x[0], '*.sh')) + ] + scripts_we_care_about = [] + to_ignore = {'aflplusplus', 'honggfuzz', '/fuzztest', '/centipede'} + for s in all_local_scripts: + if any([x for x in to_ignore if x in s]): + continue + scripts_we_care_about.append(s) + + print(scripts_we_care_about) + return scripts_we_care_about + + +def should_discard_command(ast_tree) -> bool: + """Returns True if the command should be avoided, otherwise False""" + try: + first_word = ast_tree.parts[0].word + except: # pylint: disable=bare-except + return False + + if ('cmake' in first_word and + any('--build' in part.word for part in ast_tree.parts)): + return False + + cmds_to_avoid_replaying = { + 'configure', 'autoheader', 'autoconf', 'autoreconf', 'cmake', 'autogen.sh' + } + if any([cmd for cmd in cmds_to_avoid_replaying if cmd in first_word]): + return True + + # Avoid all "make clean" calls. We don't want to erase previously built + # files. + try: + second_word = ast_tree.parts[1].word + except: # pylint: disable=bare-except + return False + if 'make' in first_word and 'clean' in second_word: + return True + + # No match was found to commands we don't want to build. There is no + # indication we should avoid. 
+ return False + + +def is_local_redirection(ast_node, all_local_scripts): + """Return the list of scripts corresponding to the command, in case + the command is an execution of a local script.""" + # print("Checking") + + # Capture local script called with ./random/path/build.sh + + if len(ast_node.parts) >= 2: + try: + ast_node.parts[0].word + except: + return [] + if ast_node.parts[0].word == '.': + suffixes_matching = [] + #print(ast_node.parts[1].word) + for bash_script in all_local_scripts: + #print("- %s"%(bash_script)) + cmd_to_exec = ast_node.parts[1].word.replace('$SRC', 'src') + if bash_script.endswith(cmd_to_exec): + suffixes_matching.append(bash_script) + #print(suffixes_matching) + return suffixes_matching + # Capture a local script called with $SRC/random/path/build.sh + if len(ast_node.parts) >= 1: + if '$SRC' in ast_node.parts[0].word: + suffixes_matching = [] + print(ast_node.parts[0].word) + for bash_script in all_local_scripts: + print("- %s" % (bash_script)) + cmd_to_exec = ast_node.parts[0].word.replace('$SRC', 'src') + if bash_script.endswith(cmd_to_exec): + suffixes_matching.append(bash_script) + print(suffixes_matching) + return suffixes_matching + + return [] + + +def handle_ast_command(ast_node, all_scripts_in_fs, raw_script): + """Generate bash script string for command node""" + new_script = '' + if should_discard_command(ast_node): + return '' + + matches = is_local_redirection(ast_node, all_scripts_in_fs) + if len(matches) == 1: + new_script += parse_script(matches[0], all_scripts_in_fs) + '\n' + return '' + + # Extract the command from the script string + idx_start = ast_node.pos[0] + idx_end = ast_node.pos[1] + new_script += raw_script[idx_start:idx_end] + #new_script += '\n' + + # If mkdir is used, then ensure that '-p' is provided, as + # otherwise we will run into failures. We don't have to worry + # about multiple uses of -p as `mkdir -p -p -p`` is valid. 
+ new_script = new_script.replace('mkdir', 'mkdir -p') + return new_script + + +def handle_ast_list(ast_node, all_scripts_in_fs, raw_script): + """Handles bashlex AST list.""" + new_script = '' + try_hard = 1 + + if not try_hard: + list_start = ast_node.pos[0] + list_end = ast_node.pos[1] + new_script += raw_script[list_start:list_end] # + '\n' + else: + # This is more refined logic. Ideally, this should work, but it's a bit + # more intricate to get right due to e.g. white-space between positions + # and more extensive parsing needed. We don't necessarily need this + # level of success rate for what we're trying to achieve, so I'm disabling + # this for now. + for part in ast_node.parts: + if part.kind == 'list': + new_script += handle_ast_list(part, all_scripts_in_fs, raw_script) + elif part.kind == 'command': + new_script += handle_ast_command(part, all_scripts_in_fs, raw_script) + else: + idx_start = part.pos[0] + idx_end = part.pos[1] + new_script += raw_script[idx_start:idx_end] + new_script += ' ' + + # Make sure what was created is valid syntax, and otherwise return empty + try: + bashlex.parse(new_script) + except: # pylint: disable=bare-except + # Maybe return the original here instead of skipping? 
+ return '' + return new_script + + +def handle_ast_compound(ast_node, all_scripts_in_fs, raw_script): + """Handles bashlex compound AST node.""" + new_script = '' + list_start = ast_node.pos[0] + list_end = ast_node.pos[1] + new_script += raw_script[list_start:list_end] + '\n' + return new_script + + +def handle_node(ast_node, all_scripts_in_fs, build_script): + """Generates a bash script string for a given node""" + if ast_node.kind == 'command': + return handle_ast_command(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'list': + return handle_ast_list(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'compound': + print('todo: handle compound') + return handle_ast_compound(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'pipeline': + # Not supported + return '' + else: + raise Exception(f'Missing node handling: {ast_node.kind}') + + +def parse_script(bash_script, all_scripts) -> str: + """Top-level bash script parser""" + new_script = '' + with open(bash_script, 'r', encoding='utf-8') as f: + build_script = f.read() + try: + parts = bashlex.parse(build_script) + except bashlex.errors.ParsingError: + return '' + for part in parts: + new_script += handle_node(part, all_scripts, build_script) + new_script += '\n' + print("-" * 45) + print(part.kind) + print(part.dump()) + + return new_script + + +def main(): + """Main function""" + all_scripts = find_all_bash_scripts_in_src() + replay_bash_script = parse_script(sys.argv[1], all_scripts) + + print("REPLAYABLE BASH SCRIPT") + print("#" * 60) + print(replay_bash_script) + print("#" * 60) + + out_dir = os.getenv('OUT', '/out') + with open(f'{out_dir}/replay-build-script.sh', 'w', encoding='utf-8') as f: + f.write(replay_bash_script) + + src_dir = os.getenv('SRC', '/src') + with open(f'{src_dir}/replay_build.sh', 'w', encoding='utf-8') as f: + f.write(replay_bash_script) + + +if __name__ == "__main__": + main() diff --git 
a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel.bazelrc b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel.bazelrc new file mode 100644 index 0000000000000000000000000000000000000000..a82293d7e8a10f4de481da391c6e64bb8bf32c3f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel.bazelrc @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + + +# Pass variables from environment. +build --action_env=FUZZ_INTROSPECTOR +build --action_env=FUZZINTRO_OUTDIR diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel_build_fuzz_tests b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel_build_fuzz_tests new file mode 100644 index 0000000000000000000000000000000000000000..5d52c424839503cd405d7edb83fee5691870fd6e --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bazel_build_fuzz_tests @@ -0,0 +1,90 @@ +#!/bin/bash -eu +# +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +: "${BAZEL_FUZZ_TEST_TAG:=fuzz-test}" +: "${BAZEL_FUZZ_TEST_EXCLUDE_TAG:=no-oss-fuzz}" +: "${BAZEL_PACKAGE_SUFFIX:=_oss_fuzz}" +: "${BAZEL_TOOL:=bazel}" +: "${BAZEL_EXTRA_BUILD_FLAGS:=}" + +if [ "$FUZZING_LANGUAGE" = "jvm" ]; then + BAZEL_LANGUAGE=java +else + BAZEL_LANGUAGE=cc +fi + +if [[ -z "${BAZEL_FUZZ_TEST_QUERY:-}" ]]; then + BAZEL_FUZZ_TEST_QUERY=" + let all_fuzz_tests = attr(tags, \"${BAZEL_FUZZ_TEST_TAG}\", \"//...\") in + let lang_fuzz_tests = attr(generator_function, \"^${BAZEL_LANGUAGE}_fuzz_test\$\", \$all_fuzz_tests) in + \$lang_fuzz_tests - attr(tags, \"${BAZEL_FUZZ_TEST_EXCLUDE_TAG}\", \$lang_fuzz_tests) + " +fi + +echo "Using Bazel query to find fuzz targets: ${BAZEL_FUZZ_TEST_QUERY}" + +declare -r OSS_FUZZ_TESTS=( + $(bazel query "${BAZEL_FUZZ_TEST_QUERY}" | sed "s/$/${BAZEL_PACKAGE_SUFFIX}/") +) + +echo "Found ${#OSS_FUZZ_TESTS[@]} fuzz test packages:" +for oss_fuzz_test in "${OSS_FUZZ_TESTS[@]}"; do + echo " ${oss_fuzz_test}" +done + +declare -r BAZEL_BUILD_FLAGS=( + "--@rules_fuzzing//fuzzing:cc_engine=@rules_fuzzing_oss_fuzz//:oss_fuzz_engine" \ + "--@rules_fuzzing//fuzzing:java_engine=@rules_fuzzing_oss_fuzz//:oss_fuzz_java_engine" \ + "--@rules_fuzzing//fuzzing:cc_engine_instrumentation=oss-fuzz" \ + "--@rules_fuzzing//fuzzing:cc_engine_sanitizer=none" \ + "--cxxopt=-stdlib=libc++" \ + "--linkopt=-lc++" \ + "--verbose_failures" \ + "--spawn_strategy=standalone" \ + "--action_env=CC=${CC}" "--action_env=CXX=${CXX}" \ + 
${BAZEL_EXTRA_BUILD_FLAGS[*]} +) + +echo "Building the fuzz tests with the following Bazel options:" +echo " ${BAZEL_BUILD_FLAGS[@]}" + +${BAZEL_TOOL} build "${BAZEL_BUILD_FLAGS[@]}" "${OSS_FUZZ_TESTS[@]}" + +echo "Extracting the fuzz test packages in the output directory." +for oss_fuzz_archive in $(find bazel-bin/ -name "*${BAZEL_PACKAGE_SUFFIX}.tar"); do + tar --no-same-owner -xvf "${oss_fuzz_archive}" -C "${OUT}" +done + +if [ "$SANITIZER" = "coverage" ]; then + echo "Collecting the repository source files for coverage tracking." + declare -r COVERAGE_SOURCES="${OUT}/proc/self/cwd" + mkdir -p "${COVERAGE_SOURCES}" + declare -r RSYNC_FILTER_ARGS=( + "--include" "*.h" + "--include" "*.cc" + "--include" "*.hpp" + "--include" "*.cpp" + "--include" "*.c" + "--include" "*.inc" + "--include" "*/" + "--exclude" "*" + ) + rsync -avLk "${RSYNC_FILTER_ARGS[@]}" \ + "$(bazel info execution_root)/" \ + "${COVERAGE_SOURCES}/" +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang.py new file mode 100644 index 0000000000000000000000000000000000000000..2e2c0e49abb189d9dc1837ea193d0704dd486b51 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python3 +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +"""Use git bisect to find the Clang/LLVM commit causing a regression.""" + +import logging +import os +import re +import shutil +import subprocess +import sys + + +def execute(command, *args, expect_zero=True, **kwargs): + """Execute |command| and return the returncode, stdout and stderr.""" + kwargs['stdout'] = subprocess.PIPE + kwargs['stderr'] = subprocess.PIPE + logging.debug('Running command: "%s"', str(command)) + process = subprocess.Popen(command, *args, **kwargs) + stdout, stderr = process.communicate() + stdout = stdout.decode('utf-8') + stderr = stderr.decode('utf-8') + retcode = process.returncode + logging.info('Command: "%s" returned: %d.\nStdout: %s.\nStderr: %s', + str(command), retcode, stdout, stderr) + if expect_zero and retcode != 0: + raise subprocess.CalledProcessError(retcode, command) + return retcode, stdout, stderr + + +def search_bisect_output(output): + """Search |output| for a message indicating the culprit commit has been + found.""" + # TODO(metzman): Is it necessary to look for "good"? + culprit_regex = re.compile('([a-z0-9]{40}) is the first (good|bad) commit') + match = re.match(culprit_regex, output) + return match.group(1) if match is not None else None + + +class GitRepo: + """Class for executing commands on a git repo.""" + + def __init__(self, repo_dir): + self.repo_dir = repo_dir + + def do_command(self, git_subcommand): + """Execute a |git_subcommand| (a list of strings).""" + command = ['git', '-C', self.repo_dir] + git_subcommand + return execute(command) + + def test_commit(self, test_command): + """Build LLVM at the currently checked-out commit, then run |test_command|. + If returncode is 0 run 'git bisect good', otherwise run 'git bisect bad'. + Return None if bisect didn't finish yet. 
Return the culprit commit if it + does.""" + build_clang(self.repo_dir) + retcode, _, _ = execute(test_command, shell=True, expect_zero=False) + if retcode == 0: + retcode, stdout, _ = self.do_bisect_command('good') + else: + retcode, stdout, _ = self.do_bisect_command('bad') + return search_bisect_output(stdout) + + def bisect(self, good_commit, bad_commit, test_command): + """Do git bisect assuming |good_commit| is good, |bad_commit| is bad and + |test_command| is an oracle. Return the culprit commit.""" + self.bisect_start(good_commit, bad_commit, test_command) + result = self.test_commit(test_command) + while result is None: + result = self.test_commit(test_command) + return result + + def bisect_start(self, good_commit, bad_commit, test_command): + """Start doing git bisect.""" + self.do_bisect_command('start') + # Do bad commit first since it is more likely to be recent. + self.test_start_commit(bad_commit, 'bad', test_command) + self.test_start_commit(good_commit, 'good', test_command) + + def do_bisect_command(self, subcommand): + """Execute a git bisect |subcommand| (string) and return the result.""" + return self.do_command(['bisect', subcommand]) + + def test_start_commit(self, commit, label, test_command): + """Use |test_command| to test the first good or bad |commit| (depending on + |label|).""" + assert label in ('good', 'bad'), label + self.do_command(['checkout', commit]) + build_clang(self.repo_dir) + retcode, _, _ = execute(test_command, shell=True, expect_zero=False) + if label == 'good' and retcode != 0: + raise BisectError('Test command "%s" returns %d on first good commit %s' % + (test_command, retcode, commit)) + if label == 'bad' and retcode == 0: + raise BisectError('Test command "%s" returns %d on first bad commit %s' % + (test_command, retcode, commit)) + + self.do_bisect_command(label) + + +class BisectError(Exception): + """Error that was encountered during bisection.""" + + +def get_clang_build_env(): + """Get an environment for 
building Clang.""" + env = os.environ.copy() + for variable in ['CXXFLAGS', 'CFLAGS']: + if variable in env: + del env[variable] + return env + + +def install_clang_build_deps(): + """Instal dependencies necessary to build clang.""" + execute([ + 'apt-get', 'install', '-y', 'build-essential', 'make', 'cmake', + 'ninja-build', 'git', 'subversion', 'g++-multilib' + ]) + + +def clone_with_retries(repo, local_path, num_retries=10): + """Clone |repo| to |local_path| if it doesn't exist already. Try up to + |num_retries| times. Return False if unable to checkout.""" + if os.path.isdir(local_path): + return + for _ in range(num_retries): + if os.path.isdir(local_path): + shutil.rmtree(local_path) + retcode, _, _ = execute(['git', 'clone', repo, local_path], + expect_zero=False) + if retcode == 0: + return + raise Exception('Could not checkout %s.' % repo) + + +def get_clang_target_arch(): + """Get target architecture we want clang to target when we build it.""" + _, arch, _ = execute(['uname', '-m']) + if 'x86_64' in arch: + return 'X86' + if 'aarch64' in arch: + return 'AArch64' + raise Exception('Unsupported target: %s.' % arch) + + +def prepare_build(llvm_project_path): + """Prepare to build clang.""" + llvm_build_dir = os.path.join(os.getenv('WORK'), 'llvm-build') + if not os.path.exists(llvm_build_dir): + os.mkdir(llvm_build_dir) + execute([ + 'cmake', '-G', 'Ninja', '-DLIBCXX_ENABLE_SHARED=OFF', + '-DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON', '-DLIBCXXABI_ENABLE_SHARED=OFF', + '-DCMAKE_BUILD_TYPE=Release', + '-DLLVM_ENABLE_PROJECTS=libcxx;libcxxabi;compiler-rt;clang', + '-DLLVM_TARGETS_TO_BUILD=' + get_clang_target_arch(), + os.path.join(llvm_project_path, 'llvm') + ], + env=get_clang_build_env(), + cwd=llvm_build_dir) + return llvm_build_dir + + +def build_clang(llvm_project_path): + """Checkout, build and install Clang.""" + # TODO(metzman): Merge Python checkout and build code with + # checkout_build_install_llvm.sh. 
+ # TODO(metzman): Look into speeding this process using ccache. + # TODO(metzman): Make this program capable of handling MSAN and i386 Clang + # regressions. + llvm_build_dir = prepare_build(llvm_project_path) + execute(['ninja', '-C', llvm_build_dir, 'install'], env=get_clang_build_env()) + + +def find_culprit_commit(test_command, good_commit, bad_commit): + """Returns the culprit LLVM commit that introduced a bug revealed by running + |test_command|. Uses git bisect and treats |good_commit| as the first latest + known good commit and |bad_commit| as the first known bad commit.""" + llvm_project_path = os.path.join(os.getenv('SRC'), 'llvm-project') + clone_with_retries('https://github.com/llvm/llvm-project.git', + llvm_project_path) + git_repo = GitRepo(llvm_project_path) + result = git_repo.bisect(good_commit, bad_commit, test_command) + print('Culprit commit', result) + return result + + +def main(): + # pylint: disable=line-too-long + """Finds the culprit LLVM commit that introduced a clang regression. + Can be tested using this command in a libsodium shell: + python3 bisect_clang.py "cd /src/libsodium; make clean; cd -; compile && /out/secret_key_auth_fuzzer -runs=100" \ + f7e52fbdb5a7af8ea0808e98458b497125a5eca1 \ + 8288453f6aac05080b751b680455349e09d49825 + """ + # pylint: enable=line-too-long + # TODO(metzman): Check CFLAGS for things like -fsanitize=fuzzer-no-link. + # TODO(metzman): Allow test_command to be optional and for just build.sh to be + # used instead. + test_command = sys.argv[1] + # TODO(metzman): Add in more automation so that the script can automatically + # determine the commits used in last Clang roll. + good_commit = sys.argv[2] + bad_commit = sys.argv[3] + # TODO(metzman): Make verbosity configurable. 
+ logging.getLogger().setLevel(logging.DEBUG) + install_clang_build_deps() + find_culprit_commit(test_command, good_commit, bad_commit) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a11bf8640d787181d6e35df225c9f17098d02619 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py @@ -0,0 +1,294 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
################################################################################
"""Tests for bisect_clang.py"""
import os
from unittest import mock
import unittest

import bisect_clang

FILE_DIRECTORY = os.path.dirname(__file__)
LLVM_REPO_PATH = '/llvm-project'


def get_git_command(*args):
  """Returns a git command for the LLVM repo with |args| as arguments."""
  return ['git', '-C', LLVM_REPO_PATH] + list(args)


def patch_environ(testcase_obj):
  """Patch environment. Isolates os.environ so mutations made by a test are
  undone when the test case finishes."""
  env = {}
  patcher = mock.patch.dict(os.environ, env)
  # Register stop() first so the patch is removed even if the test errors.
  testcase_obj.addCleanup(patcher.stop)
  patcher.start()


class BisectClangTestMixin:  # pylint: disable=too-few-public-methods
  """Useful mixin for bisect_clang unittests."""

  def setUp(self):  # pylint: disable=invalid-name
    """Initialization method for unittests."""
    patch_environ(self)
    # bisect_clang reads SRC and WORK from the environment.
    os.environ['SRC'] = '/src'
    os.environ['WORK'] = '/work'


class GetClangBuildEnvTest(BisectClangTestMixin, unittest.TestCase):
  """Tests for get_clang_build_env."""

  def test_cflags(self):
    """Test that CFLAGS are not used compiling clang."""
    os.environ['CFLAGS'] = 'blah'
    self.assertNotIn('CFLAGS', bisect_clang.get_clang_build_env())

  def test_cxxflags(self):
    """Test that CXXFLAGS are not used compiling clang."""
    os.environ['CXXFLAGS'] = 'blah'
    self.assertNotIn('CXXFLAGS', bisect_clang.get_clang_build_env())

  def test_other_variables(self):
    """Test that other env vars are used when compiling clang."""
    key = 'other'
    value = 'blah'
    os.environ[key] = value
    self.assertEqual(value, bisect_clang.get_clang_build_env()[key])


def read_test_data(filename):
  """Returns data from |filename| in the test_data directory."""
  with open(os.path.join(FILE_DIRECTORY, 'test_data', filename)) as file_handle:
    return file_handle.read()


class SearchBisectOutputTest(BisectClangTestMixin, unittest.TestCase):
  """Tests for search_bisect_output."""

  def test_search_bisect_output(self):
    """Test that search_bisect_output finds the responsible commit when one
    exists."""
    test_data = read_test_data('culprit-commit.txt')
    self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d',
                     bisect_clang.search_bisect_output(test_data))

  def test_search_bisect_output_none(self):
    """Test that search_bisect_output doesnt find a non-existent culprit
    commit."""
    self.assertIsNone(bisect_clang.search_bisect_output('hello'))


def create_mock_popen(
    output=bytes('', 'utf-8'), err=bytes('', 'utf-8'), returncode=0):
  """Creates a mock subprocess.Popen. The returned class records every command
  it is constructed with in the class-level |commands| list."""

  class MockPopen:
    """Mock subprocess.Popen."""
    commands = []
    testcases_written = []

    def __init__(self, command, *args, **kwargs):  # pylint: disable=unused-argument
      """Inits the MockPopen."""
      stdout = kwargs.pop('stdout', None)
      self.command = command
      self.commands.append(command)
      self.stdout = None
      self.stderr = None
      self.returncode = returncode
      # Only keep |stdout| when it is a writable object (e.g. a file); the
      # subprocess.PIPE sentinel used by bisect_clang.execute has no write().
      if hasattr(stdout, 'write'):
        self.stdout = stdout

    def communicate(self, input_data=None):  # pylint: disable=unused-argument
      """Mock subprocess.Popen.communicate."""
      if self.stdout:
        self.stdout.write(output)

      if self.stderr:
        self.stderr.write(err)

      return output, err

    def poll(self, input_data=None):  # pylint: disable=unused-argument
      """Mock subprocess.Popen.poll."""
      return self.returncode

  return MockPopen


def mock_prepare_build_impl(llvm_project_path):  # pylint: disable=unused-argument
  """Mocked prepare_build function."""
  return '/work/llvm-build'


class BuildClangTest(BisectClangTestMixin, unittest.TestCase):
  """Tests for build_clang."""

  def test_build_clang_test(self):
    """Tests that build_clang works as intended."""
    with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
      with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
        llvm_src_dir = '/src/llvm-project'
        bisect_clang.build_clang(llvm_src_dir)
        self.assertEqual([['ninja', '-C', '/work/llvm-build', 'install']],
                         mock_popen.commands)


class GitRepoTest(BisectClangTestMixin, unittest.TestCase):
  """Tests for GitRepo."""

  # TODO(metzman): Mock filesystem. Until then, use a real directory.

  def setUp(self):
    super().setUp()
    self.git = bisect_clang.GitRepo(LLVM_REPO_PATH)
    self.good_commit = 'good_commit'
    self.bad_commit = 'bad_commit'
    self.test_command = 'testcommand'

  def test_do_command(self):
    """Test do_command creates a new process as intended."""
    # TODO(metzman): Test directory changing behavior.
    command = ['subcommand', '--option']
    with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
      self.git.do_command(command)
      self.assertEqual([get_git_command('subcommand', '--option')],
                       mock_popen.commands)

  def _test_test_start_commit_unexpected(self, label, commit, returncode):
    """Tests test_start_commit works as intended when the test returns an
    unexpected value."""

    def mock_execute_impl(command, *args, **kwargs):  # pylint: disable=unused-argument
      # Only the test command itself returns |returncode|; git commands
      # succeed.
      if command == self.test_command:
        return returncode, '', ''
      return 0, '', ''

    with mock.patch('bisect_clang.execute', mock_execute_impl):
      with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
        with self.assertRaises(bisect_clang.BisectError):
          self.git.test_start_commit(commit, label, self.test_command)

  def test_test_start_commit_bad_zero(self):
    """Tests test_start_commit works as intended when the test on the first bad
    commit returns 0."""
    self._test_test_start_commit_unexpected('bad', self.bad_commit, 0)

  def test_test_start_commit_good_nonzero(self):
    """Tests test_start_commit works as intended when the test on the first good
    commit returns nonzero."""
    self._test_test_start_commit_unexpected('good', self.good_commit, 1)

  def test_test_start_commit_good_zero(self):
    """Tests test_start_commit works as intended when the test on the first good
    commit returns 0."""
    self._test_test_start_commit_expected('good', self.good_commit, 0)  # pylint: disable=no-value-for-parameter

  @mock.patch('bisect_clang.build_clang')
  def _test_test_start_commit_expected(self, label, commit, returncode,
                                       mock_build_clang):
    """Tests test_start_commit works as intended when the test returns an
    expected value."""
    command_args = []

    def mock_execute_impl(command, *args, **kwargs):  # pylint: disable=unused-argument
      command_args.append(command)
      if command == self.test_command:
        return returncode, '', ''
      return 0, '', ''

    with mock.patch('bisect_clang.execute', mock_execute_impl):
      self.git.test_start_commit(commit, label, self.test_command)
      # checkout, run the oracle, then label the commit for bisect.
      self.assertEqual([
          get_git_command('checkout', commit), self.test_command,
          get_git_command('bisect', label)
      ], command_args)
      mock_build_clang.assert_called_once_with(LLVM_REPO_PATH)

  def test_test_start_commit_bad_nonzero(self):
    """Tests test_start_commit works as intended when the test on the first bad
    commit returns nonzero."""
    self._test_test_start_commit_expected('bad', self.bad_commit, 1)  # pylint: disable=no-value-for-parameter

  @mock.patch('bisect_clang.GitRepo.test_start_commit')
  def test_bisect_start(self, mock_test_start_commit):
    """Tests bisect_start works as intended."""
    with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
      self.git.bisect_start(self.good_commit, self.bad_commit,
                            self.test_command)
      self.assertEqual(get_git_command('bisect', 'start'),
                       mock_popen.commands[0])
      # The bad commit is tested before the good one.
      mock_test_start_commit.assert_has_calls([
          mock.call('bad_commit', 'bad', 'testcommand'),
          mock.call('good_commit', 'good', 'testcommand')
      ])

  def test_do_bisect_command(self):
    """Test do_bisect_command executes a git bisect subcommand as intended."""
    subcommand = 'subcommand'
    with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
      self.git.do_bisect_command(subcommand)
      self.assertEqual([get_git_command('bisect', subcommand)],
                       mock_popen.commands)

  @mock.patch('bisect_clang.build_clang')
  def _test_test_commit(self, label, output, returncode, mock_build_clang):
    """Test test_commit works as intended."""
    command_args = []

    def mock_execute_impl(command, *args, **kwargs):  # pylint: disable=unused-argument
      command_args.append(command)
      if command == self.test_command:
        return returncode, output, ''
      return 0, output, ''

    with mock.patch('bisect_clang.execute', mock_execute_impl):
      result = self.git.test_commit(self.test_command)
      self.assertEqual([self.test_command,
                        get_git_command('bisect', label)], command_args)
      mock_build_clang.assert_called_once_with(LLVM_REPO_PATH)
    return result

  def test_test_commit_good(self):
    """Test test_commit labels a good commit as good."""
    self.assertIsNone(self._test_test_commit('good', '', 0))  # pylint: disable=no-value-for-parameter

  def test_test_commit_bad(self):
    """Test test_commit labels a bad commit as bad."""
    self.assertIsNone(self._test_test_commit('bad', '', 1))  # pylint: disable=no-value-for-parameter

  def test_test_commit_culprit(self):
    """Test test_commit returns the culprit"""
    test_data = read_test_data('culprit-commit.txt')
    self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d',
                     self._test_test_commit('good', test_data, 0))  # pylint: disable=no-value-for-parameter


class GetTargetArchToBuildTest(unittest.TestCase):
  """Tests for get_target_arch_to_build."""

  def test_unrecognized(self):
    """Test that an unrecognized architecture raises an exception."""
    with mock.patch('bisect_clang.execute') as mock_execute:
      mock_execute.return_value = (None, 'mips', None)
      with self.assertRaises(Exception):
        bisect_clang.get_clang_target_arch()

  def test_recognized(self):
    """Test that a recognized architecture returns the expected value."""
    arch_pairs = {'x86_64': 'X86', 'aarch64': 'AArch64'}
    for uname_result, clang_target in arch_pairs.items():
      with mock.patch('bisect_clang.execute') as mock_execute:
        mock_execute.return_value = (None, uname_result, None)
        self.assertEqual(clang_target, bisect_clang.get_clang_target_arch())
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/cargo b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/cargo
new file mode 100644
index 0000000000000000000000000000000000000000..4376cfa5d4a339e1ec59193c1af58061e772fa96
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/cargo
@@ -0,0 +1,55 @@
#!/bin/bash -eu
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a wrapper around calling cargo
# This just expands RUSTFLAGS in case of a coverage build
# We need this until https://github.com/rust-lang/cargo/issues/5450 is merged
# because cargo uses relative paths for the current crate
# and absolute paths for its dependencies
#
################################################################################

# This script runs under `bash -eu` (see shebang): referencing a bare $1/$2
# aborts the wrapper whenever cargo is invoked with fewer positional arguments
# (e.g. plain `cargo` or `cargo fuzz`). Use the "${N-}" default form so a
# missing argument simply compares unequal.
if [ "$SANITIZER" = "coverage" ] && [ "${1-}" = "build" ]
then
  # Remap the crate's relative `src` paths to absolute ones so coverage
  # reports can locate the sources.
  crate_src_abspath=`cargo metadata --no-deps --format-version 1 | jq -r '.workspace_root'`
  export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix src=$crate_src_abspath/src"
fi

if [ "$SANITIZER" = "coverage" ] && [ "${1-}" = "fuzz" ] && [ "${2-}" = "build" ]
then
  # hack to turn cargo fuzz build into cargo build so as to get coverage
  # cargo fuzz adds "--target" "x86_64-unknown-linux-gnu"
  (
    # go into fuzz directory if not already the case
    cd fuzz || true
    fuzz_src_abspath=`pwd`
    # Default directory is fuzz_targets, but some projects like image-rs use fuzzers.
    while read i; do
      export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix $i=$fuzz_src_abspath/$i"
      # Bash while syntax so that we modify RUSTFLAGS in main shell instead of a subshell.
    done <<< "$(find . -name "*.rs" | cut -d/ -f2 | uniq)"
    # we do not want to trigger debug assertions and stops
    export RUSTFLAGS="$RUSTFLAGS -C debug-assertions=no"
    # do not optimize with --release, leading to Malformed instrumentation profile data
    cargo build --bins
    # copies the build output in the expected target directory
    cd `cargo metadata --format-version 1 --no-deps | jq -r '.target_directory'`
    mkdir -p x86_64-unknown-linux-gnu/release
    cp -r debug/* x86_64-unknown-linux-gnu/release/
  )
  exit 0
fi

# Every other invocation: forward unchanged to the real cargo binary.
/rust/bin/cargo "$@"
# --- patch metadata (next file in the mangled diff: compile) ---
# diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile
# new file mode 100644
# index 0000000000000000000000000000000000000000..8aa6580bc3d393ca5b75499c5fc0064bed1ee80f
# --- /dev/null
# +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile
# @@ -0,0 +1,420 @@
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

echo "---------------------------------------------------------------"

# NOTE(review): presumably lowers mmap ASLR entropy for sanitizer
# compatibility — confirm against base-image docs.
sysctl -w vm.mmap_rnd_bits=28

OSS_FUZZ_ON_DEMAND="${OSS_FUZZ_ON_DEMAND:-0}"

# Used for Rust introspector builds
RUST_SANITIZER=$SANITIZER

# Validate engine/sanitizer/architecture combinations per language.
if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
  if [ "$FUZZING_ENGINE" != "libfuzzer" ] && [ "$FUZZING_ENGINE" != "wycheproof" ]; then
    echo "ERROR: JVM projects can be fuzzed with libFuzzer or tested with wycheproof engines only."
    exit 1
  fi
  if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "none" ] && [ "$SANITIZER" != "introspector" ]; then
    echo "ERROR: JVM projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer or Introspector only."
    exit 1
  fi
  if [ "$ARCHITECTURE" != "x86_64" ]; then
    echo "ERROR: JVM projects can be fuzzed on x86_64 architecture only."
    exit 1
  fi
fi

if [ "$FUZZING_LANGUAGE" = "rust" ]; then
  if [ "$SANITIZER" = "introspector" ]; then
    # introspector sanitizer flag will cause cargo build to fail. Remove it
    # temporarily, RUST_SANITIZER will hold the original sanitizer.
    export SANITIZER=address
  fi
fi


if [ "$FUZZING_LANGUAGE" = "javascript" ]; then
  if [ "$FUZZING_ENGINE" != "libfuzzer" ]; then
    echo "ERROR: JavaScript projects can be fuzzed with libFuzzer engine only."
    exit 1
  fi
  if [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "none" ]; then
    echo "ERROR: JavaScript projects cannot be fuzzed with sanitizers."
    exit 1
  fi
  if [ "$ARCHITECTURE" != "x86_64" ]; then
    echo "ERROR: JavaScript projects can be fuzzed on x86_64 architecture only."
    exit 1
  fi
fi

if [ "$FUZZING_LANGUAGE" = "python" ]; then
  if [ "$FUZZING_ENGINE" != "libfuzzer" ]; then
    echo "ERROR: Python projects can be fuzzed with libFuzzer engine only."
    exit 1
  fi
  if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "introspector" ]; then
    echo "ERROR: Python projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer or Coverage or Fuzz Introspector only."
    exit 1
  fi
  if [ "$ARCHITECTURE" != "x86_64" ]; then
    echo "ERROR: Python projects can be fuzzed on x86_64 architecture only."
    exit 1
  fi
fi

# Pick sanitizer flags from the per-sanitizer variable (e.g.
# SANITIZER_FLAGS_address) unless they were given explicitly.
if [ -z "${SANITIZER_FLAGS-}" ]; then
  FLAGS_VAR="SANITIZER_FLAGS_${SANITIZER}"
  export SANITIZER_FLAGS=${!FLAGS_VAR-}
fi

if [[ $ARCHITECTURE == "i386" ]]; then
  export CFLAGS="-m32 $CFLAGS"
  cp -R /usr/i386/lib/* /usr/local/lib
  cp -R /usr/i386/include/* /usr/local/include
fi

# Don't use a fuzzing engine with Jazzer which has libFuzzer built-in or with
# FuzzBench which will provide the fuzzing engine.
if [[ $FUZZING_ENGINE != "none" ]] && [[ $FUZZING_LANGUAGE != "jvm" ]] && [[ "${OSS_FUZZ_ON_DEMAND}" == "0" ]] ; then
  # compile script might override environment, use . to call it.
  . compile_${FUZZING_ENGINE}
fi

if [[ $SANITIZER_FLAGS = *sanitize=memory* ]]
then
  # Take all libraries from lib/msan
  # export CXXFLAGS_EXTRA="-L/usr/msan/lib $CXXFLAGS_EXTRA"
  cp -R /usr/msan/lib/* /usr/local/lib/x86_64-unknown-linux-gnu/
  cp -R /usr/msan/include/* /usr/local/include

  echo 'Building without MSan instrumented libraries.'
fi

# Coverage flag overrides.
COVERAGE_FLAGS_VAR="COVERAGE_FLAGS_${SANITIZER}"
if [[ -n ${!COVERAGE_FLAGS_VAR+x} ]]
then
  export COVERAGE_FLAGS="${!COVERAGE_FLAGS_VAR}"
fi

# Only need the default coverage instrumentation for libFuzzer or honggfuzz.
# Other engines bring their own.
if [ $FUZZING_ENGINE = "none" ] || [ $FUZZING_ENGINE = "afl" ] || [ $FUZZING_ENGINE = "centipede" ] || [ "${OSS_FUZZ_ON_DEMAND}" != "0" ]; then
  export COVERAGE_FLAGS=
fi

# Rust does not support sanitizers and coverage flags via CFLAGS/CXXFLAGS, so
# use RUSTFLAGS.
# FIXME: Support code coverage once support is in.
# See https://github.com/rust-lang/rust/issues/34701.
if [ "$RUST_SANITIZER" == "introspector" ]; then
  export RUSTFLAGS="-Cdebuginfo=2 -Cforce-frame-pointers"
elif [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "none" ] && [ "$ARCHITECTURE" != 'i386' ]; then
  export RUSTFLAGS="--cfg fuzzing -Zsanitizer=${SANITIZER} -Cdebuginfo=1 -Cforce-frame-pointers"
else
  export RUSTFLAGS="--cfg fuzzing -Cdebuginfo=1 -Cforce-frame-pointers"
fi
if [ "$SANITIZER" = "coverage" ]
then
  # link to C++ from comment in f5098035eb1a14aa966c8651d88ea3d64323823d
  export RUSTFLAGS="$RUSTFLAGS -Cinstrument-coverage -C link-arg=-lc++"
fi

# Add Rust libfuzzer flags.
# See https://github.com/rust-fuzz/libfuzzer/blob/master/build.rs#L12.
export CUSTOM_LIBFUZZER_PATH="$LIB_FUZZING_ENGINE_DEPRECATED"
export CUSTOM_LIBFUZZER_STD_CXX=c++

export CFLAGS="$CFLAGS $SANITIZER_FLAGS $COVERAGE_FLAGS"
export CXXFLAGS="$CFLAGS $CXXFLAGS_EXTRA"

if [ "$SANITIZER" = "undefined" ]; then
  # Disable "function" sanitizer for C code for now, because many projects,
  # possibly via legacy C code are affected.
  # The projects should be fixed and this workaround be removed in the future.
  # TODO(#11778):
  # https://github.com/google/oss-fuzz/issues/11778
  export CFLAGS="$CFLAGS -fno-sanitize=function"
fi

if [ "$FUZZING_LANGUAGE" = "go" ]; then
  # required by Go 1.20
  export CXX="${CXX} -lresolv"
fi

if [ "$FUZZING_LANGUAGE" = "python" ]; then
  # Ship the Atheris sanitizer+libFuzzer shared object alongside the targets.
  sanitizer_with_fuzzer_lib_dir=`python3 -c "import atheris; import os; print(atheris.path())"`
  sanitizer_with_fuzzer_output_lib=$OUT/sanitizer_with_fuzzer.so
  if [ "$SANITIZER" = "address" ]; then
    cp $sanitizer_with_fuzzer_lib_dir/asan_with_fuzzer.so $sanitizer_with_fuzzer_output_lib
  elif [ "$SANITIZER" = "undefined" ]; then
    cp $sanitizer_with_fuzzer_lib_dir/ubsan_with_fuzzer.so $sanitizer_with_fuzzer_output_lib
  fi

  # Disable leak checking as it is unsupported.
  export CFLAGS="$CFLAGS -fno-sanitize=function,leak,vptr,"
  export CXXFLAGS="$CXXFLAGS -fno-sanitize=function,leak,vptr"
fi

# Copy latest llvm-symbolizer in $OUT for stack symbolization.
cp $(which llvm-symbolizer) $OUT/

# Copy Jazzer to $OUT if needed.
if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
  cp $(which jazzer_agent_deploy.jar) $(which jazzer_driver) $(which jazzer_junit.jar) $OUT/
  jazzer_driver_with_sanitizer=$OUT/jazzer_driver_with_sanitizer
  if [ "$SANITIZER" = "address" ]; then
    cat > $jazzer_driver_with_sanitizer << 'EOF'
#!/bin/bash
this_dir=$(dirname "$0")
"$this_dir/jazzer_driver" --asan "$@"
EOF
  elif [ "$SANITIZER" = "undefined" ]; then
    cat > $jazzer_driver_with_sanitizer << 'EOF'
#!/bin/bash
this_dir=$(dirname "$0")
"$this_dir/jazzer_driver" --ubsan "$@"
EOF
  elif [ "$SANITIZER" = "coverage" ] || [ "$SANITIZER" = "introspector" ]; then
    # Coverage & introspector builds require no instrumentation.
    cp $(which jazzer_driver) $jazzer_driver_with_sanitizer
  fi
  chmod +x $jazzer_driver_with_sanitizer

  # Disable leak checking since the JVM triggers too many false positives.
  export CFLAGS="$CFLAGS -fno-sanitize=leak"
  export CXXFLAGS="$CXXFLAGS -fno-sanitize=leak"
fi

# First (pre-build) introspector pass: install tooling and run the "light"
# static analysis, saving its output to /tmp for the post-build pass.
if [ "$SANITIZER" = "introspector" ] || [ "$RUST_SANITIZER" = "introspector" ]; then
  export AR=llvm-ar
  export NM=llvm-nm
  export RANLIB=llvm-ranlib

  export CFLAGS="$CFLAGS -g"
  export CXXFLAGS="$CXXFLAGS -g"
  export FI_BRANCH_PROFILE=1
  export FUZZ_INTROSPECTOR=1
  export FUZZ_INTROSPECTOR_AUTO_FUZZ=1

  # Move ar and ranlib
  mv /usr/bin/ar /usr/bin/old-ar
  mv /usr/bin/nm /usr/bin/old-nm
  mv /usr/bin/ranlib /usr/bin/old-ranlib

  ln -sf /usr/local/bin/llvm-ar /usr/bin/ar
  ln -sf /usr/local/bin/llvm-nm /usr/bin/nm
  ln -sf /usr/local/bin/llvm-ranlib /usr/bin/ranlib

  apt-get install -y libjpeg-dev zlib1g-dev libyaml-dev
  python3 -m pip install --upgrade pip setuptools
  python3 -m pip install cxxfilt pyyaml beautifulsoup4 lxml soupsieve rust-demangler
  python3 -m pip install --prefer-binary matplotlib

  # Install Fuzz-Introspector
  pushd /fuzz-introspector/src
  python3 -m pip install -e .
  popd

  if [ "$FUZZING_LANGUAGE" = "python" ]; then
    python3 /fuzz-introspector/src/main.py light --language=python
    cp -rf $SRC/inspector/ /tmp/inspector-saved
  elif [ "$FUZZING_LANGUAGE" = "jvm" ]; then
    python3 /fuzz-introspector/src/main.py light --language=jvm
    cp -rf $SRC/inspector/ /tmp/inspector-saved
  elif [ "$FUZZING_LANGUAGE" = "rust" ]; then
    python3 /fuzz-introspector/src/main.py light --language=rust
    cp -rf $SRC/inspector/ /tmp/inspector-saved
  else
    python3 /fuzz-introspector/src/main.py light

    # Make a copy of the light. This is needed because we run two versions of
    # introspector: one based on pure static analysis and one based on
    # regular LTO.
    cp -rf $SRC/inspector/ /tmp/inspector-saved


    # Move coverage report.
    if [ -d "$OUT/textcov_reports" ]
    then
      find $OUT/textcov_reports/ -name "*.covreport" -exec cp {} $SRC/inspector/ \;
      find $OUT/textcov_reports/ -name "*.json" -exec cp {} $SRC/inspector/ \;
    fi

    # Make fuzz-introspector HTML report using light approach.
    REPORT_ARGS="--name=$PROJECT_NAME"

    # Only pass coverage_url when COVERAGE_URL is set (in cloud builds)
    if [[ ! -z "${COVERAGE_URL+x}" ]]; then
      REPORT_ARGS="$REPORT_ARGS --coverage-url=${COVERAGE_URL}"
    fi

    # Run pure static analysis fuzz introspector
    fuzz-introspector full --target-dir=$SRC \
      --language=${FUZZING_LANGUAGE} \
      --out-dir=$SRC/inspector \
      ${REPORT_ARGS}
  fi

  rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
fi

echo "---------------------------------------------------------------"
echo "CC=$CC"
echo "CXX=$CXX"
echo "CFLAGS=$CFLAGS"
echo "CXXFLAGS=$CXXFLAGS"
echo "RUSTFLAGS=$RUSTFLAGS"
echo "---------------------------------------------------------------"

if [ "${OSS_FUZZ_ON_DEMAND}" != "0" ]; then
  fuzzbench_build
  cp $(which llvm-symbolizer) $OUT/
  exit 0
fi


if [[ ! -z "${CAPTURE_REPLAY_SCRIPT-}" ]]; then
  # Capture a replaying build script which can be used for replaying the build
  # after a vanilla build. This script is meant to be used in a cached
  # container.
  python3 -m pip install bashlex
  python3 /usr/local/bin/bash_parser.py $SRC/build.sh
fi

# Prepare the build command to run the project's build script.
if [[ ! -z "${REPLAY_ENABLED-}" ]]; then
  # If this is a replay, then use replay_build.sh. This is expected to be
  # running in a cached container where a build has already happened prior.
  BUILD_CMD="bash -eux $SRC/replay_build.sh"
else
  BUILD_CMD="bash -eux $SRC/build.sh"
fi

# Set +u temporarily to continue even if GOPATH and OSSFUZZ_RUSTPATH are undefined.
set +u
# We need to preserve source code files for generating a code coverage report.
# We need exact files that were compiled, so copy both $SRC and $WORK dirs.
COPY_SOURCES_CMD="cp -rL --parents $SRC $WORK /usr/include /usr/local/include $GOPATH $OSSFUZZ_RUSTPATH /rustc $OUT"
set -u

if [ "$FUZZING_LANGUAGE" = "rust" ]; then
  # Copy rust std lib to its path with a hash.
  export rustch=`rustc --version --verbose | grep commit-hash | cut -d' ' -f2`
  mkdir -p /rustc/$rustch/
  export rustdef=`rustup toolchain list | grep default | cut -d' ' -f1`
  cp -r /rust/rustup/toolchains/$rustdef/lib/rustlib/src/rust/library/ /rustc/$rustch/
fi

# Run the project's build script, optionally as an unprivileged user.
if [ "${BUILD_UID-0}" -ne "0" ]; then
  adduser -u $BUILD_UID --disabled-password --gecos '' builder
  chown -R builder $SRC $OUT $WORK
  su -c "$BUILD_CMD" builder
  if [ "$SANITIZER" = "coverage" ]; then
    # Some directories have broken symlinks (e.g. honggfuzz), ignore the errors.
    su -c "$COPY_SOURCES_CMD" builder 2>/dev/null || true
  fi
else
  $BUILD_CMD
  if [ "$SANITIZER" = "coverage" ]; then
    # Some directories have broken symlinks (e.g. honggfuzz), ignore the errors.
    $COPY_SOURCES_CMD 2>/dev/null || true
  fi
fi

# Second (post-build) introspector pass: combine the saved light analysis
# with the data emitted during the instrumented build and generate reports.
if [ "$SANITIZER" = "introspector" ] || [ "$RUST_SANITIZER" = "introspector" ]; then
  unset CXXFLAGS
  unset CFLAGS
  export G_ANALYTICS_TAG="G-8WTFM1Y62J"

  # If we get to here, it means the e.g. LTO had no problems and succeeded.
  # To this end, we will restore the original light analysis and use the
  # LTO processing itself.
  rm -rf $SRC/inspector
  cp -rf /tmp/inspector-saved $SRC/inspector

  cd /fuzz-introspector/src
  python3 -m pip install -e .
  cd /src/

  if [ "$FUZZING_LANGUAGE" = "rust" ]; then
    # Restore the sanitizer flag for rust
    export SANITIZER="introspector"
  fi

  mkdir -p $SRC/inspector
  find $SRC/ -name "fuzzerLogFile-*.data" -exec cp {} $SRC/inspector/ \;
  find $SRC/ -name "fuzzerLogFile-*.data.yaml" -exec cp {} $SRC/inspector/ \;
  find $SRC/ -name "fuzzerLogFile-*.data.debug_*" -exec cp {} $SRC/inspector/ \;
  find $SRC/ -name "allFunctionsWithMain-*.yaml" -exec cp {} $SRC/inspector/ \;

  # Move coverage report.
  if [ -d "$OUT/textcov_reports" ]
  then
    find $OUT/textcov_reports/ -name "*.covreport" -exec cp {} $SRC/inspector/ \;
    find $OUT/textcov_reports/ -name "*.json" -exec cp {} $SRC/inspector/ \;
  fi

  cd $SRC/inspector

  # Make fuzz-introspector HTML report.
  REPORT_ARGS="--name=$PROJECT_NAME"
  # Only pass coverage_url when COVERAGE_URL is set (in cloud builds)
  if [[ ! -z "${COVERAGE_URL+x}" ]]; then
    REPORT_ARGS="$REPORT_ARGS --coverage-url=${COVERAGE_URL}"
  fi

  # Do different things depending on languages
  if [ "$FUZZING_LANGUAGE" = "python" ]; then
    echo "GOING python route"
    set -x
    REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC/inspector"
    REPORT_ARGS="$REPORT_ARGS --language=python"
    fuzz-introspector report $REPORT_ARGS
    rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
  elif [ "$FUZZING_LANGUAGE" = "jvm" ]; then
    echo "GOING jvm route"
    set -x
    find $OUT/ -name "jacoco.xml" -exec cp {} $SRC/inspector/ \;
    REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC --out-dir=$SRC/inspector"
    REPORT_ARGS="$REPORT_ARGS --language=jvm"
    fuzz-introspector full $REPORT_ARGS
    rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
  elif [ "$FUZZING_LANGUAGE" = "rust" ]; then
    echo "GOING rust route"
    REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC --out-dir=$SRC/inspector"
    REPORT_ARGS="$REPORT_ARGS --language=rust"
    fuzz-introspector full $REPORT_ARGS
    rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
  else
    # C/C++
    mkdir -p $SRC/inspector
    # Correlate fuzzer binaries to fuzz-introspector's raw data
    fuzz-introspector correlate --binaries-dir=$OUT/

    # Generate fuzz-introspector HTML report, this generates
    # the file exe_to_fuzz_introspector_logs.yaml
    REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC/inspector"
    # Use the just-generated correlation file
    REPORT_ARGS="$REPORT_ARGS --correlation-file=exe_to_fuzz_introspector_logs.yaml"
    fuzz-introspector report $REPORT_ARGS

    rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
  fi
fi
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_afl b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_afl
new file mode 100644
index 0000000000000000000000000000000000000000..484d4668c86648d042081a8a1e254dd6a5c36256
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_afl
@@ -0,0 +1,53 @@
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# If LLVM once again does weird changes then enable this:
#export AFL_LLVM_INSTRUMENT=LLVM-NATIVE

# AFL++ setup
echo "Copying precompiled AFL++"

# Copy AFL++ tools necessary for fuzzing.
pushd $SRC/aflplusplus > /dev/null

cp -f libAFLDriver.a $LIB_FUZZING_ENGINE

# Some important projects include libraries, copy those even when they don't
# start with "afl-". Use "sort -u" to avoid a warning about duplicates.
ls afl-* *.txt *.a *.o *.so | sort -u | xargs cp -t $OUT
export CC="$SRC/aflplusplus/afl-clang-fast"
export CXX="$SRC/aflplusplus/afl-clang-fast++"

# Set sane AFL++ environment defaults:
# Be quiet, otherwise this can break some builds.
export AFL_QUIET=1
# No leak errors during builds.
export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_error=1"
# Do not abort on any problems (because this is during build where it is ok)
export AFL_IGNORE_PROBLEMS=1
# No complain on unknown AFL environment variables
export AFL_IGNORE_UNKNOWN_ENVS=1

# Provide a way to document the AFL++ options used in this build:
echo
echo AFL++ target compilation setup:
env | egrep '^AFL_' | tee "$OUT/afl_options.txt"
echo

popd > /dev/null

echo " done."
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_centipede b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_centipede
new file mode 100644
index 0000000000000000000000000000000000000000..dee31e2e641f2d6e342ec13f6ff176ca53d6726d
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_centipede
@@ -0,0 +1,32 @@
#!/bin/bash -eu
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +################################################################################ + +echo "Skipping compilation; using precompiled centipede" + +if [[ "$SANITIZER" == 'none' ]]; then + cp "$CENTIPEDE_BIN_DIR/centipede" "$OUT" +fi + +cp "$CENTIPEDE_BIN_DIR/libcentipede_runner.pic.a" "$LIB_FUZZING_ENGINE" + +export CENTIPEDE_FLAGS=`cat "$SRC/fuzztest/centipede/clang-flags.txt" | tr '\n' ' '` +export LIBRARIES_FLAGS="-Wno-unused-command-line-argument -Wl,-ldl -Wl,-lrt -Wl,-lpthread -Wl,$SRC/fuzztest/centipede/weak.o" + +export CFLAGS="$CFLAGS $CENTIPEDE_FLAGS $LIBRARIES_FLAGS" +export CXXFLAGS="$CXXFLAGS $CENTIPEDE_FLAGS $LIBRARIES_FLAGS" + +echo 'done.' diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh new file mode 100644 index 0000000000000000000000000000000000000000..8377920e53284d940aa467b29c56bd14e0c6c437 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh @@ -0,0 +1,126 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +set -x + +# In order to identify fuzztest test case "bazel query" is used to search +# the project. 
A search of the entire project is done with a default "...",
+# however, some projects may fail to, or have very long processing time, if
+# searching the entire project. Additionally, it may include fuzzers in
+# dependencies, which should not be built as part of a given project.
+# Tensorflow is an example project that will fail when the entire project is
+# queried. FUZZTEST_TARGET_FOLDER makes it possible to specify the folder
+# where fuzztest fuzzers should be searched for. FUZZTEST_TARGET_FOLDER is passed
+# to "bazel query" below.
+if [[ ${FUZZTEST_TARGET_FOLDER:-"unset"} == "unset" ]];
+then
+  export TARGET_FOLDER="..."
+else
+  TARGET_FOLDER=${FUZZTEST_TARGET_FOLDER}
+fi
+
+BUILD_ARGS="--config=oss-fuzz --subcommands"
+if [[ ${FUZZTEST_EXTRA_ARGS:-"unset"} != "unset" ]];
+then
+  BUILD_ARGS="$BUILD_ARGS ${FUZZTEST_EXTRA_ARGS}"
+fi
+
+# Trigger setup_configs rule of fuzztest as it generates the necessary
+# configuration file based on OSS-Fuzz environment variables.
+bazel run @com_google_fuzztest//bazel:setup_configs >> /etc/bazel.bazelrc
+
+# Bazel target names of the fuzz binaries.
+FUZZ_TEST_BINARIES=$(bazel query "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))")
+
+# Bazel output paths of the fuzz binaries.
+FUZZ_TEST_BINARIES_OUT_PATHS=$(bazel cquery "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))" --output=files)
+
+# Build the project and fuzz binaries
+# Expose `FUZZTEST_EXTRA_TARGETS` environment variable, in the event a project
+# includes non-FuzzTest fuzzers then this can be used to compile these in the
+# same `bazel build` command as when building the FuzzTest fuzzers.
+# This is to avoid having to call `bazel build` twice.
+bazel build $BUILD_ARGS -- ${FUZZ_TEST_BINARIES[*]} ${FUZZTEST_EXTRA_TARGETS:-}
+
+# Iterate the fuzz binaries and list each fuzz entrypoint in the binary. 
For +# each entrypoint create a wrapper script that calls into the binaries the +# given entrypoint as argument. +# The scripts will be named: +# {binary_name}@{fuzztest_entrypoint} +for fuzz_main_file in $FUZZ_TEST_BINARIES_OUT_PATHS; do + FUZZ_TESTS=$($fuzz_main_file --list_fuzz_tests) + cp ${fuzz_main_file} $OUT/ + fuzz_basename=$(basename $fuzz_main_file) + chmod -x $OUT/$fuzz_basename + for fuzz_entrypoint in $FUZZ_TESTS; do + TARGET_FUZZER="${fuzz_basename}@$fuzz_entrypoint" + + # Write executer script + echo "#!/bin/sh +# LLVMFuzzerTestOneInput for fuzzer detection. +this_dir=\$(dirname \"\$0\") +chmod +x \$this_dir/$fuzz_basename +\$this_dir/$fuzz_basename --fuzz=$fuzz_entrypoint -- \$@" > $OUT/$TARGET_FUZZER + chmod +x $OUT/$TARGET_FUZZER + done +done + +# Synchronise coverage directory to bazel output artifacts. This is a +# best-effort basis in that it will include source code in common +# bazel output folders. +# For projects that store results in non-standard folders or want to +# manage what code to include in the coverage report more specifically, +# the FUZZTEST_DO_SYNC environment variable is made available. Projects +# can then implement a custom way of synchronising source code with the +# coverage build. Set FUZZTEST_DO_SYNC to something other than "yes" and +# no effort will be made to automatically synchronise the source code with +# the code coverage visualisation utility. +if [[ "$SANITIZER" = "coverage" && ${FUZZTEST_DO_SYNC:-"yes"} == "yes" ]] +then + # Synchronize bazel source files to coverage collection. + declare -r REMAP_PATH="${OUT}/proc/self/cwd" + mkdir -p "${REMAP_PATH}" + + # Synchronize the folder bazel-BAZEL_OUT_PROJECT. + declare -r RSYNC_FILTER_ARGS=("--include" "*.h" "--include" "*.cc" "--include" \ + "*.hpp" "--include" "*.cpp" "--include" "*.c" "--include" "*/" "--include" "*.inc" \ + "--exclude" "*") + + project_folders="$(find . 
-name 'bazel-*' -type l -printf '%P\n' | \ + grep -v -x -F \ + -e 'bazel-bin' \ + -e 'bazel-testlogs')" + for link in $project_folders; do + if [[ -d "${PWD}"/$link/external ]] + then + rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/external "${REMAP_PATH}" + fi + # k8-opt is a common path for storing bazel output artifacts, e.g. bazel-out/k8-opt. + # It's the output folder for default amd-64 builds, but projects may specify custom + # platform output directories, see: https://github.com/bazelbuild/bazel/issues/13818 + # We support the default at the moment, and if a project needs custom synchronizing of + # output artifacts and code coverage we currently recommend using FUZZTEST_DO_SYNC. + if [[ -d "${PWD}"/$link/k8-opt ]] + then + rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/k8-opt "${REMAP_PATH}"/$link + fi + done + + # Delete symlinks and sync the current folder. + find . -type l -ls -delete + rsync -av ${PWD}/ "${REMAP_PATH}" +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..df7d3e24d23c1caf7e262040021fa04240efa8bb --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer @@ -0,0 +1,69 @@ +#!/bin/bash -eu +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +path=$1 +function=$2 +fuzzer=$3 +tags="-tags gofuzz" +if [[ $# -eq 4 ]]; then + tags="-tags $4" +fi + +# makes directory change temporary +( +cd $GOPATH/src/$path || true +# in the case we are in the right directory, with go.mod but no go.sum +go mod tidy || true +# project was downloaded with go get if go list fails +go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; } || cd - +# project does not have go.mod if go list fails again +go list $tags $path || { go mod init $path && go mod tidy ;} + +if [[ $SANITIZER = *coverage* ]]; then + fuzzed_package=`go list $tags -f '{{.Name}}' $path` + abspath=`go list $tags -f {{.Dir}} $path` + cd $abspath + cp $GOPATH/ossfuzz_coverage_runner.go ./"${function,,}"_test.go + sed -i -e 's/FuzzFunction/'$function'/' ./"${function,,}"_test.go + sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go + sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go + + # The repo is the module path/name, which is already created above in case it doesn't exist, + # but not always the same as the module path. This is necessary to handle SIV properly. + fuzzed_repo=$(go list $tags -f {{.Module}} "$path") + abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo` + # give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir + echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath + # Additional packages for which to get coverage. 
+ pkgaddcov="" + # to prevent bash from failing about unbound variable + GO_COV_ADD_PKG_SET=${GO_COV_ADD_PKG:-} + if [[ -n "${GO_COV_ADD_PKG_SET}" ]]; then + pkgaddcov=","$GO_COV_ADD_PKG + abspath_repo=`go list -m $tags -f {{.Dir}} $GO_COV_ADD_PKG || go list $tags -f {{.Dir}} $GO_COV_ADD_PKG` + echo "s=^$GO_COV_ADD_PKG"="$abspath_repo"= >> $OUT/$fuzzer.gocovpath + fi + go test -run Test${function}Corpus -v $tags -coverpkg $fuzzed_repo/...$pkgaddcov -c -o $OUT/$fuzzer $path +else + # Compile and instrument all Go files relevant to this fuzz target. + echo "Running go-fuzz $tags -func $function -o $fuzzer.a $path" + go-fuzz $tags -func $function -o $fuzzer.a $path + + # Link Go code ($fuzzer.a) with fuzzing engine to produce fuzz target binary. + $CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer +fi +) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_honggfuzz b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_honggfuzz new file mode 100644 index 0000000000000000000000000000000000000000..cf206e46a4686462e1af5acc354efa13b8538976 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_honggfuzz @@ -0,0 +1,33 @@ +#!/bin/bash -eu +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +echo "Skipping compilation; using precompiled honggfuzz" + +cp $SRC/honggfuzz/honggfuzz.a $LIB_FUZZING_ENGINE +cp $SRC/honggfuzz/honggfuzz $OUT/ + +# Set flags necessary for netdriver compilation. +export LIB_HFND="-Wl,-u,LIBHFNETDRIVER_module_netdriver -Wl,--start-group $SRC/honggfuzz/libhfnetdriver/libhfnetdriver.a $SRC/honggfuzz/libhfcommon/libhfcommon.a -Wl,--end-group" + +export HFND_CXXFLAGS='-DHFND_FUZZING_ENTRY_FUNCTION_CXX(x,y)=extern const char* LIBHFNETDRIVER_module_netdriver;const char** LIBHFNETDRIVER_tmp1 = &LIBHFNETDRIVER_module_netdriver;extern "C" int HonggfuzzNetDriver_main(x,y);int HonggfuzzNetDriver_main(x,y)' +export HFND_CFLAGS='-DHFND_FUZZING_ENTRY_FUNCTION(x,y)=extern const char* LIBHFNETDRIVER_module_netdriver;const char** LIBHFNETDRIVER_tmp1 = &LIBHFNETDRIVER_module_netdriver;int HonggfuzzNetDriver_main(x,y);int HonggfuzzNetDriver_main(x,y)' + +# Custom coverage flags, roughly in sync with: +# https://github.com/google/honggfuzz/blob/oss-fuzz/hfuzz_cc/hfuzz-cc.c +export COVERAGE_FLAGS="-fsanitize-coverage=trace-pc-guard,indirect-calls,trace-cmp" + +echo " done." diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_javascript_fuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_javascript_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..83ece10aa814e7e8ee3440583ff124c4fa03764e --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_javascript_fuzzer @@ -0,0 +1,37 @@ +#!/bin/bash -eu +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +project=$1 +# Path the fuzz target source file relative to the project's root. +fuzz_target=$2 +# Arguments to pass to Jazzer.js +jazzerjs_args=${@:3} + +# Copy source code into the $OUT directory and install Jazzer.js into the project. +if [ ! -d $OUT/$project ]; then + cp -r $SRC/$project $OUT/$project +fi + +fuzzer_basename=$(basename -s .js $fuzz_target) + +# Create an execution wrapper that executes Jazzer.js with the correct arguments. +echo "#!/bin/bash +# LLVMFuzzerTestOneInput so that the wrapper script is recognized as a fuzz target for 'check_build'. +project_dir=\$(dirname \"\$0\")/$project +\$project_dir/node_modules/@jazzer.js/core/dist/cli.js \$project_dir/$fuzz_target $jazzerjs_args \$JAZZERJS_EXTRA_ARGS -- \$@" > $OUT/$fuzzer_basename + +chmod +x $OUT/$fuzzer_basename diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_libfuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_libfuzzer new file mode 100644 index 0000000000000000000000000000000000000000..9acd0ccb64256e2b91e008df7ce1f1ee06ebd865 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_libfuzzer @@ -0,0 +1,25 @@ +#!/bin/bash -eu +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo -n "Compiling libFuzzer to $LIB_FUZZING_ENGINE... " +export LIB_FUZZING_ENGINE="-fsanitize=fuzzer" +if [ "$FUZZING_LANGUAGE" = "go" ]; then + export LIB_FUZZING_ENGINE="$LIB_FUZZING_ENGINE $GOPATH/gosigfuzz/gosigfuzz.o" +fi + +cp /usr/local/lib/clang/*/lib/$ARCHITECTURE-unknown-linux-gnu/libclang_rt.fuzzer.a $LIB_FUZZING_ENGINE_DEPRECATED +echo " done." diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..7a7fa67df811fa4d4a24f3f0d80fd17729fbb52b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer @@ -0,0 +1,60 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +function build_native_go_fuzzer() { + fuzzer=$1 + function=$2 + path=$3 + tags="-tags gofuzz" + + if [[ $SANITIZER == *coverage* ]]; then + current_dir=$(pwd) + mkdir $OUT/rawfuzzers || true + cd $abs_file_dir + go test $tags -c -run $fuzzer -o $OUT/$fuzzer -cover + cp "${fuzzer_filename}" "${OUT}/rawfuzzers/${fuzzer}" + + fuzzed_repo=$(go list $tags -f {{.Module}} "$path") + abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo` + # give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir + echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath + + cd $current_dir + else + go-118-fuzz-build $tags -o $fuzzer.a -func $function $abs_file_dir + $CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer + fi +} + +path=$1 +function=$2 +fuzzer=$3 +tags="-tags gofuzz" + +# Get absolute path. +abs_file_dir=$(go list $tags -f {{.Dir}} $path) + +# TODO(adamkorcz): Get rid of "-r" flag here. +fuzzer_filename=$(grep -r -l --include='*.go' -s "$function" "${abs_file_dir}") + +# Test if file contains a line with "func $function" and "testing.F". 
+if [ $(grep -r "func $function" $fuzzer_filename | grep "testing.F" | wc -l) -eq 1 ] +then + build_native_go_fuzzer $fuzzer $function $abs_file_dir +else + echo "Could not find the function: func ${function}(f *testing.F)" +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_python_fuzzer b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_python_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..a36c05f3d1a2b2abb3ca492cf26022486cf33ebf --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_python_fuzzer @@ -0,0 +1,128 @@ +#!/bin/bash -eux +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# In order to enable PySecSan for a given module, set the environment +# variable ENABLE_PYSECSAN="YES" + +fuzzer_path=$1 +shift 1 + +fuzzer_basename=$(basename -s .py $fuzzer_path) +fuzzer_package=${fuzzer_basename}.pkg + +PYFUZZ_WORKPATH=$SRC/pyfuzzworkdir/ +FUZZ_WORKPATH=$PYFUZZ_WORKPATH/$fuzzer_basename + +if [[ $SANITIZER = *introspector* ]]; then + # Extract the source package the fuzzer targets. This must happen before + # we enter the virtual environment in the following lines because we need + # to use the same python environment that installed the fuzzer dependencies. 
+ python3 /fuzz-introspector/frontends/python/prepare_fuzz_imports.py $fuzzer_path isossfuzz + + # We must ensure python3.9, this is because we use certain + # AST logic from there. + # The below should probably be refined + apt-get install -y python3.9 + apt-get update + apt-get install -y python3-pip + python3.9 -m pip install virtualenv + python3.9 -m virtualenv .venv + . .venv/bin/activate + pip3 install pyyaml + export PYTHONPATH="/fuzz-introspector/frontends/python/PyCG" + + ARGS="--fuzzer $fuzzer_path" + if [ -n "${PYFUZZPACKAGE-}" ]; then + ARGS="$ARGS --package=${PYFUZZPACKAGE}" + fi + python /fuzz-introspector/frontends/python/main.py $ARGS + ls -la ./ + exit 0 +fi + +# In coverage mode prepend coverage logic to the fuzzer source +if [[ $SANITIZER = *coverage* ]]; then + cat < coverage_wrapper.py +###### Coverage stub +import atexit +import coverage +cov = coverage.coverage(data_file='.coverage', cover_pylib=True) +cov.start() +# Register an exist handler that will print coverage +def exit_handler(): + cov.stop() + cov.save() +atexit.register(exit_handler) +####### End of coverage stub +EOF + + # Prepend stub and create tmp file + cat coverage_wrapper.py $fuzzer_path > tmp_fuzzer_coverage.py + + # Overwrite existing fuzzer with new fuzzer that has stub + mv tmp_fuzzer_coverage.py $fuzzer_path +fi + +# If PYSECSAN is enabled, ensure that we can build with it. +if [[ ${ENABLE_PYSECSAN:-"0"} != "0" ]]; +then + # Make sure pysecsan is installed + if [[ ! -d "/pysecsan" ]]; + then + pushd /usr/local/lib/sanitizers/pysecsan + python3 -m pip install . 
+ popd + fi + + cat < pysecsan_wrapper.py +import pysecsan; pysecsan.add_hooks(); +EOF + + # Prepend stub and create tmp file + cat pysecsan_wrapper.py $fuzzer_path > tmp_fuzzer_pysecsan.py + + # Overwrite existing fuzzer with new fuzzer that has stub + mv tmp_fuzzer_pysecsan.py $fuzzer_path +fi + +rm -rf $PYFUZZ_WORKPATH +mkdir $PYFUZZ_WORKPATH $FUZZ_WORKPATH + +pyinstaller --distpath $OUT --workpath=$FUZZ_WORKPATH --onefile --name $fuzzer_package "$@" $fuzzer_path + +# Disable executable bit from package as OSS-Fuzz uses executable bits to +# identify fuzz targets. We re-enable the executable bit in wrapper script +# below. +chmod -x $OUT/$fuzzer_package + +# In coverage mode save source files of dependencies in pyinstalled binary +if [[ $SANITIZER = *coverage* ]]; then + rm -rf /medio/ + python3 /usr/local/bin/python_coverage_helper.py $FUZZ_WORKPATH "/medio" + zip -r $fuzzer_package.deps.zip /medio + mv $fuzzer_package.deps.zip $OUT/ +fi + +# Create execution wrapper. +echo "#!/bin/sh +# LLVMFuzzerTestOneInput for fuzzer detection. +this_dir=\$(dirname \"\$0\") +chmod +x \$this_dir/$fuzzer_package +LD_PRELOAD=\$this_dir/sanitizer_with_fuzzer.so \ +ASAN_OPTIONS=\$ASAN_OPTIONS:symbolize=1:external_symbolizer_path=\$this_dir/llvm-symbolizer:detect_leaks=0 \ +\$this_dir/$fuzzer_package \$@" > $OUT/$fuzzer_basename +chmod +x $OUT/$fuzzer_basename diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl new file mode 100644 index 0000000000000000000000000000000000000000..c53dae8156d623620be6750bf242ed714a182dcf --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl @@ -0,0 +1,40 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Source this file for afl++ debug sessions. +apt-get update +apt-get install -y strace gdb vim joe psmisc + +pushd $SRC/aflplusplus > /dev/null +git checkout dev +git pull +test -n "$1" && { git checkout "$1" ; git pull ; } +CFLAGS_SAVE="$CFLAGS" +CXXFLAGS_SAVE="$CXXFLAGS" +unset CFLAGS +unset CXXFLAGS +make +export CFLAGS="$CFLAGS_SAVE" +export CXXFLAGS="$CXXFLAGS_SAVE" +popd > /dev/null + +export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_error=1" +export AFL_LLVM_LAF_ALL=1 +export AFL_LLVM_CMPLOG=1 +touch "$OUT/afl_cmplog.txt" +export AFL_LLVM_DICT2FILE=$OUT/afl++.dict +ulimit -c unlimited diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo.py new file mode 100644 index 0000000000000000000000000000000000000000..e677e102329f2c7b3efb49eb77df19f83b46ce9f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo.py @@ -0,0 +1,182 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module to get the name of a git repo containing a specific commit
+inside of an OSS-Fuzz project.
+
+Example Usage:
+
+  python detect_repo.py --src_dir /src --example_commit
+  b534f03eecd8a109db2b085ab24d419b6486de97
+
+Prints the location of the git remote repo as well as the repo's name
+separated by a space.
+
+  https://github.com/VirusTotal/yara.git yara
+
+"""
+import argparse
+import logging
+import os
+import subprocess
+
+GO_PATH = '/root/go/src/'
+
+
+def main():
+  """Function to get a git repo's url and name referenced by OSS-Fuzz
+  Dockerfile.
+
+  Raises:
+    ValueError when a commit or a ref is not provided. 
+ """ + parser = argparse.ArgumentParser( + description= + 'Finds a specific git repo in an oss-fuzz project\'s docker file.') + parser.add_argument('--repo_name', help='The name of the git repo.') + parser.add_argument('--src_dir', help='The location of the possible repo.') + parser.add_argument('--example_commit', + help='A commit SHA referencing the project\'s main repo.') + + args = parser.parse_args() + if not args.repo_name and not args.example_commit: + raise ValueError( + 'Requires an example commit or a repo name to find repo location.') + if args.src_dir: + src_dir = args.src_dir + else: + src_dir = os.environ.get('SRC', '/src') + + for single_dir in get_dirs_to_search(src_dir, args.repo_name): + full_path = os.path.join(src_dir, single_dir) + if not os.path.isdir(full_path): + continue + if args.example_commit and check_for_commit(full_path, args.example_commit): + print('Detected repo:', get_repo(full_path), full_path) + return + if args.repo_name and check_for_repo_name(full_path, args.repo_name): + print('Detected repo:', get_repo(full_path), full_path) + return + logging.error('No git repos with specific commit: %s found in %s', + args.example_commit, src_dir) + + +def get_dirs_to_search(src_dir, repo_name): + """Gets a list of directories to search for the main git repo. + + Args: + src_dir: The location set for the projects SRC. + repo_name: The name of the repo you are searching for. + + Returns: + A list of directorys to search. + """ + dirs_to_search = os.listdir(src_dir) + if os.path.exists(GO_PATH) and repo_name: + for root, dirs, _ in os.walk(GO_PATH): + for test_dir in dirs: + if repo_name in test_dir: + dirs_to_search.append(os.path.join(root, test_dir)) + return dirs_to_search + + +def get_repo(repo_path): + """Gets a git repo link from a specific directory in a docker image. + + Args: + repo_path: The directory on the image where the git repo exists. + + Returns: + The repo location or None. 
+ """ + output, return_code = execute(['git', 'config', '--get', 'remote.origin.url'], + location=repo_path, + check_result=True) + if return_code == 0 and output: + return output.rstrip() + return None + + +def check_for_repo_name(repo_path, expected_repo_name): + """Returns True if the repo at |repo_path| repo_name matches + |expected_repo_name|. + + Args: + repo_path: The directory of a git repo. + expected_repo_name: The name of the target git repo. + """ + if not os.path.exists(os.path.join(repo_path, '.git')): + return False + + repo_url, _ = execute(['git', 'config', '--get', 'remote.origin.url'], + location=repo_path) + # Handle two common cases: + # https://github.com/google/syzkaller/ + # https://github.com/google/syzkaller.git + repo_url = repo_url.replace('.git', '').rstrip().rstrip('/') + actual_repo_name = repo_url.split('/')[-1] + return actual_repo_name == expected_repo_name + + +def check_for_commit(repo_path, commit): + """Checks a directory for a specific commit. + + Args: + repo_path: The name of the directory to test for the commit. + commit: The commit SHA to check for. + + Returns: + True if directory contains that commit. + """ + + # Check if valid git repo. + if not os.path.exists(os.path.join(repo_path, '.git')): + return False + + # Check if history fetch is needed. + if os.path.exists(os.path.join(repo_path, '.git', 'shallow')): + execute(['git', 'fetch', '--unshallow'], location=repo_path) + + # Check if commit is in history. + _, return_code = execute(['git', 'cat-file', '-e', commit], + location=repo_path) + return return_code == 0 + + +def execute(command, location, check_result=False): + """Runs a shell command in the specified directory location. + + Args: + command: The command as a list to be run. + location: The directory the command is run in. + check_result: Should an exception be thrown on failed command. + + Returns: + The stdout of the command, the error code. 
+ + Raises: + RuntimeError: running a command resulted in an error. + """ + process = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=location) + output, err = process.communicate() + if check_result and (process.returncode or err): + raise RuntimeError( + 'Error: %s\n running command: %s\n return code: %s\n out %s\n' % + (err, command, process.returncode, output)) + if output is not None: + output = output.decode('ascii') + return output, process.returncode + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0243b3ac513e942825e445bf4786bc593f48a338 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py @@ -0,0 +1,121 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test the functionality of the detect_repo module. +This will consist of the following functional test: + 1. Determine if an OSS-Fuzz projects main repo can be detected from example + commits. + 2. Determine if an OSS-Fuzz project main repo can be detected from a + repo name. +""" +import os +import re +import sys +import tempfile +import unittest +from unittest import mock + +import detect_repo + +# Appending to path for access to repo_manager module. 
# pylint: disable=wrong-import-position
sys.path.append(
    os.path.dirname(os.path.dirname(os.path.dirname(
        os.path.abspath(__file__)))))
import repo_manager
import test_repos
# pylint: enable=wrong-import-position


class TestCheckForRepoName(unittest.TestCase):
  """Tests for check_for_repo_name."""

  # Both git lookups are mocked out so no real repo is needed.
  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller/', None))
  def test_go_get_style_url(self, _, __):
    """Tests that check_for_repo_name works on repos that were downloaded using
    go get."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))

  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller', None))
  def test_missing_git_and_slash_url(self, _, __):
    """Tests that check_for_repo_name works on repos whose URLs do not end in
    ".git" or "/"."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))

  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller.git', None))
  def test_normal_style_repo_url(self, _, __):
    """Tests that check_for_repo_name works on normally cloned repos."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))


@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class DetectRepoIntegrationTest(unittest.TestCase):
  """Class to test the functionality of the detect_repo module."""

  def test_infer_main_repo_from_commit(self):
    """Tests that the main repo can be inferred based on an example commit."""

    with tempfile.TemporaryDirectory() as tmp_dir:
      # Construct example repo's to check for commits.
      for test_repo in test_repos.TEST_REPOS:
        repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
        self.check_with_repo(test_repo.git_url,
                             test_repo.git_repo_name,
                             tmp_dir,
                             commit=test_repo.old_commit)

  def test_infer_main_repo_from_name(self):
    """Tests that the main project repo can be inferred from a repo name."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      for test_repo in test_repos.TEST_REPOS:
        repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
        self.check_with_repo(test_repo.git_url, test_repo.git_repo_name,
                             tmp_dir)

  def check_with_repo(self, repo_origin, repo_name, tmp_dir, commit=None):
    """Checks the detect repo's main method for a specific set of inputs.

    Args:
      repo_origin: URL of the git repo.
      repo_name: The name of the directory it is cloned to.
      tmp_dir: The location of the directory of git repos to be searched.
      commit: The commit that should be used to look up the repo.
    """
    # Runs detect_repo.py as a subprocess, exactly as the builder does.
    command = ['python3', 'detect_repo.py', '--src_dir', tmp_dir]

    if commit:
      command += ['--example_commit', commit]
    else:
      command += ['--repo_name', repo_name]

    out, _ = detect_repo.execute(command,
                                 location=os.path.dirname(
                                     os.path.realpath(__file__)))
    # Parse the "Detected repo: <url> <path>" line printed on success.
    match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
    if match and match.group(1) and match.group(2):
      self.assertEqual(match.group(1), repo_origin)
      self.assertEqual(match.group(2), os.path.join(tmp_dir, repo_name))
    else:
      self.assertIsNone(repo_origin)
      self.assertIsNone(repo_name)


if __name__ == '__main__':
  unittest.main()
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh
new file mode 100644
index 0000000000000000000000000000000000000000..777e4d1b69f3865207fba671457508662d0a49c2
--- /dev/null
+++
b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh
@@ -0,0 +1,44 @@
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install base-builder's dependencies in an architecture-aware way.


# 32-bit builds are only supported on x86_64 hosts.
case $(uname -m) in
  x86_64)
    dpkg --add-architecture i386
    ;;
esac

apt-get update && \
    apt-get install -y \
        binutils-dev \
        build-essential \
        curl \
        wget \
        git \
        jq \
        patchelf \
        rsync \
        subversion \
        zip

case $(uname -m) in
  x86_64)
    apt-get install -y libc6-dev-i386
    ;;
esac
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f2a93bd76702f33c0c3319fcc74ddbfd42852057
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh
@@ -0,0 +1,43 @@
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install the Go toolchain and the Go fuzzing build helpers.
cd /tmp

wget https://go.dev/dl/go1.23.4.linux-amd64.tar.gz
mkdir temp-go
tar -C temp-go/ -xzf go1.23.4.linux-amd64.tar.gz

mkdir /root/.go/
mv temp-go/go/* /root/.go/
rm -rf temp-go

# GOPATH/PATH are expected to be set by the Dockerfile; these echoes only
# document the values assumed below.
echo 'Set "GOPATH=/root/go"'
echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"'

go install github.com/mdempsky/go114-fuzz-build@latest
ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz

# Build signal handler
if [ -f "$GOPATH/gosigfuzz/gosigfuzz.c" ]; then
  clang -c $GOPATH/gosigfuzz/gosigfuzz.c -o $GOPATH/gosigfuzz/gosigfuzz.o
fi

cd /tmp
git clone https://github.com/AdamKorcz/go-118-fuzz-build
cd go-118-fuzz-build
go build
mv go-118-fuzz-build $GOPATH/bin/
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh
new file mode 100644
index 0000000000000000000000000000000000000000..d7743c6cd37c9c09f6459c3ed1805d7ddd4b56df
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh
@@ -0,0 +1,31 @@
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install OpenJDK 17 and trim its size by removing unused components. This enables using Jazzer's mutation framework.
cd /tmp
curl --silent -L -O https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-x64_bin.tar.gz && \
mkdir -p $JAVA_HOME
tar -xz --strip-components=1 -f openjdk-17.0.2_linux-x64_bin.tar.gz --directory $JAVA_HOME && \
rm -f openjdk-17.0.2_linux-x64_bin.tar.gz
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip

# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15.
curl --silent -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-x64_bin.tar.gz && \
mkdir -p $JAVA_15_HOME
tar -xz --strip-components=1 -f openjdk-15.0.2_linux-x64_bin.tar.gz --directory $JAVA_15_HOME && \
rm -f openjdk-15.0.2_linux-x64_bin.tar.gz
rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_javascript.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_javascript.sh
new file mode 100644
index 0000000000000000000000000000000000000000..232658b0484dc67202352a2bc9025168fd8e4aa0
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_javascript.sh
@@ -0,0 +1,27 @@
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# see installation instructions: https://github.com/nodesource/distributions#available-architectures
apt-get update
apt-get install -y ca-certificates curl gnupg
mkdir -p /etc/apt/keyrings
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg

NODE_MAJOR=20
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

apt-get update
apt-get install nodejs -y
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_python.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_python.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0d5fcb96450a9e402d1c8a5af30e09bc42bd36ff
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_python.sh
@@ -0,0 +1,23 @@
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

echo "ATHERIS INSTALL"
# Clear compiler flags inherited from the image so pip builds cleanly.
unset CFLAGS CXXFLAGS
# PYI_STATIC_ZLIB=1 is needed for installing pyinstaller 5.0
export PYI_STATIC_ZLIB=1
LIBFUZZER_LIB=$( echo /usr/local/lib/clang/*/lib/x86_64-unknown-linux-gnu/libclang_rt.fuzzer_no_main.a ) pip3 install -v --no-cache-dir "atheris>=2.3.0" "pyinstaller==6.10.0" "setuptools==72.1.0" "coverage==6.3.2"
rm -rf /tmp/*
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh
new file mode 100644
index 0000000000000000000000000000000000000000..76e996727197aa93b49dfbb935dff7b57c077341
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh
@@ -0,0 +1,25 @@
#!/bin/bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install Ruby via RVM so fuzzing projects can build Ruby extensions.
apt update
apt install -y lsb-release software-properties-common gnupg2 binutils xz-utils libyaml-dev
gpg2 --keyserver keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
curl -sSL https://get.rvm.io | bash

. /etc/profile.d/rvm.sh

rvm install ruby-3.3.1
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh
new file mode 100644
index 0000000000000000000000000000000000000000..45fbec6ed4d6afeec0897f38c9a7c1bab0c4eafa
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh
@@ -0,0 +1,22 @@
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=$RUSTUP_TOOLCHAIN --profile=minimal
cargo install cargo-fuzz --locked && rm -rf /rust/registry
# Needed to recompile rust std library for MSAN
rustup component add rust-src
cp -r /usr/local/lib/x86_64-unknown-linux-gnu/* /usr/local/lib/
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh
new file mode 100644
index 0000000000000000000000000000000000000000..1d0d16701f39965afc493e9cb64bc0a6f76f4c16
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh
@@ -0,0 +1,67 @@
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################


SWIFT_PACKAGES="wget \
        binutils \
        git \
        gnupg2 \
        libc6-dev \
        libcurl4 \
        libedit2 \
        libgcc-9-dev \
        libpython2.7 \
        libsqlite3-0 \
        libstdc++-9-dev \
        libxml2 \
        libz3-dev \
        pkg-config \
        tzdata \
        zlib1g-dev"
SWIFT_SYMBOLIZER_PACKAGES="build-essential make cmake ninja-build git python3 g++-multilib binutils-dev zlib1g-dev"
apt-get update && apt install -y $SWIFT_PACKAGES && \
  apt install -y $SWIFT_SYMBOLIZER_PACKAGES --no-install-recommends


wget -q https://download.swift.org/swift-5.10.1-release/ubuntu2004/swift-5.10.1-RELEASE/swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
tar xzf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
cp -r swift-5.10.1-RELEASE-ubuntu20.04/usr/* /usr/
rm -rf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz swift-5.10.1-RELEASE-ubuntu20.04/
# TODO: Move to a separate work dir
git clone https://github.com/llvm/llvm-project.git
cd llvm-project
git checkout 63bf228450b8403e0c5e828d276be47ffbcd00d0 # TODO: Keep in sync with base-clang.
git apply ../llvmsymbol.diff --verbose
cmake -G "Ninja" \
  -DLIBCXX_ENABLE_SHARED=OFF \
  -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \
  -DLIBCXXABI_ENABLE_SHARED=OFF \
  -DCMAKE_BUILD_TYPE=Release \
  -DLLVM_TARGETS_TO_BUILD=X86 \
  -DCMAKE_C_COMPILER=clang \
  -DCMAKE_CXX_COMPILER=clang++ \
  -DLLVM_BUILD_TESTS=OFF \
  -DLLVM_INCLUDE_TESTS=OFF llvm
ninja -j$(nproc) llvm-symbolizer
cp bin/llvm-symbolizer /usr/local/bin/llvm-symbolizer-swift

cd $SRC
rm -rf llvm-project llvmsymbol.diff

# TODO: Cleanup packages
apt-get remove --purge -y wget zlib1g-dev
apt-get autoremove -y
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/build_jcc.bash b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/build_jcc.bash
new file mode 100644
index 0000000000000000000000000000000000000000..6a62c22890f0edbb24ccdd73d5a8d089f44e7632
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/build_jcc.bash
@@ -0,0 +1,25 @@
#!/bin/bash -eu
#
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Build the jcc compiler wrappers and publish them; the same binary is
# uploaded under both the clang and clang++ wrapper names.
go build jcc.go
go build jcc2.go
gsutil cp jcc gs://clusterfuzz-builds/jcc/clang++-jcc
gsutil cp jcc gs://clusterfuzz-builds/jcc/clang-jcc

gsutil cp jcc2 gs://clusterfuzz-builds/jcc/clang++-jcc2
gsutil cp jcc2 gs://clusterfuzz-builds/jcc/clang-jcc2
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/go.mod b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..c32967d291b649cbe030aae34d0121b02a971647
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/go.mod
@@ -0,0 +1,3 @@
module github.com/google/jcc

go 1.21
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc.go
new file mode 100644
index 0000000000000000000000000000000000000000..7eda88dd8a309908d475245261d76199817223f2
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc.go
@@ -0,0 +1,88 @@
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"bytes"
	"fmt"
	"log"
	"os"
	"os/exec"
	"path/filepath"
)

// ExecBuildCommand runs |bin| with |args|, capturing stdout/stderr while
// forwarding our stdin. The Run error is deliberately ignored: the exit
// code is reported via ProcessState instead, mirroring the real compiler.
func ExecBuildCommand(bin string, args []string) (int, string, string) {
	// Executes the original command.
	cmd := exec.Command(bin, args...)
	var outb, errb bytes.Buffer
	cmd.Stdout = &outb
	cmd.Stderr = &errb
	cmd.Stdin = os.Stdin
	cmd.Run()
	return cmd.ProcessState.ExitCode(), outb.String(), errb.String()
}

// Compile is a thin alias over ExecBuildCommand.
func Compile(bin string, args []string) (int, string, string) {
	// Run the actual command.
	return ExecBuildCommand(bin, args)
}

// AppendStringToFile appends |new_content| to |filepath|, creating the
// file if needed.
func AppendStringToFile(filepath, new_content string) error {
	// Appends |new_content| to the content of |filepath|.
	file, err := os.OpenFile(filepath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer file.Close()

	_, err = file.WriteString(new_content)
	return err
}

// WriteStdErrOut forwards the wrapped compiler's output to our own
// stdout/stderr and records failures (args + stderr) in /tmp/err.log.
func WriteStdErrOut(args []string, outstr string, errstr string) {
	// Prints |outstr| to stdout, prints |errstr| to stderr, and saves |errstr| to err.log.
	fmt.Print(outstr)
	fmt.Fprint(os.Stderr, errstr)
	// Record what compile args produced the error and the error itself in log file.
	AppendStringToFile("/tmp/err.log", fmt.Sprintf("%s\n", args)+errstr)
}

// main dispatches to clang or clang++ based on the name this wrapper was
// invoked as, logging every invocation to /tmp/jcc.log.
func main() {
	f, err := os.OpenFile("/tmp/jcc.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)

	if err != nil {
		log.Println(err)
	}
	defer f.Close()
	if _, err := f.WriteString(fmt.Sprintf("%s\n", os.Args)); err != nil {
		log.Println(err)
	}

	args := os.Args[1:]
	// The wrapper binary is installed under both names; the basename
	// decides which real compiler to exec.
	basename := filepath.Base(os.Args[0])
	isCPP := basename == "clang++-jcc"
	newArgs := args

	var bin string
	if isCPP {
		bin = "clang++"
	} else {
		bin = "clang"
	}
	fullCmdArgs := append([]string{bin}, newArgs...)
	retcode, out, errstr := Compile(bin, newArgs)
	WriteStdErrOut(fullCmdArgs, out, errstr)
	os.Exit(retcode)
}
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc2.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc2.go
new file mode 100644
index 0000000000000000000000000000000000000000..a2d9e73ea2f58c2b1ed3279fe19ac4d6a43e8126
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc2.go
@@ -0,0 +1,401 @@
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package main + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io/fs" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" +) + +var MaxMissingHeaderFiles = 10 +var CppifyHeadersMagicString = "\n/* JCCCppifyHeadersMagicString */\n" + +func CopyFile(src string, dst string) { + contents, err := ioutil.ReadFile(src) + if err != nil { + panic(err) + } + err = ioutil.WriteFile(dst, contents, 0644) + if err != nil { + panic(err) + } +} + +func TryFixCCompilation(cmdline []string) ([]string, int, string, string) { + var newFile string = "" + for i, arg := range cmdline { + if !strings.HasSuffix(arg, ".c") { + continue + } + if _, err := os.Stat(arg); errors.Is(err, os.ErrNotExist) { + continue + } + newFile = strings.TrimSuffix(arg, ".c") + newFile += ".cpp" + CopyFile(arg, newFile) + CppifyHeaderIncludesFromFile(newFile) + cmdline[i] = newFile + break + } + if newFile == "" { + return []string{}, 1, "", "" + } + cppBin := "clang++" + newCmdline := []string{"-stdlib=libc++"} + newCmdline = append(cmdline, newCmdline...) + newFullArgs := append([]string{cppBin}, newCmdline...) + + retcode, out, err := Compile(cppBin, newCmdline) + if retcode == 0 { + return newFullArgs, retcode, out, err + } + correctedCmdline, corrected, _ := CorrectMissingHeaders(cppBin, newCmdline) + if corrected { + return append([]string{cppBin}, correctedCmdline...), 0, "", "" + } + return newFullArgs, retcode, out, err +} + +func ExtractMissingHeader(compilerOutput string) (string, bool) { + r := regexp.MustCompile(`fatal error: ['|<](?P
[a-zA-z0-9\/\.]+)['|>] file not found`) + matches := r.FindStringSubmatch(compilerOutput) + if len(matches) == 0 { + return "", false + } + return matches[1], true +} + +func ReplaceMissingHeaderInFile(srcFilename, curHeader, replacementHeader string) error { + srcFile, err := os.Open(srcFilename) + if err != nil { + return err + } + srcBytes, err := ioutil.ReadAll(srcFile) + if err != nil { + return err + } + src := string(srcBytes) + newSrc := ReplaceMissingHeader(src, curHeader, replacementHeader) + b := []byte(newSrc) + err = ioutil.WriteFile(srcFilename, b, 0644) + if err != nil { + return err + } + return nil +} + +func ReplaceMissingHeader(src, curHeader, replacementHeader string) string { + re := regexp.MustCompile(`#include ["|<]` + curHeader + `["|>]\n`) + replacement := "#include \"" + replacementHeader + "\"\n" + return re.ReplaceAllString(src, replacement) +} + +func GetHeaderCorrectedFilename(compilerErr string) (string, string, bool) { + re := regexp.MustCompile(`(?P[a-z\/\-\_0-9A-z\.]+):.* fatal error: .* file not found`) + matches := re.FindStringSubmatch(compilerErr) + if len(matches) < 2 { + return "", "", false + } + oldFilename := matches[1] + base := filepath.Base(oldFilename) + root := filepath.Dir(oldFilename) + newFilename := root + "/jcc-corrected-" + base + return oldFilename, newFilename, true +} + +func GetHeaderCorrectedCmd(cmd []string, compilerErr string) ([]string, string, error) { + oldFilename, newFilename, success := GetHeaderCorrectedFilename(compilerErr) + if !success { + return cmd, "", errors.New("Couldn't find buggy file") + } + // Make new cmd. 
+ newCmd := make([]string, len(cmd)) + for i, part := range cmd { + newCmd[i] = part + } + found := false + for i, filename := range newCmd { + if filename == oldFilename { + newCmd[i] = newFilename + found = true + break + } + } + CopyFile(oldFilename, newFilename) + if found { + return newCmd, newFilename, nil + } + return cmd, "", errors.New("Couldn't find file") +} + +func CorrectMissingHeaders(bin string, cmd []string) ([]string, bool, error) { + + _, _, stderr := Compile(bin, cmd) + cmd, correctedFilename, err := GetHeaderCorrectedCmd(cmd, stderr) + if err != nil { + return cmd, false, err + } + for i := 0; i < MaxMissingHeaderFiles; i++ { + fixed, hasBrokenHeaders := TryCompileAndFixHeadersOnce(bin, cmd, correctedFilename) + if fixed { + return cmd, true, nil + } + if !hasBrokenHeaders { + return cmd, false, nil + } + } + return cmd, false, nil +} + +func ExecBuildCommand(bin string, args []string) (int, string, string) { + // Executes the original command. + cmd := exec.Command(bin, args...) + var outb, errb bytes.Buffer + cmd.Stdout = &outb + cmd.Stderr = &errb + cmd.Stdin = os.Stdin + cmd.Run() + return cmd.ProcessState.ExitCode(), outb.String(), errb.String() +} + +func Compile(bin string, args []string) (int, string, string) { + // Run the actual command. 
+ return ExecBuildCommand(bin, args) +} + +func TryCompileAndFixHeadersOnce(bin string, cmd []string, filename string) (fixed, hasBrokenHeaders bool) { + retcode, _, err := Compile(bin, cmd) + if retcode == 0 { + fixed = true + hasBrokenHeaders = false + return + } + missingHeader, isMissing := ExtractMissingHeader(err) + if !isMissing { + fixed = false + hasBrokenHeaders = false + return + } + + newHeaderPath, found := FindMissingHeader(missingHeader) + if !found { + fixed = false + hasBrokenHeaders = true + return false, true + } + ReplaceMissingHeaderInFile(filename, missingHeader, newHeaderPath) + return false, true +} + +func FindMissingHeader(missingHeader string) (string, bool) { + envVar := "JCC_MISSING_HEADER_SEARCH_PATH" + var searchPath string + searchPath, exists := os.LookupEnv(envVar) + if !exists { + searchPath = "/src" + } + searchPath, _ = filepath.Abs(searchPath) + var headerLocation string + missingHeader = "/" + missingHeader + find := func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + if strings.HasSuffix(path, missingHeader) { + headerLocation = path + return nil + } + return nil + } + filepath.WalkDir(searchPath, find) + if headerLocation == "" { + return "", false + } + return headerLocation, true +} + +func CppifyHeaderIncludesFromFile(srcFile string) error { + contentsBytes, err := ioutil.ReadFile(srcFile) + if err != nil { + return err + } + contents := string(contentsBytes[:]) + contents, err = CppifyHeaderIncludes(contents) + if err != nil { + return err + } + b := []byte(contents) + err = ioutil.WriteFile(srcFile, b, 0644) + return err +} + +func CppifyHeaderIncludes(contents string) (string, error) { + shouldCppify, exists := os.LookupEnv("JCC_CPPIFY_PROJECT_HEADERS") + if !exists || strings.Compare(shouldCppify, "0") == 0 { + return contents, nil + } + if strings.Contains(contents, CppifyHeadersMagicString) { + return contents, nil + } + re := 
regexp.MustCompile(`\#include \"(?P
.+)\"\n`) + matches := re.FindAllStringSubmatch(contents, -1) + if len(matches) == 0 { + return "", nil // !!! + } + for i, match := range matches { + if i == 0 { + // So we don't cppify twice. + contents += CppifyHeadersMagicString + } + oldStr := match[0] + replacement := "extern \"C\" {\n#include \"" + match[1] + "\"\n}\n" + contents = strings.Replace(contents, oldStr, replacement, 1) + if strings.Compare(contents, "") == 0 { + panic("Failed to replace") + } + } + return contents, nil +} + +func AppendStringToFile(filepath, new_content string) error { + // Appends |new_content| to the content of |filepath|. + file, err := os.OpenFile(filepath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + defer file.Close() + + _, err = file.WriteString(new_content) + return err +} + +func WriteStdErrOut(args []string, outstr string, errstr string) { + // Prints |outstr| to stdout, prints |errstr| to stderr, and saves |errstr| to err.log. + fmt.Print(outstr) + fmt.Fprint(os.Stderr, errstr) + // Record what compile args produced the error and the error itself in log file. + AppendStringToFile("/workspace/err.log", fmt.Sprintf("%s\n", args)+errstr) +} + +func main() { + f, err := os.OpenFile("/tmp/jcc.log", os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + log.Println(err) + } + defer f.Close() + if _, err := f.WriteString(fmt.Sprintf("%s\n", os.Args)); err != nil { + log.Println(err) + } + + args := os.Args[1:] + if args[0] == "unfreeze" { + fmt.Println("unfreeze") + unfreeze() + } + basename := filepath.Base(os.Args[0]) + isCPP := basename == "clang++-jcc" + newArgs := append(args, "-w") + + var bin string + if isCPP { + bin = "clang++" + newArgs = append(args, "-stdlib=libc++") + } else { + bin = "clang" + } + fullCmdArgs := append([]string{bin}, newArgs...) 
+ if IsCompilingTarget(fullCmdArgs) { + WriteTargetArgsAndCommitImage(fullCmdArgs) + os.Exit(0) + } + retcode, out, errstr := Compile(bin, newArgs) + WriteStdErrOut(fullCmdArgs, out, errstr) + os.Exit(retcode) +} + +type BuildCommand struct { + CWD string `json:"CWD"` + CMD []string `json:"CMD"` +} + +func WriteTargetArgsAndCommitImage(cmdline []string) { + log.Println("WRITE COMMAND") + f, _ := os.OpenFile("/out/statefile.json", os.O_CREATE|os.O_WRONLY, 0644) + wd, _ := os.Getwd() + buildcmd := BuildCommand{ + CWD: wd, + CMD: cmdline, + } + jsonData, _ := json.Marshal(buildcmd) + f.Write(jsonData) + f.Close() + hostname, _ := os.Hostname() + dockerArgs := []string{"commit", hostname, "frozen"} + cmd := exec.Command("docker", dockerArgs...) + var outb, errb bytes.Buffer + cmd.Stdout = &outb + cmd.Stderr = &errb + cmd.Stdin = os.Stdin + cmd.Run() + fmt.Println(outb.String(), errb.String()) + fmt.Println("COMMIT IMAGE") +} + +func IsCompilingTarget(cmdline []string) bool { + for _, arg := range cmdline { + // This can fail if people do crazy things they aren't supposed + // to such as using some other means to link in libFuzzer. 
+ if arg == "-fsanitize=fuzzer" { + return true + } + if arg == "-lFuzzingEngine" { + return true + } + } + return false +} + +func parseCommand(command string) (string, []string) { + args := strings.Fields(command) + commandBin := args[0] + commandArgs := args[1:] + return commandBin, commandArgs +} + +func unfreeze() { + content, err := ioutil.ReadFile("/out/statefile.json") + if err != nil { + log.Fatal(err) + } + var command BuildCommand + json.Unmarshal(content, &command) + bin, args := parseCommand(strings.Join(command.CMD, " ")) + os.Chdir(command.CWD) + ExecBuildCommand(bin, args) + os.Exit(0) +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc_test.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc_test.go new file mode 100644 index 0000000000000000000000000000000000000000..bc76cb263f11969de5c15d19ebfe742a8e0f5f33 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/jcc_test.go @@ -0,0 +1,186 @@ +package main + +import ( + "fmt" + "os" + "strings" + "testing" +) + +func TestExtractMissingHeader(t *testing.T) { + missingHeaderMessage := `path/to/file.cpp:8:10: fatal error: 'missingheader.h' file not found + + #include "missingheader.h" + + ^~~~~~~~~~~~ + + 1 error generated. + ` + + res, _ := ExtractMissingHeader(missingHeaderMessage) + expected := "missingheader.h" + if strings.Compare(res, expected) != 0 { + t.Errorf("Got: %s. Expected: %s.", res, expected) + } +} + +func TestGetHeaderCorrectedFilename(t *testing.T) { + missingHeaderMessage := `path/to/file.cpp:8:10: fatal error: 'missingheader.h' file not found + + #include "missingheader.h" + + ^~~~~~~~~~~~ + + 1 error generated. + ` + _, correctedFilename, _ := GetHeaderCorrectedFilename(missingHeaderMessage) + expected := "path/to/jcc-corrected-file.cpp" + if strings.Compare(correctedFilename, expected) != 0 { + t.Errorf("Got: %s. 
Expected: %s.", correctedFilename, expected) + } +} + +func TestFindMissingHeader(t *testing.T) { + pwd, _ := os.Getwd() + t.Setenv("JCC_MISSING_HEADER_SEARCH_PATH", pwd) + + location, _ := FindMissingHeader("header.h") + expected := pwd + "/testdata/path/to/header.h" + if strings.Compare(location, expected) != 0 { + t.Errorf("Got: %s. Expected: %s.", location, expected) + } +} + +func TestCorrectMissingHeaders(t *testing.T) { + pwd, _ := os.Getwd() + t.Setenv("JCC_MISSING_HEADER_SEARCH_PATH", pwd) + cfile := pwd + "/testdata/cfile.c" + cmd := [4]string{"-fsanitize=address", cfile, "-o", "/tmp/blah"} + res, err := CorrectMissingHeaders("clang", cmd[:]) + if !res { + fmt.Println(err) + t.Errorf("Expected successful compilation") + } +} + +func TestGetHeaderCorrectedCmd(t *testing.T) { + compilerErr := `testdata/cpp.cc:8:10: fatal error: 'missingheader.h' file not found + + #include "missingheader.h" + + ^~~~~~~~~~~~ + + 1 error generated. + ` + + cmd := [3]string{"-fsanitize=address", "file.cpp", "path/to/cpp.cc"} + expectedFixedCmd := [3]string{"-fanitize=address", "file.cpp", "path/to/jcc-corrected-cpp.cc"} + fixedCmd, _, _ := GetHeaderCorrectedCmd(cmd[:], compilerErr) + if strings.Compare(fixedCmd[1], expectedFixedCmd[1]) != 0 { + t.Errorf("Expected %s, got: %s", expectedFixedCmd, fixedCmd) + } +} + +func TestCppifyHeaderIncludes(t *testing.T) { + t.Setenv("JCC_CPPIFY_PROJECT_HEADERS", "1") + src := `// Copyright blah +#include + +#include "fuzz.h" +#include "x/y.h" +extern "C" LLVMFuzzerTestOneInput(uint8_t* data, size_t sz) { + return 0; +}` + newFile, _ := CppifyHeaderIncludes(src) + expected := `// Copyright blah +#include + +extern "C" { +#include "fuzz.h" +} +extern "C" { +#include "x/y.h" +} +extern "C" LLVMFuzzerTestOneInput(uint8_t* data, size_t sz) { + return 0; +} +/* JCCCppifyHeadersMagicString */ +` + if strings.Compare(newFile, expected) != 0 { + t.Errorf("Expected: %s, got: %s", expected, newFile) + } +} + +func TestCppifyHeaderIncludesShouldnt(t 
*testing.T) { + src := `// Copyright blah +#include + +#include "fuzz.h" +#include "x/y.h" +extern "C" LLVMFuzzerTestOneInput(uint8_t* data, size_t sz) { + return 0; +}` + newFile, _ := CppifyHeaderIncludes(src) + if strings.Compare(newFile, src) != 0 { + t.Errorf("Expected: %s. Got: %s", src, newFile) + } +} + +func TestCppifyHeaderIncludesAlready(t *testing.T) { + src := `// Copyright blah +#include + +#include "fuzz.h" +#include "x/y.h" +extern "C" LLVMFuzzerTestOneInput(uint8_t* data, size_t sz) { + return 0; +} +/* JCCCppifyHeadersMagicString */ +` + newFile, _ := CppifyHeaderIncludes(src) + if strings.Compare(newFile, src) != 0 { + t.Errorf("Expected %s, got: %s", src, newFile) + } +} + +func TestExtractMissingHeaderNonHeaderFailure(t *testing.T) { + missingHeaderMessage := `clang: error: no such file or directory: 'x' +clang: error: no input files` + + header, res := ExtractMissingHeader(missingHeaderMessage) + if res { + t.Errorf("Expected no match, got: %s", header) + } +} + +func TestReplaceMissingHeader(t *testing.T) { + cfile := `// Copyright 2035 Robots +#include + +#include + +// Some libraries like OpenSSL will use brackets for their own headers. +#include + +int LLVMFuzzerTestOneInput(uint8_t* data, size_t size) { + return 0; +} +` + + res := ReplaceMissingHeader(cfile, "missingheader.h", "path/to/includes/missingheader.h") + expected := `// Copyright 2035 Robots +#include + +#include + +// Some libraries like OpenSSL will use brackets for their own headers. +#include "path/to/includes/missingheader.h" + +int LLVMFuzzerTestOneInput(uint8_t* data, size_t size) { + return 0; +} +` + if strings.Compare(res, expected) != 0 { + t.Errorf("Got: %s. 
Expected: %s.", res, expected) + } +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/.gitignore b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..ea1fd92c507fc7d30a9fb1cd71f0a03d42cfca1d --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/.gitignore @@ -0,0 +1,2 @@ +jcc-corrected-cfile.c +jcc-corrected-cfile.cpp \ No newline at end of file diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cfile.c b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cfile.c new file mode 100644 index 0000000000000000000000000000000000000000..e18c414d13060ac339cf2aafe5e782a8e55e9cc3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cfile.c @@ -0,0 +1,18 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "header.h" +int main() { + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cpp.cc b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cpp.cc new file mode 100644 index 0000000000000000000000000000000000000000..e18c414d13060ac339cf2aafe5e782a8e55e9cc3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/cpp.cc @@ -0,0 +1,18 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "header.h" +int main() { + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/path/to/header.h b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/path/to/header.h new file mode 100644 index 0000000000000000000000000000000000000000..f4d3e90abd55b52ed6e8a02f23393165ccbf34ab --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/jcc/testdata/path/to/header.h @@ -0,0 +1,15 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +int xhg(void); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/llvmsymbol.diff b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/llvmsymbol.diff new file mode 100644 index 0000000000000000000000000000000000000000..70181bf3997985bbd8ac568f3227e18ef423f6f2 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/llvmsymbol.diff @@ -0,0 +1,50 @@ +diff --git a/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt b/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt +index acfb3bd0e..a499ee2e0 100644 +--- a/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt ++++ b/llvm/lib/DebugInfo/Symbolize/CMakeLists.txt +@@ -12,4 +12,11 @@ add_llvm_component_library(LLVMSymbolize + Object + Support + Demangle +- ) ++ ++ LINK_LIBS ++ /usr/lib/swift_static/linux/libswiftCore.a ++ /usr/lib/swift_static/linux/libicui18nswift.a ++ /usr/lib/swift_static/linux/libicuucswift.a ++ /usr/lib/swift_static/linux/libicudataswift.a ++ /usr/lib/x86_64-linux-gnu/libstdc++.so.6 ++) +diff --git a/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp b/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp +index fb4875f79..0030769ee 100644 +--- a/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp ++++ b/llvm/lib/DebugInfo/Symbolize/Symbolize.cpp +@@ -36,6 +36,13 @@ + #include + #include + ++ ++extern "C" char *swift_demangle(const char *mangledName, ++ size_t mangledNameLength, ++ char *outputBuffer, ++ size_t *outputBufferSize, ++ uint32_t flags); ++ + namespace llvm { + namespace symbolize { + +@@ -678,6 +685,14 @@ 
LLVMSymbolizer::DemangleName(const std::string &Name, + free(DemangledName); + return Result; + } ++ if (!Name.empty() && Name.front() == '$') { ++ char *DemangledName = swift_demangle(Name.c_str(), Name.length(), 0, 0, 0); ++ if (DemangledName) { ++ std::string Result = DemangledName; ++ free(DemangledName); ++ return Result; ++ } ++ } + + if (DbiModuleDescriptor && DbiModuleDescriptor->isWin32Module()) + return std::string(demanglePE32ExternCFunc(Name)); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/ossfuzz_coverage_runner.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/ossfuzz_coverage_runner.go new file mode 100644 index 0000000000000000000000000000000000000000..d433da24638c96dbe58c77bf00a964f8834edd18 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/ossfuzz_coverage_runner.go @@ -0,0 +1,69 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mypackagebeingfuzzed + +import ( + "io/ioutil" + "os" + "runtime/pprof" + "testing" +) + +func TestFuzzCorpus(t *testing.T) { + dir := os.Getenv("FUZZ_CORPUS_DIR") + if dir == "" { + t.Logf("No fuzzing corpus directory set") + return + } + infos, err := ioutil.ReadDir(dir) + if err != nil { + t.Logf("Not fuzzing corpus directory %s", err) + return + } + filename := "" + defer func() { + if r := recover(); r != nil { + t.Error("Fuzz panicked in "+filename, r) + } + }() + profname := os.Getenv("FUZZ_PROFILE_NAME") + if profname != "" { + f, err := os.Create(profname + ".cpu.prof") + if err != nil { + t.Logf("error creating profile file %s\n", err) + } else { + _ = pprof.StartCPUProfile(f) + } + } + for i := range infos { + filename = dir + infos[i].Name() + data, err := ioutil.ReadFile(filename) + if err != nil { + t.Error("Failed to read corpus file", err) + } + FuzzFunction(data) + } + if profname != "" { + pprof.StopCPUProfile() + f, err := os.Create(profname + ".heap.prof") + if err != nil { + t.Logf("error creating heap profile file %s\n", err) + } + if err = pprof.WriteHeapProfile(f); err != nil { + t.Logf("error writing heap profile file %s\n", err) + } + f.Close() + } +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl new file mode 100644 index 0000000000000000000000000000000000000000..d6e71f2c3428745fde2b9ff2519f0ff67b3ebaa2 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl @@ -0,0 +1,35 @@ +#!/bin/bash -eu +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo "Precompiling AFLplusplus" + +pushd $SRC/aflplusplus > /dev/null +make clean +# Unset CFLAGS and CXXFLAGS while building AFL since we don't want to slow it +# down with sanitizers. +SAVE_CXXFLAGS=$CXXFLAGS +SAVE_CFLAGS=$CFLAGS +unset CXXFLAGS +unset CFLAGS +export AFL_IGNORE_UNKNOWN_ENVS=1 +make clean +AFL_NO_X86=1 PYTHON_INCLUDE=/ make +make -C utils/aflpp_driver + +popd > /dev/null + +echo "Done." diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede new file mode 100644 index 0000000000000000000000000000000000000000..362ef6a5b6e51bb5c81cbbc45ddab2660b966e0d --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede @@ -0,0 +1,49 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +echo -n "Precompiling centipede" + +# Build Centipede with bazel. +cd "$SRC/fuzztest/centipede/" +apt-get update && apt-get install libssl-dev -y +unset CXXFLAGS CFLAGS +# We need to use an older version of BAZEL because fuzztest relies on WORKSPACE +# Ref: https://github.com/google/oss-fuzz/pull/12838#issue-2733821058 +export USE_BAZEL_VERSION=7.4.0 +echo 'build --cxxopt=-stdlib=libc++ --linkopt=-lc++' >> /tmp/centipede.bazelrc +bazel --bazelrc=/tmp/centipede.bazelrc build -c opt :all +unset USE_BAZEL_VERSION + +# Prepare the weak symbols: +# This is necessary because we compile the target binary and the intermediate +# auxiliary binaries with the same cflags. The auxiliary binaries do not need +# data-flow tracing flags, but will still throw errors when they cannot find +# the corresponding functions. +# The weak symbols provides fake implementations for intermediate binaries. +$CXX "$SRC/fuzztest/centipede/weak_sancov_stubs.cc" -c -o "$SRC/fuzztest/centipede/weak.o" + +echo 'Removing extra stuff leftover to avoid bloating image.' + +rm -rf /clang-*.tgz /clang + +BAZEL_BIN_REAL_DIR=$(readlink -f $CENTIPEDE_BIN_DIR) +rm -rf $CENTIPEDE_BIN_DIR +mkdir -p $CENTIPEDE_BIN_DIR +mv $BAZEL_BIN_REAL_DIR/centipede/{centipede,libcentipede_runner.pic.a} $CENTIPEDE_BIN_DIR +rm -rf /root/.cache + +echo 'Done.' diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz new file mode 100644 index 0000000000000000000000000000000000000000..34d2ab8c2497ff14b4d49b8a3828febaa5135cc1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz @@ -0,0 +1,45 @@ +#!/bin/bash -eu +# Copyright 2019 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo "Precompiling honggfuzz" +export BUILD_OSSFUZZ_STATIC=true + +PACKAGES=( + libunwind8-dev + libblocksruntime-dev + liblzma-dev + libiberty-dev + zlib1g-dev + pkg-config) + +apt-get update && apt-get install -y ${PACKAGES[@]} + +pushd $SRC/honggfuzz > /dev/null +make clean +# These CFLAGs match honggfuzz's default, with the exception of -mtune to +# improve portability and `-D_HF_LINUX_NO_BFD` to remove assembly instructions +# from the filenames. +CC=clang CFLAGS="-O3 -funroll-loops -D_HF_LINUX_NO_BFD" make + +# libhfuzz.a will be added by CC/CXX linker directly during linking, +# but it's defined here to satisfy the build infrastructure +ar rcs honggfuzz.a libhfuzz/*.o libhfcommon/*.o +popd > /dev/null + +apt-get remove -y --purge ${PACKAGES[@]} +apt-get autoremove -y +echo "Done." 
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..4f244a0435c1a783f8b841b78cb8b385bbaa5f1b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py @@ -0,0 +1,120 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Extracts file paths to copy files from pyinstaller-generated executables""" +import os +import sys +import shutil +import zipfile + + +# Finds all *.toc files in ./workpath and reads these files in order to +# identify Python files associated with a pyinstaller packaged executable. +# Copies all of the Python files to a temporary directory (/medio) following +# the original directory structure. 
def get_all_files_from_toc(toc_file, file_path_set):
  """
  Extract filepaths from a .toc file and add to file_path_set
  """
  with open(toc_file, 'rb') as toc_file_fd:
    for raw_line in toc_file_fd:
      try:
        line = raw_line.decode()
      except:  # pylint:disable=bare-except
        continue
      if '.py' not in line:
        continue

      for token in line.split(' '):
        token = token.replace('\'', '').replace(',', '').replace('\n', '')
        if '.py' not in token:
          continue
        # A token is either a real path on disk, or a path inside a zipped
        # .egg archive that must be unpacked first.
        if os.path.isfile(token):
          file_path_set.add(token)
        elif '.egg' in token:  # check if this is an egg
          egg_parts = token.split('.egg')
          if len(egg_parts) != 2:
            continue
          egg_path = egg_parts[0] + '.egg'
          if not os.path.isfile(egg_path):
            continue

          print('Unzipping contents of %s' % egg_path)

          # Unzip the egg and substitute the archive with its own unpacked
          # contents so the member file becomes reachable on disk.
          tmp_dir_name = 'zipdcontents'
          if os.path.isdir(tmp_dir_name):
            shutil.rmtree(tmp_dir_name)

          with zipfile.ZipFile(egg_path, 'r') as zip_f:
            zip_f.extractall(tmp_dir_name)
          os.remove(egg_path)
          shutil.copytree(tmp_dir_name, egg_path)

          # The member path should now resolve; re-check and record it.
          if os.path.isfile(token):
            file_path_set.add(token)


def create_file_structure_from_tocs(work_path, out_path):
  """
  Extract the Python files that are added as paths in the output of
  a pyinstaller operation. The files are determined by reading through
  all of the *.toc files in the workpath of pyinstaller.

  The files will be copied into the out_path using a similar file path
  as they originally are. If any archive (.egg) files are present in the
  .toc files, then unzip the archives and substitute the archive for the
  unzipped content, i.e. we will extract the archives and collect the source
  files.
  """
  print('Extracts files from the pyinstaller workpath')
  file_path_set = set()
  for entry in os.listdir(work_path):
    entry_path = os.path.join(work_path, entry)
    if not os.path.isdir(entry_path):
      continue

    # We have a directory; scan it for pyinstaller *.toc manifests.
    for candidate in os.listdir(entry_path):
      if not '.toc' in candidate:
        continue
      get_all_files_from_toc(os.path.join(entry_path, candidate), file_path_set)

  for file_path in file_path_set:
    # Re-root absolute paths under out_path, preserving their structure.
    rel_src = file_path[1:] if file_path[0] == '/' else file_path
    dst_path = os.path.join(out_path, rel_src)
    os.makedirs(os.path.dirname(dst_path), exist_ok=True)
    shutil.copy(file_path, dst_path)


def main():
  """
  Main handler.
  """
  if len(sys.argv) != 3:
    print('Use: python3 python_coverage_helper.py pyinstaller_workpath '
          'destination_for_output')
    sys.exit(1)
  create_file_structure_from_tocs(sys.argv[1], sys.argv[2])


if __name__ == '__main__':
  main()
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/README.md new file mode 100644 index 0000000000000000000000000000000000000000..cbb07c1f1de011555b07e9ddc57c01fac34deba4 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/README.md @@ -0,0 +1,3 @@ +# pysecsan + +Security sanitizers for vulnerability detection during runtime. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pyproject.toml b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..636f93d09324148c5c33b90b2cfcde4feb948207 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pyproject.toml @@ -0,0 +1,22 @@ +[build-system] +requires = ["setuptools>=61.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "pysecsan" +version = "0.1.0" +authors = [ + { name="David Korczynski", email="david@adalogics.com" }, +] +description = "Sanitizers to detect security vulnerabilities at runtime." 
+readme = "README.md" +requires-python = ">=3.7" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", +] + +[project.urls] +"Homepage" = "https://github.com/google/oss-fuzz/tree/master/infra/sanitizers/pysecsan" +"Bug Tracker" = "https://github.com/google/oss-fuzz/issues" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..606ff453358402b2ef653ccae9944dd0481000ea --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Glue for pysecsan library.""" + +# Import sanlib and expose only needs functionality by way of __all__ +from .sanlib import * + +# pylint: disable=undefined-all-variable +__all__ = ['add_hooks'] diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/command_injection.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/command_injection.py new file mode 100644 index 0000000000000000000000000000000000000000..1d010047588f04ad66dba55173f0a13284fbc21e --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/pysecsan/command_injection.py @@ -0,0 +1,106 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Sanitizers for capturing code injections.""" + +from typing import Optional +from pysecsan import sanlib + + +def get_all_substr_prefixes(main_str, sub_str): + """Yields all strings prefixed with sub_str in main_str.""" + idx = 0 + while True: + idx = main_str.find(sub_str, idx) + if idx == -1: + return + yield main_str[0:idx] + # Increase idx the length of the substring from the current position + # where an occurence of the substring was found. 
def get_all_substr_prefixes(main_str, sub_str):
  """Yield, for each occurrence of sub_str in main_str, the prefix of
  main_str that comes before that occurrence."""
  idx = 0
  while True:
    idx = main_str.find(sub_str, idx)
    if idx == -1:
      return
    yield main_str[0:idx]
    # Increase idx the length of the substring from the current position
    # where an occurrence of the substring was found.
    idx += len(sub_str)


# pylint: disable=unsubscriptable-object
def check_code_injection_match(elem, check_unquoted=False) -> Optional[str]:
  """Identify if elem is an injection match.

  Returns a human-readable description string when an injection (or the
  potential for one) is found, and None otherwise. Two signals are used:
    - the exact marker 'exec-sanitizer', which a successful explicit
      injection produces;
    - the fuzzer taint marker 'FROMFUZZ' appearing in the data. With
      check_unquoted=True, taint is only reported when at least one
      occurrence sits outside double quotes, since fully quoted taint
      cannot break out of the string.
  """
  # Check exact match
  if elem == 'exec-sanitizer':
    return 'Explicit command injection found.'

  # Check potential for injecting into a string
  if 'FROMFUZZ' in elem:
    if check_unquoted:
      # Report if any occurrence is unquoted, i.e. preceded by an even
      # number of double quotes.
      for sub_str in get_all_substr_prefixes(elem, 'FROMFUZZ'):
        if sub_str.count('\"') % 2 == 0:
          return 'Fuzzer controlled content in data. Code injection potential.'

      # Return None if all fuzzer taints were quoted
      return None
    return 'Fuzzer-controlled data in command string. Injection potential.'
  return None


# pylint: disable=invalid-name
def hook_pre_exec_subprocess_Popen(cmd, **kwargs):
  """Hook for subprocess.Popen.

  Command injections depend on whether the first argument is a list of
  strings or a single string (with shell=True), so both shapes are
  handled. Example: tests/poe/ansible-runner-cve-2021-4041.
  """

  arg_shell = 'shell' in kwargs and kwargs['shell']

  if isinstance(cmd, str):
    res = check_code_injection_match(cmd, check_unquoted=True)
    if res is not None:
      # If shell arg is true and the string is tainted and unquoted,
      # that is a definite code injection.
      if arg_shell is True:
        sanlib.abort_with_issue('Code injection in Popen', 'Command injection')

      # It's a maybe: will not report this to avoid false positives.
      # TODO: add more precise detection here.

  # Check for hg command injection: tainted arguments before a '--'
  # separator can be interpreted as mercurial options such as
  # --config=alias... Example: tests/poe/libvcs-cve-2022-21187.
  # The `cmd and` guard avoids IndexError on an empty argument list.
  if cmd and cmd[0] == 'hg':
    found_dashes = False
    for idx in range(1, len(cmd)):
      # BUG FIX: the original compared cmd[0] == '--', which can never be
      # true inside this branch (cmd[0] is 'hg'), so the '--' separator
      # was never honoured. Compare the current argument instead.
      if cmd[idx] == '--':
        found_dashes = True
      if not found_dashes and check_code_injection_match(cmd[idx]):
        sanlib.abort_with_issue(
            'command injection likely by way of mercurial. The following '
            f'command {str(cmd)} is executed, and if you substitute {cmd[idx]} '
            'with \"--config=alias.init=!touch HELLO_PY\" then you will '
            'create HELLO_PY', 'Command injection')


def hook_pre_exec_os_system(cmd):
  """Hook for os.system."""
  res = check_code_injection_match(cmd)
  if res is not None:
    sanlib.abort_with_issue(f'code injection by way of os.system\n{res}',
                            'Command injection')


def hook_pre_exec_eval(cmd, *args, **kwargs):
  """Hook for eval. Experimental atm."""
  # eval() also accepts code objects; the taint check only applies to
  # strings (the `in` test would raise TypeError on a code object).
  if not isinstance(cmd, str):
    return
  res = check_code_injection_match(cmd, check_unquoted=True)
  if res is not None:
    sanlib.abort_with_issue(f'Potential code injection by way of eval\n{res}',
                            'Command injection')
################################################################################
"""Sanitizer for regular expression dos.

ReDOS identification is a non-trivial task
 - https://arxiv.org/pdf/1701.04045.pdf
 - https://dl.acm.org/doi/pdf/10.1145/3236024.3236027
and the current approach simply checks for excessive computing time in
re.Pattern.findall(). In essence this is a refinement of the fuzzer's
traditional timeout checker, which would effectively detect these attacks
by way of timeouts. A smarter approach would be to statically scan the
regex patterns for super-linear behaviour, e.g. with
https://github.com/doyensec/regexploit, checking for taint in
re.compile(PATTERN) and backtracking potential within PATTERN.
"""

# pylint: disable=protected-access

import time
import os
from pysecsan import sanlib

# Wall-clock timestamp taken just before a hooked findall() runs;
# None whenever no findall() is in flight.
START_RE_TIME = None

# Seconds a single findall() may run before it is reported as a ReDOS.
REDOS_TIME_LIMIT = 4


# pylint: disable=global-statement
def hook_post_exec_re_pattern_findall(self, re_str):
  """Hook post execution re.compile().findall().

  Reports a potential ReDOS when the match took longer than
  REDOS_TIME_LIMIT seconds.
  """
  _ = self  # Satisfy lint
  global START_RE_TIME
  # BUG FIX: the original caught NameError, but START_RE_TIME is
  # initialised to None at module level, so a missing pre-hook surfaced
  # as an uncaught TypeError (float - None), never as NameError. Check
  # for None explicitly instead.
  if START_RE_TIME is None:
    sanlib.sanitizer_log(
        'starttime is not set, which it should have. Error in PySecSan',
        sanlib.LOG_INFO)
    os._exit(1)
  elapsed = time.time() - START_RE_TIME
  # Reset so a stale timestamp is never paired with a later call.
  START_RE_TIME = None
  if elapsed > REDOS_TIME_LIMIT:
    sanlib.abort_with_issue(f'Potential ReDOS attack.\n {re_str}', 'ReDOS')


def hook_pre_exec_re_pattern_findall(self, string):
  """Hook pre execution of re.pattern().findall(): records the start time
  consumed by the matching post-hook above."""
  _ = (self, string)  # Satisfy lint
  global START_RE_TIME
  START_RE_TIME = time.time()


def hook_post_exec_re_compile(retval, pattern, flags=None):
  """Hook for re.compile post execution.

  Wraps the returned pattern object so that its findall() calls are
  timed by the pre/post hooks above.
  """
  _ = (pattern, flags)  # Satisfy lint
  sanlib.sanitizer_log('Inside of post compile hook', sanlib.LOG_DEBUG)
  wrapper_object = sanlib.create_object_wrapper(
      findall=(hook_pre_exec_re_pattern_findall,
               hook_post_exec_re_pattern_findall))
  hooked_object = wrapper_object(retval)
  return hooked_object


def hook_pre_exec_re_compile(pattern, flags=None):
  """Pre-execution hook for re.compile.

  Placeholder: a future refinement could statically check whether tainted
  input exists in the pattern, which would make a ReDOS likely.
  """
  _ = (pattern, flags)  # Satisfy lint
  sanlib.sanitizer_log('Inside re compile hook', sanlib.LOG_DEBUG)
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Core routines for pysecsan library."""

# pylint: disable=protected-access

import re
import os
import builtins
import functools
import subprocess
import traceback
import importlib.util

from typing import Any, Callable, Optional

LOG_DEBUG = 0
LOG_INFO = 1
PYSECSAN_LOG_LVL = LOG_INFO

# Message that will be printed to stdout when an issue is found. Fuzzing
# drivers grep for this label to tell a PySecSan finding from other output.
PYSECSAN_BUG_LABEL = r'===BUG DETECTED: PySecSan:'


def sanitizer_log(msg, log_level, force=False, log_prefix=True):
  """Helper printing function.

  Prints msg when log_level is at least PYSECSAN_LOG_LVL or when force is
  set; log_prefix controls the '[PYSECSAN] ' prefix.
  """
  if log_level >= PYSECSAN_LOG_LVL or force:
    if log_prefix:
      print(f'[PYSECSAN] {msg}')
    else:
      print(f'{msg}')


def sanitizer_log_always(msg, log_prefix=True):
  """Wrapper for sanitizer logging. Will always log."""
  sanitizer_log(msg, 0, force=True, log_prefix=log_prefix)


def is_module_present(mod_name):
  """Identify if module is importable."""
  return importlib.util.find_spec(mod_name) is not None


def _log_bug(bug_title):
  # Same output as the original '%s %s ===' %-formatting.
  sanitizer_log_always(f'{PYSECSAN_BUG_LABEL} {bug_title} ===',
                       log_prefix=False)


def abort_with_issue(msg, bug_title):
  """Print message, display stacktrace and force process exit.

  Use this function for signalling an issue is found and use the messages
  logged from this function to determine if a fuzzer found a bug.
  """
  # Show breaker string using an ASAN approach (uses 65 =)
  sanitizer_log_always('=' * 65, log_prefix=False)

  # Log issue message
  _log_bug(bug_title)
  sanitizer_log_always(msg)

  # Log stacktrace
  sanitizer_log_always('Stacktrace:')
  traceback.print_stack()

  # Force exit. Use os._exit here: sys.exit exits by raising SystemExit,
  # which the interpreter handles, but user code may catch it. Exiting
  # without an exception means the abort cannot be swallowed.
  sanitizer_log_always('Exiting')
  os._exit(1)


def is_exact_taint(stream) -> bool:
  """Checks if stream is an exact match for taint from fuzzer.

  The fuzzer has to get 8 characters right; when found it shows a high
  level of control over the data.
  """
  return stream == 'FROMFUZZ'


def create_object_wrapper(**methods):
  """Hooks functions in an object.

  This is needed for hooking built-in types and object attributes.

  Example use case: finding ReDOS vulnerabilities of the pattern

  ```
  import re
  r = re.compile(REGEX)
  for _ in r.findall(...)
  ```

  Here r.findall is a reference to re.Pattern.findall, a built-in,
  non-writeable type. To hook such calls we wrap the object, and hook
  re.compile to return the wrapped/hooked object.

  Each keyword argument maps a method name to a (pre_hook, post_hook)
  tuple; either element may be None.
  """

  class Wrapper():
    """Wrap an object by hiding attributes."""

    def __init__(self, instance):
      object.__setattr__(self, 'instance', instance)

    def __setattr__(self, name, value):
      object.__setattr__(object.__getattribute__(self, 'instance'), name, value)

    def __getattribute__(self, name):
      instance = object.__getattribute__(self, 'instance')

      def _hook_func(self, pre_hook, post_hook, orig, *args, **kargs):
        if pre_hook is not None:
          pre_hook(self, *args, **kargs)
        # No need to pass the instance here: orig was extracted with
        # instance.__getattribute__(name) and is already bound.
        orig_retval = orig(*args, **kargs)

        if post_hook is not None:
          post_hook(self, *args, **kargs)
        return orig_retval

      # If this is a wrapped method, return a bound method
      if name in methods:
        pre_hook = methods[name][0]
        post_hook = methods[name][1]
        orig = instance.__getattribute__(name)
        return (lambda *args, **kargs: _hook_func(self, pre_hook, post_hook,
                                                  orig, *args, **kargs))

      # Otherwise, just return attribute of instance
      return instance.__getattribute__(name)

  return Wrapper


# pylint: disable=unsubscriptable-object
def add_hook(function: Callable[[Any], Any],
             pre_exec_hook: Optional[Callable[[Any], Any]] = None,
             post_exec_hook: Optional[Callable[[Any], Any]] = None):
  """Hook a function.

  Hooks can be placed pre and post function call; at least one hook is
  needed. This is intended for non-object hooks — to hook functions on
  objects, `create_object_wrapper` is used in combination with hooking
  the object-producing function post execution.

  Raises:
    ValueError: if neither hook is supplied.
  """
  if pre_exec_hook is None and post_exec_hook is None:
    # ValueError instead of the original bare Exception: callers should
    # not be forced to catch the broadest possible type.
    raise ValueError('Some hooks must be included')

  @functools.wraps(function)
  def run(*args, **kwargs):
    sanitizer_log(f'Hook start {str(function)}', LOG_DEBUG)

    # Call hook
    if pre_exec_hook is not None:
      pre_exec_hook(*args, **kwargs)

    # Call the original function in the event the hook did not indicate
    # failure.
    ret = function(*args, **kwargs)

    # Post execution hook. Overwrite return value if anything is returned
    # by post hook.
    if post_exec_hook is not None:
      tmp_ret = post_exec_hook(ret, *args, **kwargs)
      if tmp_ret is not None:
        sanitizer_log('Overwriting return value', LOG_DEBUG)
        ret = tmp_ret
    sanitizer_log(f'Hook end {str(function)}', LOG_DEBUG)
    return ret

  return run


def add_hooks():
  """Sets up hooks."""
  # Imported here rather than at module top to break the import cycle:
  # these sibling modules themselves import sanlib.
  # pylint: disable=import-outside-toplevel
  from pysecsan import command_injection, redos, yaml_deserialization

  sanitizer_log('Starting', LOG_INFO)
  os.system = add_hook(os.system,
                       pre_exec_hook=command_injection.hook_pre_exec_os_system)
  subprocess.Popen = add_hook(
      subprocess.Popen,
      pre_exec_hook=command_injection.hook_pre_exec_subprocess_Popen)

  # BUG FIX: __builtins__ is only a dict in the __main__ module; in an
  # imported module (as sanlib is) it is the builtins module object, so
  # the original __builtins__['eval'] subscript raised TypeError. Assign
  # through the builtins module instead, which works in both cases.
  builtins.eval = add_hook(
      builtins.eval, pre_exec_hook=command_injection.hook_pre_exec_eval)

  re.compile = add_hook(re.compile,
                        pre_exec_hook=redos.hook_pre_exec_re_compile,
                        post_exec_hook=redos.hook_post_exec_re_compile)

  # Hack to determine if yaml is eligible, because pkg_resources does
  # not seem to work from pyinstaller.
  # pylint: disable=import-outside-toplevel
  if is_module_present('yaml'):
    import yaml
    sanitizer_log('Hooking pyyaml.load', LOG_DEBUG)
    yaml.load = add_hook(
        yaml.load,
        pre_exec_hook=yaml_deserialization.hook_pre_exec_pyyaml_load,
    )
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Catches vulnerable yaml desrializations that can potentially lead to +arbitrary code execution.""" +from pysecsan import sanlib + +try: + import yaml +# pylint: disable=broad-except +except Exception: + pass + + +def hook_pre_exec_pyyaml_load(stream, loader): + """Hook for pyyaml.load_yaml. + + Exits if the loader is unsafe or vanilla loader and the stream passed + to the loader is controlled by the fuzz data + """ + # Ensure loader is the unsafe loader or vanilla loader + if loader not in (yaml.loader.Loader, yaml.loader.UnsafeLoader): + return + + # Check for exact taint in stream + if sanlib.is_exact_taint(stream): + msg = ( + 'Yaml deserialization issue.\n' + 'Unsafe deserialization can be used to execute arbitrary commands.\n') + sanlib.abort_with_issue(msg, 'Yaml deserialisation') diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/setup.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..d54eeb94c4af3ee12d1ca47e994892566c65a3b0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/setup.py @@ -0,0 +1,21 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Config for installing python as package.""" +from setuptools import setup, find_packages + +setup(name='pysecsan', + version='0.1', + author='David Korczynski', + author_email='david@adalogics.com', + packages=find_packages(exclude='tests')) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d0e2e6fc6cca7aa38885792bede7e10874b3e13b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/README.md @@ -0,0 +1 @@ +# Tests including Proof of Exploits diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/eval_command_injection.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/eval_command_injection.py new file mode 100644 index 0000000000000000000000000000000000000000..c980f30c44e1be351bb7c82ed00c7f089a6fe469 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/eval_command_injection.py @@ -0,0 +1,52 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Fuzzer targetting command injection of eval.""" +# pylint: disable=eval-used + +import sys +import atheris +import pysecsan + +pysecsan.add_hooks() + + +def list_files_perhaps(param, magicval): + """Pass fuzzer data into eval.""" + if len(param) < 3: + return + if magicval == 1337: + try: + eval("FROMFUZZ") + except ValueError: + pass + + +def test_one_input(data): + """Fuzzer entrypoint.""" + fdp = atheris.FuzzedDataProvider(data) + list_files_perhaps(fdp.ConsumeUnicodeNoSurrogates(24), + fdp.ConsumeIntInRange(500, 1500)) + + +def main(): + """Set up and start fuzzing.""" + + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/os_command_injection.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/os_command_injection.py new file mode 100644 index 0000000000000000000000000000000000000000..9493aa51cca777e876c12e40e75a7036d4e4f46a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/os_command_injection.py @@ -0,0 +1,52 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Fuzzer targetting command injection of os.system.""" + +import os +import sys +import atheris +import pysecsan + + +def list_files_perhaps(param, magicval): + """Pass fuzzer data into os.system.""" + if 'B' not in param: + return + if magicval == 1338: + os.system('exec-san') + elif magicval == 1339: + os.system('ls -la FROMFUZZ') + else: + os.system('ls -la ./') + + +def test_one_input(data): + """Fuzzer entrypoint.""" + fdp = atheris.FuzzedDataProvider(data) + list_files_perhaps(fdp.ConsumeUnicodeNoSurrogates(24), + fdp.ConsumeIntInRange(500, 1500)) + + +def main(): + """Set up and start fuzzing.""" + pysecsan.add_hooks() + + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..e95473cfdb58d84b8ff36f6b3140e8f4995d40f3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/build.sh @@ -0,0 +1,25 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +python3 -m pip install pysecsan + +git clone https://github.com/ansible/ansible-runner/ +cd ansible-runner +git checkout cdc0961df51fa1e10b44371944aafe5ae140b98c +python3 -m pip install . +cd .. +python3 fuzz_ansible_runner.py diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/fuzz_ansible_runner.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/fuzz_ansible_runner.py new file mode 100644 index 0000000000000000000000000000000000000000..44bd5ecea4e0f12fe4c955c0f88f2f118d40c9b3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/ansible-runner-cve-2021-4041/fuzz_ansible_runner.py @@ -0,0 +1,64 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Targets: https://github.com/advisories/GHSA-6j58-grhv-2769.""" + +import sys +import atheris +import pexpect +import pysecsan +import ansible_runner +from ansible_runner.config.runner import RunnerConfig + +pysecsan.add_hooks() + + +def test_one_input(data): + """Fuzzer entrypoint.""" + fdp = atheris.FuzzedDataProvider(data) + + conf = RunnerConfig('/tmp/') + conf.suppress_ansible_output = True + conf.expect_passwords = {pexpect.TIMEOUT: None, pexpect.EOF: None} + conf.cwd = str('/tmp/') + conf.env = {} + conf.job_timeout = 10 + conf.idle_timeout = 0 + conf.pexpect_timeout = 2. + conf.pexpect_use_poll = True + conf.command = 'from_fuzzer' + + runner = ansible_runner.Runner(conf) + runner.resource_profiling = True + # rc.resource_profiling_base_cgroup = "; exec-san" + assistance = True + if assistance and fdp.ConsumeIntInRange(1, 100) > 80: + conf.resource_profiling_base_cgroup = 'FROMFUZZ' + else: + conf.resource_profiling_base_cgroup = fdp.ConsumeUnicodeNoSurrogates(24) + try: + runner.run() + except (RuntimeError, ValueError, TypeError) as _: + pass + + +def main(): + """Set up and start fuzzing.""" + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..5f193a4c2a441d9e179dac4685fa0938ae2ad402 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/build.sh @@ -0,0 +1,21 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +python3 -m pip install pysecsan + +python3 -m pip install libvcs==0.11.0 +python3 ./fuzz_libvcs.py diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/fuzz_libvcs.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/fuzz_libvcs.py new file mode 100644 index 0000000000000000000000000000000000000000..c2bdb5890f8057da8e0754a71b96826b4077cd15 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/libvcs-cve-2022-21187/fuzz_libvcs.py @@ -0,0 +1,45 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Targets https://github.com/advisories/GHSA-mv2w-4jqc-6fg4.""" + +import sys +import atheris +import pysecsan +from libvcs.shortcuts import create_repo + +pysecsan.add_hooks() + + +def test_one_input(data): + """Target code injection in libvcs.""" + fdp = atheris.FuzzedDataProvider(data) + mercurial_repo = create_repo(url=fdp.ConsumeUnicodeNoSurrogates(128), + vcs='hg', + repo_dir='./') + try: + mercurial_repo.update_repo() + except (ValueError, FileNotFoundError) as exception: + _ = exception # Satisfy lint + + +def main(): + """Set up and start fuzzing.""" + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..9311619c3fea1513973f0a559b69f0c61b134c8f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/build.sh @@ -0,0 +1,25 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +python3 -m pip install pysecsan + +git clone https://github.com/python-ldap/python-ldap +cd python-ldap +git checkout 404c36b702c5b3a7e60729745c8bda16098b1472 +python3 -m pip install . +cd ../ +python3 ./fuzz_ldap.py diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/fuzz_ldap.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/fuzz_ldap.py new file mode 100644 index 0000000000000000000000000000000000000000..bacc9b209537c7cf5995f94134a635cca326d11f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/python-ldap-GHSL-2021-117/fuzz_ldap.py @@ -0,0 +1,42 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Targets: https://github.com/python-ldap/python-ldap/security/advisories/GHSA-r8wq-qrxc-hmcm""" # pylint: disable=line-too-long + +import sys +import atheris +import pysecsan +import ldap.schema + +pysecsan.add_hooks() + + +def test_one_input(data): + """Fuzzer targetting regex dos in ldap.""" + fdp = atheris.FuzzedDataProvider(data) + try: + ldap.schema.split_tokens(fdp.ConsumeUnicodeNoSurrogates(1024)) + except ValueError: + pass + + +def main(): + """Set up and start fuzzing.""" + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..ad1412a7ac70ac9dbb6e24a55e2c6db94122276b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/build.sh @@ -0,0 +1,22 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +git clone --depth 1 --branch 1.5.10 https://github.com/PyTorchLightning/pytorch-lightning.git +cd pytorch-lightning +python3 -m pip install . +cd ../ +python3 ./fuzz_pytorch_lightning.py diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.dict b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.dict new file mode 100644 index 0000000000000000000000000000000000000000..54636cb47520367bfbf89ceae8b1a373b7dbbd7a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.dict @@ -0,0 +1 @@ +"os.system('exec-sanitizer')" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.py new file mode 100644 index 0000000000000000000000000000000000000000..a40fe6d711777951a05cfab7c1a76d8e238b8d02 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/poe/pytorch-lightning-1.5.10/fuzz_pytorch_lightning.py @@ -0,0 +1,67 @@ +#!/usr/local/bin/python3 +# +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Exploit pytorch lightning with fuzzer's input as a random env variable. +This PoC is extended from a report in GitHub Advisory Database: +https://github.com/advisories/GHSA-r5qj-cvf9-p85h +The original report documents an exploit using a specific environment variable, +we show a way to achieve the same exploit with an arbitrary env variable. +""" + +import os +import sys +import atheris +import pysecsan + +pysecsan.add_hooks() + +with atheris.instrument_imports(): + from pytorch_lightning import Trainer + from pytorch_lightning.utilities.argparse import parse_env_variables + + +def prepare_fuzzing_input(data): + """Prepare the data needed by the exploit with input data from fuzzers.""" + data = data.replace(b'\0', b'') + env_name = 'AN_ARBITRARY_ENV_NAME' + return data, env_name + + +def exploit_target(env_value, env_name): + """This target is based on a snippet from the official documentation of + `parse_env_variables`: + https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.utilities.argparse.html # pylint: disable=line-too-long + It might not be the most realistic example, + but serves as a PoC to show that SystemSan works for Python.""" + os.environb[env_name.encode()] = env_value + parse_env_variables(Trainer, template=env_name) + + +def TestOneInput(data): # pylint: disable=invalid-name + """Exploit the target only with input data from fuzzers.""" + env_value, env_name = prepare_fuzzing_input(data) + exploit_target(env_value, env_name) + + +def 
main(): + """Fuzz target with atheris.""" + atheris.Setup(sys.argv, TestOneInput) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/subprocess_popen_injection.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/subprocess_popen_injection.py new file mode 100644 index 0000000000000000000000000000000000000000..da05135f91e6ba33e1f4e868adb1ef778223d760 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/subprocess_popen_injection.py @@ -0,0 +1,51 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Fuzzer displaying insecure use of subprocess.Popen.""" + +import sys +import subprocess +import atheris +import pysecsan + + +def list_files_perhaps(param): + """Insecure call to Popen.""" + try: + subprocess.Popen(' '.join(['ls', '-la', param]), shell=True) + except ValueError: + pass + + +def test_one_input(data): + """Fuzzer entrypoint.""" + fdp = atheris.FuzzedDataProvider(data) + + if fdp.ConsumeIntInRange(1, 10) == 5: + list_files_perhaps('FROMFUZZ') + else: + list_files_perhaps('.') + + +def main(): + """Set up and start fuzzing.""" + pysecsan.add_hooks() + + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_general.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_general.py new file mode 100644 index 0000000000000000000000000000000000000000..6581acede3f00ee595c4c849c69791b489e1e3b5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_general.py @@ -0,0 +1,47 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Fuzzer for insecure yaml deserialization.""" + +import sys +import yaml +import atheris +import pysecsan + + +def serialize_with_tainted_data(param): + """Hit insecure yaml function.""" + try: + yaml.load(param, yaml.Loader) + except yaml.YAMLError: + pass + + +def test_one_input(data): + """Fuzzer routine.""" + fdp = atheris.FuzzedDataProvider(data) + serialize_with_tainted_data(fdp.ConsumeUnicodeNoSurrogates(32)) + + +def main(): + """Set up and start fuzzing.""" + pysecsan.add_hooks() + + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_simple.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_simple.py new file mode 100644 index 0000000000000000000000000000000000000000..bec48d20780ba2ffdda020cfc91270b5462dd519 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/sanitizers/pysecsan/tests/yaml_deserialization_simple.py @@ -0,0 +1,54 @@ +#!/usr/bin/python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Fuzzer triggering insecure yaml serialization.""" + +import sys +import yaml +import atheris +import pysecsan + + +def serialize_with_tainted_data(param, magicval): + """Pass data to insecure yaml functions.""" + if magicval == 1337: + try: + yaml.load(param, yaml.Loader) + except yaml.YAMLError: + pass + elif magicval == 1338: + try: + yaml.load('FROMFUZZ', yaml.Loader) + except yaml.YAMLError: + pass + + +def test_one_input(data): + """Fuzzer entrypoint.""" + fdp = atheris.FuzzedDataProvider(data) + serialize_with_tainted_data(fdp.ConsumeUnicodeNoSurrogates(32), + fdp.ConsumeIntInRange(500, 1500)) + + +def main(): + """Set up and start fuzzing.""" + pysecsan.add_hooks() + + atheris.instrument_all() + atheris.Setup(sys.argv, test_one_input, enable_python_coverage=True) + atheris.Fuzz() + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap new file mode 100644 index 0000000000000000000000000000000000000000..f967074fdc9161fa3c95814e3b24e0f2324af61c --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap @@ -0,0 +1,66 @@ +#!/bin/bash -eux +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +# Determine srcmap of checked out source code + +SRCMAP=$(tempfile) +echo "{}" > $SRCMAP + +# $1 - json file, $2 - jq program +function jq_inplace() { + F=$(tempfile) && cat $1 | jq "$2" > $F && mv $F $1 +} + +PATHS_TO_SCAN="$SRC" + +if [[ $FUZZING_LANGUAGE == "go" ]]; then + PATHS_TO_SCAN="$PATHS_TO_SCAN $GOPATH" +fi + +# Git +for DOT_GIT_DIR in $(find $PATHS_TO_SCAN -name ".git" -type d); do + GIT_DIR=$(dirname $DOT_GIT_DIR) + cd $GIT_DIR + GIT_URL=$(git config --get remote.origin.url) + GIT_REV=$(git rev-parse HEAD) + jq_inplace $SRCMAP ".\"$GIT_DIR\" = { type: \"git\", url: \"$GIT_URL\", rev: \"$GIT_REV\" }" +done + +# Subversion +for DOT_SVN_DIR in $(find $PATHS_TO_SCAN -name ".svn" -type d); do + SVN_DIR=$(dirname $DOT_SVN_DIR) + cd $SVN_DIR + SVN_URL=$(svn info | grep "^URL:" | sed 's/URL: //g') + SVN_REV=$(svn info -r HEAD | grep "^Revision:" | sed 's/Revision: //g') + jq_inplace $SRCMAP ".\"$SVN_DIR\" = { type: \"svn\", url: \"$SVN_URL\", rev: \"$SVN_REV\" }" +done + +# Mercurial +for DOT_HG_DIR in $(find $PATHS_TO_SCAN -name ".hg" -type d); do + HG_DIR=$(dirname $DOT_HG_DIR) + cd $HG_DIR + HG_URL=$(hg paths default) + HG_REV=$(hg --debug id -r. 
-i) + jq_inplace $SRCMAP ".\"$HG_DIR\" = { type: \"hg\", url: \"$HG_URL\", rev: \"$HG_REV\" }" +done + +if [ "${OSSFUZZ_REVISION-}" != "" ]; then + jq_inplace $SRCMAP ".\"/src\" = { type: \"git\", url: \"https://github.com/google/oss-fuzz.git\", rev: \"$OSSFUZZ_REVISION\" }" +fi + +cat $SRCMAP +rm $SRCMAP diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/test_data/culprit-commit.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/test_data/culprit-commit.txt new file mode 100644 index 0000000000000000000000000000000000000000..5529d35851db8fa39204520f6bb279c3ef772848 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/test_data/culprit-commit.txt @@ -0,0 +1,30 @@ +ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d is the first bad commit +commit ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d +Author: John Doe +Date: Tue Aug 6 08:41:53 2019 +0000 + + [compiler-rt] Implement getrandom interception + + Summary: + Straightforward implementation of `getrandom` syscall and libc + hooks. + + Test Plan: Local MSAN failures caused by uninstrumented `getrandom` + calls stop failing. + + Patch by John Doe 3. 
+ + Reviewers: jonhdoe2, johndoe + + Reviewed By: johndoe + + Subscribers: johndoe4, johndoe5, #sanitizers, llvm-commits + + Tags: #sanitizers, #llvm + + Differential Revision: https://reviews.llvm.org/D65551 + + llvm-svn: 367999 + +:040000 040000 8db10511ca83cc7b0265c7703684cd386350151b 62508fdc5e8919bbb2a0bd185cc109868192cdb0 M compiler-rt +bisect run success diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py new file mode 100644 index 0000000000000000000000000000000000000000..3b3a6d39aa6dfc75d6694808e336d5ed1e5226ed --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Script for writing from project.yaml to .labels file.""" + +import os +import json +import sys + + +def main(): + """Writes labels.""" + if len(sys.argv) != 3: + print('Usage: write_labels.py labels_json out_dir', file=sys.stderr) + sys.exit(1) + + labels_by_target = json.loads(sys.argv[1]) + out = sys.argv[2] + + for target_name, labels in labels_by_target.items(): + # Skip over wildcard value applying to all fuzz targets + if target_name == '*': + continue + with open(os.path.join(out, target_name + '.labels'), 'w') as file_handle: + file_handle.write('\n'.join(labels)) + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..959b4d430de1e2db53c6f181ee519ce84c1d126c --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/Dockerfile @@ -0,0 +1,79 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Docker image with clang installed. + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG} + +ARG arch=x86_64 + +ENV FUZZINTRO_OUTDIR=$SRC + +# Install newer cmake. +# Many projects, as well as recent clang versions, need a newer cmake. 
+ENV CMAKE_VERSION 3.29.2 +RUN apt-get update && apt-get install -y wget sudo && \ + wget -q https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-Linux-$arch.sh && \ + chmod +x cmake-$CMAKE_VERSION-Linux-$arch.sh && \ + ./cmake-$CMAKE_VERSION-Linux-$arch.sh --skip-license --prefix="/usr/local" && \ + rm cmake-$CMAKE_VERSION-Linux-$arch.sh && \ + SUDO_FORCE_REMOVE=yes apt-get autoremove --purge -y wget sudo && \ + rm -rf /usr/local/doc/cmake /usr/local/bin/cmake-gui + +RUN apt-get update && apt-get install -y git && \ + git clone https://github.com/ossf/fuzz-introspector.git fuzz-introspector && \ + cd fuzz-introspector && \ + git checkout f9bcb8824a18d60d57e2430c5b43f525d811cae8 && \ + git submodule init && \ + git submodule update && \ + apt-get autoremove --purge -y git && \ + rm -rf .git + +COPY checkout_build_install_llvm.sh /root/ +# Keep all steps in the same script to decrease the number of intermediate +# layers in the Dockerfile. +RUN /root/checkout_build_install_llvm.sh +RUN rm /root/checkout_build_install_llvm.sh + +# Setup the environment. +ENV CC "clang" +ENV CXX "clang++" +ENV CCC "clang++" + +# FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION is described at +# https://llvm.org/docs/LibFuzzer.html#fuzzer-friendly-build-mode + +# The implicit-function-declaration and implicit-int errors are downgraded to a +# warning, to allow compiling legacy code. 
+# See https://releases.llvm.org/16.0.0/tools/clang/docs/ReleaseNotes.html#potentially-breaking-changes +# Same for deprecated-declarations, int-conversion, +# incompatible-function-pointer-types, enum-constexpr-conversion, +# vla-cxx-extension + +ENV CFLAGS -O1 \ + -fno-omit-frame-pointer \ + -gline-tables-only \ + -Wno-error=enum-constexpr-conversion \ + -Wno-error=incompatible-function-pointer-types \ + -Wno-error=int-conversion \ + -Wno-error=deprecated-declarations \ + -Wno-error=implicit-function-declaration \ + -Wno-error=implicit-int \ + -Wno-error=vla-cxx-extension \ + -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION +ENV CXXFLAGS_EXTRA "-stdlib=libc++" +ENV CXXFLAGS "$CFLAGS $CXXFLAGS_EXTRA" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/checkout_build_install_llvm.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/checkout_build_install_llvm.sh new file mode 100644 index 0000000000000000000000000000000000000000..e5ab30d04a4c38a29855785b83a2f03be56c897f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-clang/checkout_build_install_llvm.sh @@ -0,0 +1,246 @@ +#!/bin/bash -eux +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +NPROC=$(nproc) + +TARGET_TO_BUILD= +case $(uname -m) in + x86_64) + TARGET_TO_BUILD=X86 + ARCHITECTURE_DEPS="g++-multilib" + # Use chromium's clang revision. + export CC=$WORK/llvm-stage1/bin/clang + export CXX=$WORK/llvm-stage1/bin/clang++ + ;; + aarch64) + TARGET_TO_BUILD=AArch64 + # g++ multilib is not needed on AArch64 because we don't care about i386. + # We need to install clang and lld using apt because the binary downloaded + # from Chrome's developer tools doesn't support AArch64. + # TODO(metzman): Make x86_64 use the distro's clang for consistency once + # we support AArch64 fully. + ARCHITECTURE_DEPS="clang lld g++" + export CC=clang + export CXX=clang++ + ;; + *) + echo "Error: unsupported target $(uname -m)" + exit 1 + ;; +esac + +INTROSPECTOR_DEP_PACKAGES="texinfo bison flex" +# zlib1g-dev is needed for llvm-profdata to handle coverage data from rust compiler +LLVM_DEP_PACKAGES="build-essential make ninja-build git python3 python3-distutils binutils-dev zlib1g-dev $ARCHITECTURE_DEPS $INTROSPECTOR_DEP_PACKAGES" + +apt-get update && apt-get install -y $LLVM_DEP_PACKAGES --no-install-recommends + +# For manual bumping. +# On each bump a full trial run for everything (fuzzing engines, sanitizers, +# languages, projects, ...) is needed. +# Check CMAKE_VERSION infra/base-images/base-clang/Dockerfile was released +# recently enough to fully support this clang version. 
+OUR_LLVM_REVISION=llvmorg-18.1.8 + +mkdir $SRC/chromium_tools +cd $SRC/chromium_tools +git clone https://chromium.googlesource.com/chromium/src/tools/clang +cd clang +# Pin clang script due to https://github.com/google/oss-fuzz/issues/7617 +git checkout 9eb79319239629c1b23cf7a59e5ebb2bab319a34 + +LLVM_SRC=$SRC/llvm-project +# Checkout +CHECKOUT_RETRIES=10 +function clone_with_retries { + REPOSITORY=$1 + LOCAL_PATH=$2 + CHECKOUT_RETURN_CODE=1 + + # Disable exit on error since we might encounter some failures while retrying. + set +e + for i in $(seq 1 $CHECKOUT_RETRIES); do + rm -rf $LOCAL_PATH + git clone $REPOSITORY $LOCAL_PATH + CHECKOUT_RETURN_CODE=$? + if [ $CHECKOUT_RETURN_CODE -eq 0 ]; then + break + fi + done + + # Re-enable exit on error. If checkout failed, script will exit. + set -e + return $CHECKOUT_RETURN_CODE +} +clone_with_retries https://github.com/llvm/llvm-project.git $LLVM_SRC + +git -C $LLVM_SRC checkout $OUR_LLVM_REVISION +echo "Using LLVM revision: $OUR_LLVM_REVISION" + +# For fuzz introspector. +echo "Applying introspector changes" +OLD_WORKING_DIR=$PWD +cd $LLVM_SRC +cp -rf /fuzz-introspector/frontends/llvm/include/llvm/Transforms/FuzzIntrospector/ ./llvm/include/llvm/Transforms/FuzzIntrospector +cp -rf /fuzz-introspector/frontends/llvm/lib/Transforms/FuzzIntrospector ./llvm/lib/Transforms/FuzzIntrospector + +# LLVM currently does not support dynamically loading LTO passes. Thus, we +# hardcode it into Clang instead. 
Ref: https://reviews.llvm.org/D77704 +/fuzz-introspector/frontends/llvm/patch-llvm.sh +cd $OLD_WORKING_DIR + +mkdir -p $WORK/llvm-stage2 $WORK/llvm-stage1 +python3 $SRC/chromium_tools/clang/scripts/update.py --output-dir $WORK/llvm-stage1 + +cd $WORK/llvm-stage2 +cmake -G "Ninja" \ + -DLIBCXX_ENABLE_SHARED=OFF \ + -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \ + -DLIBCXXABI_ENABLE_SHARED=OFF \ + -DCMAKE_BUILD_TYPE=Release \ + -DLLVM_ENABLE_RUNTIMES="compiler-rt;libcxx;libcxxabi" \ + -DLLVM_TARGETS_TO_BUILD="$TARGET_TO_BUILD" \ + -DLLVM_ENABLE_PROJECTS="clang;lld" \ + -DLLVM_BINUTILS_INCDIR="/usr/include/" \ + -DLIBCXXABI_USE_LLVM_UNWINDER=OFF \ + $LLVM_SRC/llvm + +ninja -j $NPROC +ninja install +rm -rf $WORK/llvm-stage1 $WORK/llvm-stage2 + +# libFuzzer sources. +cp -r $LLVM_SRC/compiler-rt/lib/fuzzer $SRC/libfuzzer + +# Use the clang we just built from now on. +export CC=clang +export CXX=clang++ + +function free_disk_space { + rm -rf $LLVM_SRC $SRC/chromium_tools + apt-get autoremove --purge -y $LLVM_DEP_PACKAGES + # Delete unneeded parts of LLVM to reduce image size. + # See https://github.com/google/oss-fuzz/issues/5170 + LLVM_TOOLS_TMPDIR=/tmp/llvm-tools + mkdir $LLVM_TOOLS_TMPDIR + # Move binaries with llvm- prefix that we want into LLVM_TOOLS_TMPDIR. + mv \ + /usr/local/bin/llvm-ar \ + /usr/local/bin/llvm-as \ + /usr/local/bin/llvm-config \ + /usr/local/bin/llvm-cov \ + /usr/local/bin/llvm-objcopy \ + /usr/local/bin/llvm-nm \ + /usr/local/bin/llvm-profdata \ + /usr/local/bin/llvm-ranlib \ + /usr/local/bin/llvm-symbolizer \ + /usr/local/bin/llvm-undname \ + $LLVM_TOOLS_TMPDIR + + # Delete remaining llvm- binaries. + rm -rf /usr/local/bin/llvm-* + + # Restore the llvm- binaries we want to keep. + mv $LLVM_TOOLS_TMPDIR/* /usr/local/bin/ + rm -rf $LLVM_TOOLS_TMPDIR + + # Remove binaries from LLVM build that we don't need. 
+ rm -f \ + /usr/local/bin/bugpoint \ + /usr/local/bin/llc \ + /usr/local/bin/lli \ + /usr/local/bin/clang-check \ + /usr/local/bin/clang-refactor \ + /usr/local/bin/clang-offload-wrapper \ + /usr/local/bin/clang-offload-bundler \ + /usr/local/bin/clang-repl \ + /usr/local/bin/clang-check \ + /usr/local/bin/clang-refactor \ + /usr/local/bin/c-index-test \ + /usr/local/bin/clang-rename \ + /usr/local/bin/clang-scan-deps \ + /usr/local/bin/clang-extdef-mapping \ + /usr/local/bin/diagtool \ + /usr/local/bin/sanstats \ + /usr/local/bin/dsymutil \ + /usr/local/bin/verify-uselistorder \ + /usr/local/bin/clang-format + + # Remove unneeded clang libs, CMake files from LLVM build, lld libs, and the + # libraries. + # Note: we need fuzzer_no_main libraries for atheris. Don't delete. + rm -rf \ + /usr/local/lib/libclang* \ + /usr/local/lib/liblld* \ + /usr/local/lib/cmake/ +} + +if [ "$TARGET_TO_BUILD" == "AArch64" ] +then + free_disk_space + # Exit now on AArch64. We don't need to rebuild libc++ because on AArch64 we + # do not support MSAN nor do we care about i386. + exit 0 +fi + +function cmake_libcxx { + extra_args="$@" + cmake -G "Ninja" \ + -DLIBCXX_ENABLE_SHARED=OFF \ + -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \ + -DLIBCXXABI_ENABLE_SHARED=OFF \ + -DLIBCXXABI_USE_LLVM_UNWINDER=OFF \ + -DCMAKE_BUILD_TYPE=Release \ + -DLLVM_ENABLE_PIC=ON \ + -DLLVM_TARGETS_TO_BUILD="$TARGET_TO_BUILD" \ + -DLLVM_ENABLE_RUNTIMES="libcxx;libcxxabi" \ + -DLLVM_BINUTILS_INCDIR="/usr/include/" \ + $extra_args \ + -S $LLVM_SRC/runtimes +} + +# 32-bit libraries. +mkdir -p $WORK/i386 +cd $WORK/i386 +cmake_libcxx \ + -DCMAKE_INSTALL_PREFIX=/usr/i386/ \ + -DCMAKE_C_FLAGS="-m32" \ + -DCMAKE_CXX_FLAGS="-m32" + +ninja -j $NPROC cxx +ninja install-cxx +rm -rf $WORK/i386 + +# MemorySanitizer instrumented libraries. 
+mkdir -p $WORK/msan +cd $WORK/msan + +# https://github.com/google/oss-fuzz/issues/1099 +cat << EOF > $WORK/msan/ignorelist.txt +fun:__gxx_personality_* +EOF + +cmake_libcxx \ + -DLLVM_USE_SANITIZER=Memory \ + -DCMAKE_INSTALL_PREFIX=/usr/msan/ \ + -DCMAKE_CXX_FLAGS="-fsanitize-ignorelist=$WORK/msan/ignorelist.txt" + +ninja -j $NPROC cxx +ninja install-cxx +rm -rf $WORK/msan + +free_disk_space diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..f77c7f77d453c6717e9ae9e8be994e305288841a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile @@ -0,0 +1,46 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Base image for all other images. + +ARG parent_image=ubuntu:20.04@sha256:4a45212e9518f35983a976eead0de5eecc555a2f047134e9dd2cfc589076a00d + +FROM $parent_image + +ENV DEBIAN_FRONTEND noninteractive +# Install tzdata to match ClusterFuzz +# (https://github.com/google/oss-fuzz/issues/9280). + +# Use Azure mirrors for consistent apt repository access. 
+RUN cp /etc/apt/sources.list /etc/apt/sources.list.backup && \ + sed -i 's|http://archive.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list && \ + sed -i 's|http://security.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list + + +RUN apt-get update && \ + apt-get upgrade -y && \ + apt-get install -y libc6-dev binutils libgcc-9-dev tzdata locales locales-all && \ + apt-get autoremove -y + +ENV OUT=/out +ENV SRC=/src +ENV WORK=/work +ENV PATH="$PATH:/out" +ENV HWASAN_OPTIONS=random_tags=0 +#set locale to utf8 +ENV LC_ALL=C.UTF-8 + +RUN mkdir -p $OUT $SRC $WORK && chmod a+rwx $OUT $SRC $WORK diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..05d15f03a48a9cbfb8b6b5383984cfef786bd7db --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile @@ -0,0 +1,26 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-runner:${IMG_TAG} +RUN apt-get update && apt-get install -y valgrind zip + +# Installing GDB 12, re https://github.com/google/oss-fuzz/issues/7513. 
+RUN apt-get install -y build-essential libgmp-dev && \ + wget https://ftp.gnu.org/gnu/gdb/gdb-12.1.tar.xz && \ + tar -xf gdb-12.1.tar.xz && cd gdb-12.1 && ./configure && \ + make -j $(expr $(nproc) / 2) && make install && cd .. && \ + rm -rf gdb-12.1* && apt-get remove --purge -y build-essential libgmp-dev diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..7f4ba223bdecff1f1f5c8ba78b0bfef9a6cc48f5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile @@ -0,0 +1,139 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Build rust stuff in its own image. We only need the resulting binaries. +# Keeping the rust toolchain in the image wastes 1 GB. +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG} as temp-runner-binary-builder + +RUN apt-get update && apt-get install -y cargo libyaml-dev +RUN cargo install rustfilt + +# Using multi-stage build to copy some LLVM binaries needed in the runner image. +FROM ghcr.io/aixcc-finals/base-clang:${IMG_TAG} AS base-clang +FROM ghcr.io/aixcc-finals/base-builder-ruby:${IMG_TAG} AS base-ruby + +# The base builder image compiles a specific Python version. 
# Using a multi-stage build
# to copy that same Python interpreter into the runner image saves build time and keeps
# the Python versions in sync.
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} AS base-builder

# Real image that will be used later.
FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG}

COPY --from=temp-runner-binary-builder /root/.cargo/bin/rustfilt /usr/local/bin

# Copy the binaries needed for code coverage and crash symbolization.
COPY --from=base-clang /usr/local/bin/llvm-cov \
    /usr/local/bin/llvm-profdata \
    /usr/local/bin/llvm-symbolizer \
    /usr/local/bin/

# Copy the pre-compiled Python binaries and libraries
COPY --from=base-builder /usr/local/bin/python3.10 /usr/local/bin/python3.10
COPY --from=base-builder /usr/local/lib/libpython3.10.so.1.0 /usr/local/lib/libpython3.10.so.1.0
COPY --from=base-builder /usr/local/include/python3.10 /usr/local/include/python3.10
COPY --from=base-builder /usr/local/lib/python3.10 /usr/local/lib/python3.10
COPY --from=base-builder /usr/local/bin/pip3 /usr/local/bin/pip3

# Create symbolic links so "python3"/"python" resolve to the copied 3.10 build.
RUN ldconfig && \
    ln -s /usr/local/bin/python3.10 /usr/local/bin/python3 && \
    ln -s /usr/local/bin/python3.10 /usr/local/bin/python

COPY install_deps.sh /
RUN /install_deps.sh && rm /install_deps.sh

ENV CODE_COVERAGE_SRC=/opt/code_coverage
# Pin coverage to the same as in the base builder:
# https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/install_python.sh#L22
RUN git clone https://chromium.googlesource.com/chromium/src/tools/code_coverage $CODE_COVERAGE_SRC && \
    cd /opt/code_coverage && \
    git checkout edba4873b5e8a390e977a64c522db2df18a8b27d && \
    pip3 install wheel && \
    # If version "Jinja2==2.10" is in requirements.txt, bump it to a patch version that
    # supports upgrading its MarkupSafe dependency to a Python 3.10 compatible release:
    sed -i 's/Jinja2==2.10/Jinja2==2.10.3/' requirements.txt && \
    pip3 install -r requirements.txt && \
    pip3 install MarkupSafe==2.0.1 && \
    pip3 install coverage==6.3.2

# Default environment options for various sanitizers.
# Note that these match the settings used in ClusterFuzz and
# shouldn't be changed unless a corresponding change is made on
# ClusterFuzz side as well.
ENV ASAN_OPTIONS="alloc_dealloc_mismatch=0:allocator_may_return_null=1:allocator_release_to_os_interval_ms=500:check_malloc_usable_size=0:detect_container_overflow=1:detect_odr_violation=0:detect_leaks=1:detect_stack_use_after_return=1:fast_unwind_on_fatal=0:handle_abort=1:handle_segv=1:handle_sigill=1:max_uar_stack_size_log=16:print_scariness=1:quarantine_size_mb=10:strict_memcmp=1:strip_path_prefix=/workspace/:symbolize=1:use_sigaltstack=1:dedup_token_length=3"
ENV MSAN_OPTIONS="print_stats=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3"
ENV UBSAN_OPTIONS="print_stacktrace=1:print_summary=1:silence_unsigned_overflow=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3"
ENV FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
ENV AFL_FUZZER_ARGS="-m none"

# Set up Golang environment variables (copied from /root/.bash_profile).
ENV GOPATH /root/go

# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
ENV PATH $PATH:$GOPATH/bin
COPY gocoverage $GOPATH/gocoverage

COPY install_go.sh /
RUN /install_go.sh && rm -rf /install_go.sh /root/.go

# Install OpenJDK and trim its size by removing unused components.
# NOTE(review): the original comment said "OpenJDK 15", but JAVA_HOME below
# points at java-17 (JDK 15 is only exported as a secondary JAVA_15_HOME) —
# confirm the actual versions against install_java.sh.
ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
ENV JAVA_15_HOME=/usr/lib/jvm/java-15-openjdk-amd64
ENV JVM_LD_LIBRARY_PATH=$JAVA_HOME/lib/server
ENV PATH=$PATH:$JAVA_HOME/bin

COPY install_java.sh /
RUN /install_java.sh && rm /install_java.sh

# Install JaCoCo for JVM coverage.
+RUN wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.cli/0.8.7/org.jacoco.cli-0.8.7-nodeps.jar -O /opt/jacoco-cli.jar && \ + wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.agent/0.8.7/org.jacoco.agent-0.8.7-runtime.jar -O /opt/jacoco-agent.jar && \ + echo "37df187b76888101ecd745282e9cd1ad4ea508d6 /opt/jacoco-agent.jar" | shasum --check && \ + echo "c1814e7bba5fd8786224b09b43c84fd6156db690 /opt/jacoco-cli.jar" | shasum --check + +COPY install_javascript.sh / +RUN /install_javascript.sh && rm /install_javascript.sh + +# Copy built ruby and ruzzy from builder +COPY --from=base-ruby /usr/local/rvm /usr/local/rvm +COPY --from=base-ruby /install/ruzzy /install/ruzzy +COPY ruzzy /usr/bin/ruzzy +ENV PATH="$PATH:/usr/local/rvm/rubies/ruby-3.3.1/bin" +# RubyGems installation directory +ENV GEM_HOME="$OUT/fuzz-gem" +ENV GEM_PATH="/install/ruzzy" + +# Do this last to make developing these files easier/faster due to caching. +COPY bad_build_check \ + coverage \ + coverage_helper \ + download_corpus \ + jacoco_report_converter.py \ + nyc_report_converter.py \ + rcfilt \ + reproduce \ + run_fuzzer \ + parse_options.py \ + generate_differential_cov_report.py \ + profraw_update.py \ + targets_list \ + test_all.py \ + test_one.py \ + python_coverage_runner_help.py \ + /usr/local/bin/ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e1e29e3802fc485f0a4df667baf89cea7fad5dfd --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md @@ -0,0 +1,31 @@ +# base-runner +> Base image for fuzzer runners. 
```bash
docker run -ti ghcr.io/aixcc-finals/base-runner
```

## Commands

| Command | Description |
|---------|-------------|
| `reproduce <fuzzer_name> [fuzzer_options]` | builds all fuzz targets and runs the specified one with testcase `/testcase` and the given options. |
| `run_fuzzer <fuzzer_name> [fuzzer_options]` | runs the specified fuzzer, combining options with the `.options` file |
| `test_all.py` | runs every binary in `/out` as a fuzzer for a while to ensure it works. |
| `coverage <fuzzer_name>` | generates a coverage report for the given fuzzer. |

# Examples

- *Reproduce using latest OSS-Fuzz build:*

+docker run --rm -ti -v <testcase_path>:/testcase gcr.io/oss-fuzz/$PROJECT_NAME reproduce <fuzzer_name>
+
+ +- *Reproduce using local source checkout:* + +
+docker run --rm -ti -v <source_path>:/src/$PROJECT_NAME \
+                    -v <testcase_path>:/testcase gcr.io/oss-fuzz/$PROJECT_NAME \
+                    reproduce <fuzzer_name>
+
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check new file mode 100644 index 0000000000000000000000000000000000000000..8aa901db6c654da365092f6764490b5f3e819668 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check @@ -0,0 +1,494 @@ +#!/bin/bash -u +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# A minimal number of runs to test fuzz target with a non-empty input. +MIN_NUMBER_OF_RUNS=4 + +# The "example" target has 73 with ASan, 65 with UBSan, and 6648 with MSan. +# Real world targets have greater values (arduinojson: 407, zlib: 664). +# Mercurial's bdiff_fuzzer has 116 PCs when built with ASan. +THRESHOLD_FOR_NUMBER_OF_EDGES=100 + +# A fuzz target is supposed to have at least two functions, such as +# LLVMFuzzerTestOneInput and an API that is being called from there. +THRESHOLD_FOR_NUMBER_OF_FUNCTIONS=2 + +# Threshold values for different sanitizers used by instrumentation checks. +ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD=1000 +ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD=0 + +# The value below can definitely be higher (like 500-1000), but avoid being too +# agressive here while still evaluating the DFT-based fuzzing approach. 
DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD=100
DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD=0

MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD=1000
# Some engines (e.g. honggfuzz) may make a very small number of calls to msan
# for memory poisoning.
MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD=3

# Usually, a non UBSan build (e.g. ASan) has 165 calls to UBSan runtime. The
# majority of targets built with UBSan have 200+ UBSan calls, but there are
# some very small targets that may have < 200 UBSan calls even in a UBSan build.
# Use the threshold value of 168 (slightly > 165) for UBSan build.
UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD=168

# It would be risky to use the threshold value close to 165 for non UBSan build,
# as UBSan runtime may change any time and thus we could have different number
# of calls to UBSan runtime even in ASan build. With that, we use the threshold
# value of 200 that would detect unnecessary UBSan instrumentation in the vast
# majority of targets, except of a handful very small ones, which would not be
# a big concern either way as the overhead for them would not be significant.
UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=200

# ASan builds on i386 generally have about 250 UBSan runtime calls.
if [[ $ARCHITECTURE == 'i386' ]]
then
  UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=280
fi


# Verify that the given fuzz target is correctly built to run with a particular
# engine.
# Reads $FUZZING_ENGINE (and for centipede also $HELPER / $SANITIZER) from the
# environment; writes diagnostics to stdout and the target's output to
# /tmp/<target>.output.  Returns 1 on a detected bad build, 0 otherwise.
function check_engine {
  local FUZZER=$1
  local FUZZER_NAME=$(basename $FUZZER)
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"
  local CHECK_FAILED=0

  if [[ "$FUZZING_ENGINE" == libfuzzer ]]; then
    # Store fuzz target's output into a temp file to be used for further checks.
    $FUZZER -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT
    # libFuzzer prints this error when it sees zero coverage counters.
    CHECK_FAILED=$(egrep "ERROR: no interesting inputs were found. Is the code instrumented" -c $FUZZER_OUTPUT)
    if (( $CHECK_FAILED > 0 )); then
      echo "BAD BUILD: $FUZZER does not seem to have coverage instrumentation."
      cat $FUZZER_OUTPUT
      # Bail out as the further check does not make any sense, there are 0 PCs.
      return 1
    fi

    # Extract the edge count from libFuzzer's "INFO: Loaded N modules" banner.
    local NUMBER_OF_EDGES=$(grep -Po "INFO: Loaded [[:digit:]]+ module.*\(.*(counters|guards)\):[[:space:]]+\K[[:digit:]]+" $FUZZER_OUTPUT)

    # If a fuzz target fails to start, grep won't find anything, so bail out early to let check_startup_crash deal with it.
    [[ -z "$NUMBER_OF_EDGES" ]] && return

    # Partial instrumentation is only reported as a warning, not a failure.
    if (( $NUMBER_OF_EDGES < $THRESHOLD_FOR_NUMBER_OF_EDGES )); then
      echo "BAD BUILD: $FUZZER seems to have only partial coverage instrumentation."
    fi
  elif [[ "$FUZZING_ENGINE" == afl ]]; then
    # AFL prints "All set and ready to roll" once its forkserver comes up.
    AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    CHECK_PASSED=$(egrep "All set and ready to roll" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with afl-fuzz failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == honggfuzz ]]; then
    # honggfuzz periodically reports "Sz:... Tm:..." status lines when healthy.
    SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    CHECK_PASSED=$(egrep "^Sz:[0-9]+ Tm:[0-9]+" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with honggfuzz failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == dataflow ]]; then
    # A dataflow binary reports its instrumented function count on startup.
    $FUZZER &> $FUZZER_OUTPUT
    local NUMBER_OF_FUNCTIONS=$(grep -Po "INFO:\s+\K[[:digit:]]+(?=\s+instrumented function.*)" $FUZZER_OUTPUT)
    [[ -z "$NUMBER_OF_FUNCTIONS" ]] && NUMBER_OF_FUNCTIONS=0
    if (( $NUMBER_OF_FUNCTIONS < $THRESHOLD_FOR_NUMBER_OF_FUNCTIONS )); then
      echo "BAD BUILD: $FUZZER does not seem to be properly built in 'dataflow' config."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == centipede \
    && ("${HELPER:-}" == True || "$SANITIZER" == none ) ]]; then
    # Performs run test on unsanitized binaries with auxiliary sanitized
    # binaries if they are built with helper.py.
    # Performs run test on unsanitized binaries without auxiliary sanitized
    # binaries if they are from trial build and production build.
    # TODO(Dongge): Support run test with sanitized binaries for trial and
    # production build.
    SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    CHECK_PASSED=$(egrep "\[S0.0] begin-fuzz: ft: 0 corp: 0/0" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with centipede failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  fi

  return 0
}

# Verify that the given fuzz target has been built properly and works.
# Returns 1 when the target crashes or exits during a short smoke run.
function check_startup_crash {
  local FUZZER=$1
  local FUZZER_NAME=$(basename $FUZZER)
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"
  local CHECK_PASSED=0

  if [[ "$FUZZING_ENGINE" = libfuzzer ]]; then
    # Skip seed corpus as there is another explicit check that uses seed corpora.
    SKIP_SEED_CORPUS=1 run_fuzzer $FUZZER_NAME -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT
    # libFuzzer prints "Done N runs" iff it completed all runs without crashing.
    CHECK_PASSED=$(egrep "Done $MIN_NUMBER_OF_RUNS runs" -c $FUZZER_OUTPUT)
  elif [[ "$FUZZING_ENGINE" = afl ]]; then
    AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    # Pass when AFL did not report the target binary crashing/terminating.
    if [ $(egrep "target binary (crashed|terminated)" -c $FUZZER_OUTPUT) -eq 0 ]; then
      CHECK_PASSED=1
    fi
  elif [[ "$FUZZING_ENGINE" = dataflow ]]; then
    # TODO(https://github.com/google/oss-fuzz/issues/1632): add check for
    # binaries compiled with dataflow engine when the interface becomes stable.
    CHECK_PASSED=1
  else
    # TODO: add checks for another fuzzing engines if possible.
    CHECK_PASSED=1
  fi

  if [ "$CHECK_PASSED" -eq "0" ]; then
    echo "BAD BUILD: $FUZZER seems to have either startup crash or exit:"
    cat $FUZZER_OUTPUT
    return 1
  fi

  return 0
}

# Mixed sanitizers check for ASan build.
+function check_asan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS < $ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with MSan." + return 1 + fi + + if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with UBSan." + return 1 + fi + + return 0 +} + +# Mixed sanitizers check for DFSan build. +function check_dfsan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then + echo "BAD BUILD: DFSan build of $FUZZER seems to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS < $DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with MSan." + return 1 + fi + + if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with UBSan." + return 1 + fi + + return 0 +} + + +# Mixed sanitizers check for MSan build. 
function check_msan_build {
  # Mixed sanitizers check for an MSan build: require many __msan runtime
  # calls and no significant calls into the other sanitizer runtimes.
  # Args: fuzzer path, then __asan/__dfsan/__msan/__ubsan call counts.
  local FUZZER=$1
  local ASAN_CALLS=$2
  local DFSAN_CALLS=$3
  local MSAN_CALLS=$4
  local UBSAN_CALLS=$5

  # Perform all the checks for more detailed error message.
  if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then
    echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with ASan."
    return 1
  fi

  if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then
    echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with DFSan."
    return 1
  fi

  if (( $MSAN_CALLS < $MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD )); then
    echo "BAD BUILD: $FUZZER does not seem to be compiled with MSan."
    return 1
  fi

  if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then
    echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with UBSan."
    return 1
  fi

  return 0
}

# Mixed sanitizers check for UBSan build.
function check_ubsan_build {
  local FUZZER=$1
  local ASAN_CALLS=$2
  local DFSAN_CALLS=$3
  local MSAN_CALLS=$4
  local UBSAN_CALLS=$5

  if [[ "$FUZZING_ENGINE" != libfuzzer ]]; then
    # Ignore UBSan checks for fuzzing engines other than libFuzzer because:
    # A) we (probably) are not going to use those with UBSan
    # B) such builds show indistinguishable number of calls to UBSan
    return 0
  fi

  # Perform all the checks for more detailed error message.
  if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with ASan."
    return 1
  fi

  if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with DFSan."
    return 1
  fi

  if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with MSan."
    return 1
  fi

  if (( $UBSAN_CALLS < $UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD )); then
    echo "BAD BUILD: $FUZZER does not seem to be compiled with UBSan."
    return 1
  fi

  # Fixed: previously fell off the end without an explicit status; return
  # success explicitly for consistency with the other check_*_build helpers.
  return 0
}

# Verify that the given fuzz target is compiled with correct sanitizer.
# Counts calls into each sanitizer runtime via objdump and dispatches to the
# per-sanitizer check matching $SANITIZER.
function check_mixed_sanitizers {
  local FUZZER=$1
  local result=0
  local CALL_INSN=

  if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then
    # Sanitizer runtime is linked into the Jazzer driver, so this check does not
    # apply.
    return 0
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then
    # Jazzer.js currently does not support using sanitizers with native Node.js addons.
    # This is not relevant anyways since supporting this will be done by preloading
    # the sanitizers in the wrapper script starting Jazzer.js.
    return 0
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then
    # Sanitizer runtime is loaded via LD_PRELOAD, so this check does not apply.
    return 0
  fi

  # For fuzztest fuzzers point to the binary instead of launcher script.
  if [[ $FUZZER == *"@"* ]]; then
    FUZZER=(${FUZZER//@/ }[0])
  fi

  # Architecture-specific regex matching a call instruction in objdump output.
  CALL_INSN=
  if [[ $ARCHITECTURE == "x86_64" ]]
  then
    CALL_INSN="callq?\s+[0-9a-f]+\s+<"
  elif [[ $ARCHITECTURE == "i386" ]]
  then
    CALL_INSN="call\s+[0-9a-f]+\s+<"
  elif [[ $ARCHITECTURE == "aarch64" ]]
  then
    CALL_INSN="bl\s+[0-9a-f]+\s+<"
  else
    echo "UNSUPPORTED ARCHITECTURE"
    exit 1
  fi
  local ASAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__asan" -c)
  local DFSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__dfsan" -c)
  local MSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__msan" -c)
  local UBSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__ubsan" -c)


  if [[ "$SANITIZER" = address ]]; then
    check_asan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = dataflow ]]; then
    check_dfsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = memory ]]; then
    check_msan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = undefined ]]; then
    check_ubsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = thread ]]; then
    # TODO(metzman): Implement this.
    result=0
  fi

  return $result
}

# Verify that the given fuzz target doesn't crash on the seed corpus.
function check_seed_corpus {
  local FUZZER=$1
  local FUZZER_NAME="$(basename $FUZZER)"
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"

  if [[ "$FUZZING_ENGINE" != libfuzzer ]]; then
    return 0
  fi

  # Set up common fuzzing arguments, otherwise "run_fuzzer" errors out.
  if [ -z "$FUZZER_ARGS" ]; then
    export FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
  fi

  bash -c "run_fuzzer $FUZZER_NAME -runs=0" &> $FUZZER_OUTPUT

  # Don't output anything if fuzz target hasn't crashed.
  if [ $? -ne 0 ]; then
    echo "BAD BUILD: $FUZZER has a crashing input in its seed corpus:"
    cat $FUZZER_OUTPUT
    return 1
  fi

  return 0
}

# Verify that the given fuzz target binary was built for $ARCHITECTURE.
function check_architecture {
  local FUZZER=$1
  local FUZZER_NAME=$(basename $FUZZER)

  if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then
    # The native dependencies of a JVM project are not packaged, but loaded
    # dynamically at runtime and thus cannot be checked here.
    return 0;
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then
    # Jazzer.js fuzzers are wrapper scripts that start the fuzz target with
    # the Jazzer.js CLI.
    return 0;
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then
    FUZZER=${FUZZER}.pkg
  fi

  # For fuzztest fuzzers point to the binary instead of launcher script.
  if [[ $FUZZER == *"@"* ]]; then
    FUZZER=(${FUZZER//@/ }[0])
  fi

  FILE_OUTPUT=$(file $FUZZER)
  if [[ $ARCHITECTURE == "x86_64" ]]
  then
    echo $FILE_OUTPUT | grep "x86-64" > /dev/null
  elif [[ $ARCHITECTURE == "i386" ]]
  then
    echo $FILE_OUTPUT | grep "80386" > /dev/null
  elif [[ $ARCHITECTURE == "aarch64" ]]
  then
    echo $FILE_OUTPUT | grep "aarch64" > /dev/null
  else
    echo "UNSUPPORTED ARCHITECTURE"
    return 1
  fi
  result=$?
  if [[ $result != 0 ]]
  then
    echo "BAD BUILD $FUZZER is not built for architecture: $ARCHITECTURE"
    echo "file command output: $FILE_OUTPUT"
    echo "check_mixed_sanitizers test will fail."
  fi
  return $result
}

# Run all checks for one fuzz target; returns the number of failed checks.
function main {
  local FUZZER=$1
  local AUXILIARY_FUZZER=${2:-}
  local checks_failed=0
  local result=0

  export RUN_FUZZER_MODE="batch"
  check_engine $FUZZER
  result=$?
  checks_failed=$(( $checks_failed + $result ))

  check_architecture $FUZZER
  result=$?
  checks_failed=$(( $checks_failed + $result ))

  if [[ "$FUZZING_ENGINE" == centipede \
    && "$SANITIZER" != none && "${HELPER:-}" == True ]]; then
    check_mixed_sanitizers $AUXILIARY_FUZZER
  else
    check_mixed_sanitizers $FUZZER
  fi
  result=$?
  checks_failed=$(( $checks_failed + $result ))

  check_startup_crash $FUZZER
  result=$?
  checks_failed=$(( $checks_failed + $result ))

  # TODO: re-enable after introducing bug auto-filing for bad builds.
  # check_seed_corpus $FUZZER
  return $checks_failed
}


if [ $# -ne 1 -a $# -ne 2 ]; then
  # Fixed: the placeholders had been lost from this message.
  echo "Usage: $0 <fuzz_target_binary> [auxiliary_fuzz_target_binary]"
  exit 1
fi

# Fuzz target path.
FUZZER=$1
AUXILIARY_FUZZER=${2:-}

main $FUZZER $AUXILIARY_FUZZER
exit $?
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage new file mode 100644 index 0000000000000000000000000000000000000000..585b4d457e753e12025344efd735c571b38fb580 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage @@ -0,0 +1,549 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +cd $OUT + +if (( $# > 0 )); then + FUZZ_TARGETS="$@" +else + FUZZ_TARGETS="$(find . 
-maxdepth 1 -type f -executable -printf '%P\n' | \ + grep -v -x -F \ + -e 'llvm-symbolizer' \ + -e 'jazzer_agent_deploy.jar' \ + -e 'jazzer_driver' \ + -e 'jazzer_driver_with_sanitizer' \ + -e 'sanitizer_with_fuzzer.so')" +fi + +COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT} + +DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps" +FUZZERS_COVERAGE_DUMPS_DIR="$DUMPS_DIR/fuzzers_coverage" +MERGED_COVERAGE_DIR="$COVERAGE_OUTPUT_DIR/merged_coverage" +FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats" +TEXTCOV_REPORT_DIR="$COVERAGE_OUTPUT_DIR/textcov_reports" +LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs" +REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report" +REPORT_BY_TARGET_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report_target" +PLATFORM=linux +REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/$PLATFORM" + +for directory in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR $TEXTCOV_REPORT_DIR\ + $REPORT_PLATFORM_DIR $REPORT_BY_TARGET_ROOT_DIR $FUZZERS_COVERAGE_DUMPS_DIR $MERGED_COVERAGE_DIR; do + rm -rf $directory + mkdir -p $directory +done + +PROFILE_FILE="$DUMPS_DIR/merged.profdata" +SUMMARY_FILE="$REPORT_PLATFORM_DIR/summary.json" +COVERAGE_TARGET_FILE="$FUZZER_STATS_DIR/coverage_targets.txt" + +# Use path mapping, as $SRC directory from the builder is copied into $OUT/$SRC. +PATH_EQUIVALENCE_ARGS="-path-equivalence=/,$OUT" + +# It's important to use $COVERAGE_EXTRA_ARGS as the last argument, because it +# can contain paths to source files / directories which are positional args. +LLVM_COV_COMMON_ARGS="$PATH_EQUIVALENCE_ARGS \ + -ignore-filename-regex=.*src/libfuzzer/.* $COVERAGE_EXTRA_ARGS" + +# Options to extract branch coverage. +BRANCH_COV_ARGS="--show-branches=count --show-expansions" + +# Timeout for running a single fuzz target. +TIMEOUT=1h + +# This will be used by llvm-cov command to generate the actual report. +objects="" + +# Number of CPUs available, this is needed for running tests in parallel. +# Set the max number of parallel jobs to be the CPU count and a max of 10. 
NPROC=$(nproc)
MAX_PARALLEL_COUNT=10

CORPUS_DIR=${CORPUS_DIR:-"/corpus"}

# Collect coverage for a single native (libFuzzer-style) target by replaying
# its corpus under LLVM_PROFILE_FILE, then merge raw profiles and export
# per-target summary stats and a textcov report.
function run_fuzz_target {
  local target=$1

  # '%1m' will produce separate dump files for every object. For example, if a
  # fuzz target loads a shared library, we will have dumps for both of them.
  local profraw_file="$DUMPS_DIR/$target.%1m.profraw"
  local profraw_file_mask="$DUMPS_DIR/$target.*.profraw"
  local profdata_file="$DUMPS_DIR/$target.profdata"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Use -merge=1 instead of -runs=0 because merge is crash resistant and would
  # let to get coverage using all corpus files even if there are crash inputs.
  # Merge should not introduce any significant overhead compared to -runs=0,
  # because (A) corpuses are already minimized; (B) we do not use sancov, and so
  # libFuzzer always finishes merge with an empty output dir.
  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"

  export LLVM_PROFILE_FILE=$profraw_file
  timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occurred while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  rm -rf $corpus_dummy
  if (( $(du -c $profraw_file_mask | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce profile dumps.
    return 0
  fi

  # If necessary translate to latest profraw version.
  if [[ $target == *"@"* ]]; then
    # Extract fuzztest binary name from fuzztest wrapper script.
    target=(${target//@/ }[0])
  fi
  profraw_update.py $OUT/$target -i $profraw_file_mask
  llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file

  # Delete unnecessary and (potentially) large .profraw files.
  rm $profraw_file_mask

  shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$target)

  llvm-cov export -summary-only -instr-profile=$profdata_file -object=$target \
    $shared_libraries $LLVM_COV_COMMON_ARGS > $FUZZER_STATS_DIR/$target.json

  # For introspector.
  llvm-cov show -instr-profile=$profdata_file -object=$target -line-coverage-gt=0 $shared_libraries $BRANCH_COV_ARGS $LLVM_COV_COMMON_ARGS > ${TEXTCOV_REPORT_DIR}/$target.covreport
}

# Collect coverage for a Go target via -test.coverprofile and gocovsum.
function run_go_fuzz_target {
  local target=$1

  echo "Running go target $target"
  export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/"
  export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf"

  # setup for native go fuzzers
  cd $OUT
  mkdir -p "testdata/fuzz/${target}"
  cp -r "${FUZZ_CORPUS_DIR}" "testdata/fuzz/"

  # rewrite libFuzzer corpus to Std Go corpus if native fuzzing
  grep "TestFuzzCorpus" $target > /dev/null 2>&1 && $SYSGOPATH/bin/convertcorpus $target "testdata/fuzz/${target}"
  cd -

  timeout $TIMEOUT $OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occurred while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  # cleanup after native go fuzzers
  rm -r "${OUT}/testdata/fuzz/${target}"

  # The Go 1.18 fuzzers are renamed to "*_fuzz_.go" during "infra/helper.py build_fuzzers".
  # They are therefore referred to as "*_fuzz_.go" in the profdata files.
  # Since the copies named "*_fuzz_.go" do not exist in the file tree during
  # the coverage build, we change the references in the .profdata files
  # to the original file names.
  #sed -i "s/_test.go_fuzz_.go/_test.go/g" $DUMPS_DIR/$target.profdata
  # translate from golangish paths to current absolute paths
  cat $OUT/$target.gocovpath | while read i; do sed -i $i $DUMPS_DIR/$target.profdata; done
  # cf PATH_EQUIVALENCE_ARGS
  sed -i 's=/='$OUT'/=' $DUMPS_DIR/$target.profdata
  $SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json
}

# Collect coverage for a Python (Atheris) target; leaves the raw coverage.py
# data in $OUT/.coverage_<target> for later combination.
function run_python_fuzz_target {
  local target=$1
  local zipped_sources="$DUMPS_DIR/$target.deps.zip"
  local corpus_real="$CORPUS_DIR/${target}"
  # Write dummy stats file
  echo "{}" > "$FUZZER_STATS_DIR/$target.json"

  # Run fuzzer
  $OUT/$target $corpus_real -atheris_runs=$(ls -la $corpus_real | wc -l) > $LOGS_DIR/$target.log 2>&1
  if (( $? != 0 )); then
    echo "Error happened getting coverage of $target"
    echo "This is likely because Atheris did not exit gracefully"
    cat $LOGS_DIR/$target.log
    return 0
  fi
  mv .coverage $OUT/.coverage_$target
}

# Collect coverage for a JVM target via the JaCoCo agent and CLI.
function run_java_fuzz_target {
  local target=$1

  local exec_file="$DUMPS_DIR/$target.exec"
  local class_dump_dir="$DUMPS_DIR/${target}_classes/"
  mkdir "$class_dump_dir"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local jacoco_args="destfile=$exec_file,classdumpdir=$class_dump_dir,excludes=com.code_intelligence.jazzer.*\\:com.sun.tools.attach.VirtualMachine"
  local args="-merge=1 -timeout=100 --nohooks \
    --additional_jvm_args=-javaagent\\:/opt/jacoco-agent.jar=$jacoco_args \
    $corpus_dummy $corpus_real"

  timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occurred while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if (( $(du -c $exec_file | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce .exec files.
    echo "$target failed to produce .exec file."
    return 0
  fi

  # Generate XML report only as input to jacoco_report_converter.
  # Source files are not needed for the summary.
  local xml_report="$DUMPS_DIR/${target}.xml"
  local summary_file="$FUZZER_STATS_DIR/$target.json"
  java -jar /opt/jacoco-cli.jar report $exec_file \
    --xml $xml_report \
    --classfiles $class_dump_dir

  # Write llvm-cov summary file.
  jacoco_report_converter.py $xml_report $summary_file
}

# Collect coverage for a JavaScript (Jazzer.js/nyc) target.
function run_javascript_fuzz_target {
  local target=$1
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # IstanbulJS currently does not work when the tested program creates
  # subprocesses. For this reason, we first minimize the corpus removing
  # any crashing inputs so that we can report source-based code coverage
  # with a single sweep over the minimized corpus
  local merge_args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
  timeout $TIMEOUT $OUT/$target $merge_args &> $LOGS_DIR/$target.log

  # nyc saves the coverage reports in a directory with the default name "coverage"
  local coverage_dir="$DUMPS_DIR/coverage_dir_for_${target}"
  rm -rf $coverage_dir && mkdir -p $coverage_dir

  local nyc_json_coverage_file="$coverage_dir/coverage-final.json"
  local nyc_json_summary_file="$coverage_dir/coverage-summary.json"

  local args="-runs=0 $corpus_dummy"
  local jazzerjs_args="--coverage --coverageDirectory $coverage_dir --coverageReporters json --coverageReporters json-summary"

  JAZZERJS_EXTRA_ARGS=$jazzerjs_args $OUT/$target $args &> $LOGS_DIR/$target.log

  if (( $? != 0 )); then
    echo "Error occurred while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if [ ! -s $nyc_json_coverage_file ]; then
    # Skip fuzz targets that failed to produce coverage-final.json file.
    echo "$target failed to produce coverage-final.json file."
    return 0
  fi

  cp $nyc_json_coverage_file $FUZZERS_COVERAGE_DUMPS_DIR/$target.json

  local summary_file="$FUZZER_STATS_DIR/$target.json"

  nyc_report_converter.py $nyc_json_summary_file $summary_file
}

# Render the merged profile as an HTML report plus a JSON summary.
function generate_html {
  local profdata=$1
  local shared_libraries=$2
  local objects=$3
  local output_dir=$4

  rm -rf "$output_dir"
  mkdir -p "$output_dir/$PLATFORM"

  local llvm_cov_args="-instr-profile=$profdata $objects $LLVM_COV_COMMON_ARGS"
  llvm-cov show -format=html -output-dir=$output_dir -Xdemangler rcfilt $llvm_cov_args

  # Export coverage summary in JSON format.
  local summary_file=$output_dir/$PLATFORM/summary.json

  llvm-cov export -summary-only $llvm_cov_args > $summary_file

  coverage_helper -v post_process -src-root-dir=/ -summary-file=$summary_file \
    -output-dir=$output_dir $PATH_EQUIVALENCE_ARGS
}

export SYSGOPATH=$GOPATH
export GOPATH=$OUT/$GOPATH
# Run each fuzz target, generate raw coverage dumps.
for fuzz_target in $FUZZ_TARGETS; do
  # Test if fuzz target is a golang one.
  if [[ $FUZZING_LANGUAGE == "go" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || grep "testing\.T" $fuzz_target > /dev/null 2>&1 || continue
    fi
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_go_fuzz_target $fuzz_target &
  elif [[ $FUZZING_LANGUAGE == "python" ]]; then
    echo "Entering python fuzzing"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_python_fuzz_target $fuzz_target
  elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_java_fuzz_target $fuzz_target &
  elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_javascript_fuzz_target $fuzz_target &
  else
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_fuzz_target $fuzz_target &

    # Rewrite object if it's a FUZZTEST target
    if [[ $fuzz_target == *"@"* ]]; then
      # Extract fuzztest binary name from fuzztest wrapper script.
      fuzz_target=(${fuzz_target//@/ }[0])
    fi
    if [[ -z $objects ]]; then
      # The first object needs to be passed without -object= flag.
      objects="$fuzz_target"
    else
      objects="$objects -object=$fuzz_target"
    fi
  fi


  # Limit the number of processes to be spawned.
  n_child_proc=$(jobs -rp | wc -l)
  while [[ "$n_child_proc" -eq "$NPROC" || "$n_child_proc" -gt "$MAX_PARALLEL_COUNT" ]]; do
    sleep 4
    n_child_proc=$(jobs -rp | wc -l)
  done
done

# Wait for background processes to finish.
+wait + +if [[ $FUZZING_LANGUAGE == "go" ]]; then + echo $DUMPS_DIR + $SYSGOPATH/bin/gocovmerge $DUMPS_DIR/*.profdata > fuzz.cov + gotoolcover -html=fuzz.cov -o $REPORT_ROOT_DIR/index.html + $SYSGOPATH/bin/gocovsum fuzz.cov > $SUMMARY_FILE + cp $REPORT_ROOT_DIR/index.html $REPORT_PLATFORM_DIR/index.html + $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.cpu.prof + mv merged.data $REPORT_ROOT_DIR/cpu.prof + $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.heap.prof + mv merged.data $REPORT_ROOT_DIR/heap.prof + #TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof + echo "Finished generating code coverage report for Go fuzz targets." +elif [[ $FUZZING_LANGUAGE == "python" ]]; then + # Extract source files from all dependency zip folders + mkdir -p /pythoncovmergedfiles/medio + PYCOVDIR=/pycovdir/ + mkdir $PYCOVDIR + for fuzzer in $FUZZ_TARGETS; do + fuzzer_deps=${fuzzer}.pkg.deps.zip + unzip $OUT/${fuzzer_deps} + rsync -r ./medio /pythoncovmergedfiles/medio + rm -rf ./medio + + # Translate paths in unzipped folders to paths that we can use + mv $OUT/.coverage_$fuzzer .coverage + python3 /usr/local/bin/python_coverage_runner_help.py translate /pythoncovmergedfiles/medio + cp .new_coverage $PYCOVDIR/.coverage_$fuzzer + cp .new_coverage $OUT/coverage_d_$fuzzer + done + + # Combine coverage + cd $PYCOVDIR + python3 /usr/local/bin/python_coverage_runner_help.py combine .coverage_* + python3 /usr/local/bin/python_coverage_runner_help.py html + # Produce all_cov file used by fuzz introspector. + python3 /usr/local/bin/python_coverage_runner_help.py json -o ${TEXTCOV_REPORT_DIR}/all_cov.json + + # Generate .json with similar format to llvm-cov output. 
+ python3 /usr/local/bin/python_coverage_runner_help.py \ + convert-to-summary-json ${TEXTCOV_REPORT_DIR}/all_cov.json $SUMMARY_FILE + + # Copy coverage data out + cp htmlcov/status.json ${TEXTCOV_REPORT_DIR}/html_status.json + + mv htmlcov/* $REPORT_PLATFORM_DIR/ + mv .coverage_* $REPORT_PLATFORM_DIR/ +elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then + + # From this point on the script does not tolerate any errors. + set -e + + # Merge .exec files from the individual targets. + jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec + java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \ + --destfile $jacoco_merged_exec + + # Prepare classes directory for jacoco process + classes_dir=$DUMPS_DIR/classes + mkdir $classes_dir + + # Only copy class files found in $OUT/$SRC to ensure they are + # lively compiled from the project, avoiding inclusion of + # dependency classes. This also includes the fuzzer classes. + find "$OUT/$SRC" -type f -name "*.class" | while read -r class_file; do + # Skip module-info.class + if [[ "$(basename "$class_file")" == "module-info.class" ]]; then + continue + fi + + # Use javap to extract the fully qualified name of the class and copy it to $classes_dir + fqn=$(javap -verbose "$class_file" 2>/dev/null | grep "this_class:" | grep -oP '(?<=// ).*') + if [ -n "$fqn" ]; then + mkdir -p $classes_dir/$(dirname $fqn) + cp $class_file $classes_dir/$fqn.class + fi + done + + # Heuristically determine source directories based on Maven structure. + # Always include the $SRC root as it likely contains the fuzzer sources. + sourcefiles_args=(--sourcefiles $OUT/$SRC) + source_dirs=$(find $OUT/$SRC -type d -name 'java') + for source_dir in $source_dirs; do + sourcefiles_args+=(--sourcefiles "$source_dir") + done + + # Generate HTML and XML reports. 
+ xml_report=$REPORT_PLATFORM_DIR/index.xml + java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \ + --html $REPORT_PLATFORM_DIR \ + --xml $xml_report \ + --classfiles $classes_dir \ + "${sourcefiles_args[@]}" + + # Also serve the raw exec file and XML report, which can be useful for + # automated analysis. + cp $jacoco_merged_exec $REPORT_PLATFORM_DIR/jacoco.exec + cp $xml_report $REPORT_PLATFORM_DIR/jacoco.xml + cp $xml_report $TEXTCOV_REPORT_DIR/jacoco.xml + + # Write llvm-cov summary file. + jacoco_report_converter.py $xml_report $SUMMARY_FILE + + set +e +elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then + + # From this point on the script does not tolerate any errors. + set -e + + json_report=$MERGED_COVERAGE_DIR/coverage.json + nyc merge $FUZZERS_COVERAGE_DUMPS_DIR $json_report + + nyc report -t $MERGED_COVERAGE_DIR --report-dir $REPORT_PLATFORM_DIR --reporter=html --reporter=json-summary + + nyc_json_summary_file=$REPORT_PLATFORM_DIR/coverage-summary.json + + # Write llvm-cov summary file. + nyc_report_converter.py $nyc_json_summary_file $SUMMARY_FILE + + set +e +else + + # From this point on the script does not tolerate any errors. + set -e + + # Merge all dumps from the individual targets. + rm -f $PROFILE_FILE + llvm-profdata merge -sparse $DUMPS_DIR/*.profdata -o $PROFILE_FILE + + # TODO(mmoroz): add script from Chromium for rendering directory view reports. + # The first path in $objects does not have -object= prefix (llvm-cov format). + shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$objects) + objects="$objects $shared_libraries" + + generate_html $PROFILE_FILE "$shared_libraries" "$objects" "$REPORT_ROOT_DIR" + + # Per target reports. + for fuzz_target in $FUZZ_TARGETS; do + if [[ $fuzz_target == *"@"* ]]; then + profdata_path=$DUMPS_DIR/$fuzz_target.profdata + report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target + # Extract fuzztest binary name from fuzztest wrapper script. 
+ fuzz_target=(${fuzz_target//@/ }[0]) + else + profdata_path=$DUMPS_DIR/$fuzz_target.profdata + report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target + fi + if [[ ! -f "$profdata_path" ]]; then + echo "WARNING: $fuzz_target has no profdata generated." + continue + fi + + generate_html $profdata_path "$shared_libraries" "$fuzz_target" "$report_dir" + done + + set +e +fi + +# Make sure report is readable. +chmod -R +r $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR +find $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR -type d -exec chmod +x {} + + +# HTTP_PORT is optional. +set +u +if [[ -n $HTTP_PORT ]]; then + # Serve the report locally. + echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html" + cd $REPORT_ROOT_DIR + python3 -m http.server $HTTP_PORT +fi diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper new file mode 100644 index 0000000000000000000000000000000000000000..4d29ceac8f5d048a43a7ec6cc0524d7a23936781 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper @@ -0,0 +1,17 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +python3 $CODE_COVERAGE_SRC/coverage_utils.py $@ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus new file mode 100644 index 0000000000000000000000000000000000000000..1b7ebe8a263b5c3410dcbbd5b8e861c5cf134d4a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus @@ -0,0 +1,30 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +if (( $# < 1 )); then + echo "Usage: $0 \"path_download_to url_download_from\" (can be repeated)" >&2 + exit 1 +fi + +for pair in "$@"; do + read path url <<< "$pair" + wget -q -O $path $url +done + +# Always exit with 0 as we do not track wget return codes and should not rely +# on the latest command execution. 
+exit 0 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py new file mode 100644 index 0000000000000000000000000000000000000000..3f9fc22230c71a5c7f01068a871d44942489aad0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py @@ -0,0 +1,228 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Script for generating differential coverage reports. 
+generate_differential_cov_report.py \ + +""" +import os +import shutil +import subprocess +import sys + + +class ProfData: + """Class representing a profdata file.""" + + def __init__(self, text): + self.function_profs = [] + for function_prof in text.split('\n\n'): + if not function_prof: + continue + self.function_profs.append(FunctionProf(function_prof)) + + def to_string(self): + """Convert back to a string.""" + return '\n'.join( + [function_prof.to_string() for function_prof in self.function_profs]) + + def find_function(self, function, idx=None): + """Find the same function in this profdata.""" + if idx is not None: + try: + possibility = self.function_profs[idx] + if function.func_hash == possibility.func_hash: + return possibility + except IndexError: + pass + for function_prof in self.function_profs: + if function_prof.func_hash == function.func_hash: + return function_prof + return None + + def subtract(self, subtrahend): + """Subtract subtrahend from this profdata.""" + for idx, function_prof in enumerate(self.function_profs): + subtrahend_function_prof = subtrahend.find_function(function_prof, idx) + function_prof.subtract(subtrahend_function_prof) + + +class FunctionProf: + """Profile of a function.""" + FUNC_HASH_COMMENT_LINE = '# Func Hash:' + NUM_COUNTERS_COMMENT_LINE = '# Num Counters:' + COUNTER_VALUES_COMMENT_LINE = '# Counter Values:' + + def __init__(self, text): + print(text) + lines = text.splitlines() + self.function = lines[0] + assert self.FUNC_HASH_COMMENT_LINE == lines[1] + self.func_hash = lines[2] + assert self.NUM_COUNTERS_COMMENT_LINE == lines[3] + self.num_counters = int(lines[4]) + assert self.COUNTER_VALUES_COMMENT_LINE == lines[5] + self.counter_values = [1 if int(line) else 0 for line in lines[6:]] + + def to_string(self): + """Convert back to text.""" + lines = [ + self.function, + self.FUNC_HASH_COMMENT_LINE, + self.func_hash, + self.NUM_COUNTERS_COMMENT_LINE, + str(self.num_counters), + self.COUNTER_VALUES_COMMENT_LINE, + ] 
+ [str(num) for num in self.counter_values] + return '\n'.join(lines) + + def subtract(self, subtrahend_prof): + """Subtract this other function from this function.""" + if not subtrahend_prof: + print(self.function, 'has no subtrahend') + # Nothing to subtract. + return + self.counter_values = [ + max(counter1 - counter2, 0) for counter1, counter2 in zip( + self.counter_values, subtrahend_prof.counter_values) + ] + + +def get_profdata_files(directory): + """Returns profdata files in |directory|.""" + profdatas = [] + for filename in os.listdir(directory): + filename = os.path.join(directory, filename) + if filename.endswith('.profdata'): + profdatas.append(filename) + return profdatas + + +def convert_profdata_to_text(profdata): + """Convert a profdata binary file to a profdata text file.""" + profdata_text = f'{profdata}.txt' + if os.path.exists(profdata_text): + os.remove(profdata_text) + command = [ + 'llvm-profdata', 'merge', '-j=1', '-sparse', profdata, '--text', '-o', + profdata_text + ] + print(command) + subprocess.run(command, check=True) + return profdata_text + + +def convert_text_profdata_to_bin(profdata_text): + """Convert a profdata text file to a profdata binary file.""" + profdata = profdata_text.replace('.txt', '').replace('.profdata', + '') + '.profdata' + print('bin profdata', profdata) + if os.path.exists(profdata): + os.remove(profdata) + command = [ + 'llvm-profdata', 'merge', '-j=1', '-sparse', profdata_text, '-o', profdata + ] + print(command) + subprocess.run(command, check=True) + return profdata + + +def get_difference(minuend_filename, subtrahend_filename): + """Subtract subtrahend_filename from minuend_filename.""" + with open(minuend_filename, 'r', encoding='utf-8') as minuend_file: + print('minuend', minuend_filename) + minuend = ProfData(minuend_file.read()) + with open(subtrahend_filename, 'r', encoding='utf-8') as subtrahend_file: + print('subtrahend', subtrahend_filename) + subtrahend = ProfData(subtrahend_file.read()) + + 
minuend.subtract(subtrahend) + return minuend + + +def profdatas_to_objects(profdatas): + """Get the corresponding objects for each profdata.""" + return [ + os.path.splitext(os.path.basename(profdata))[0] for profdata in profdatas + ] + + +def generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas, + difference_dir): + """Calculate the differences between all profdatas and generate differential + coverage reports.""" + profdata_objects = profdatas_to_objects(minuend_profdatas) + real_profdata_objects = [ + binobject for binobject in profdata_objects if binobject != 'merged' + ] + for minuend, subtrahend, binobject in zip(minuend_profdatas, + subtrahend_profdatas, + profdata_objects): + minuend_text = convert_profdata_to_text(minuend) + subtrahend_text = convert_profdata_to_text(subtrahend) + difference = get_difference(minuend_text, subtrahend_text) + basename = os.path.basename(minuend_text) + difference_text = os.path.join(difference_dir, basename) + with open(difference_text, 'w', encoding='utf-8') as file_handle: + file_handle.write(difference.to_string()) + difference_profdata = convert_text_profdata_to_bin(difference_text) + if not difference_profdata.endswith('merged.profdata'): + generate_html_report(difference_profdata, [binobject], + os.path.join(difference_dir, binobject)) + else: + generate_html_report(difference_profdata, real_profdata_objects, + os.path.join(difference_dir, 'merged')) + + +def generate_html_report(profdata, objects, directory): + """Generate an HTML coverage report.""" + # TODO(metzman): Deal with shared libs. 
+ html_dir = os.path.join(directory, 'reports') + if os.path.exists(html_dir): + os.remove(html_dir) + os.makedirs(html_dir) + out_dir = os.getenv('OUT', '/out') + command = [ + 'llvm-cov', 'show', f'-path-equivalence=/,{out_dir}', '-format=html', + '-Xdemangler', 'rcfilt', f'-instr-profile={profdata}' + ] + + objects = [os.path.join(out_dir, binobject) for binobject in objects] + command += objects + ['-o', html_dir] + print(' '.join(command)) + subprocess.run(command, check=True) + + +def main(): + """Generate differential coverage reports.""" + if len(sys.argv) != 4: + print( + f'Usage: {sys.argv[0]} ') + minuend_dir = sys.argv[1] + subtrahend_dir = sys.argv[2] + difference_dir = sys.argv[3] + if os.path.exists(difference_dir): + shutil.rmtree(difference_dir) + os.makedirs(difference_dir, exist_ok=True) + minuend_profdatas = get_profdata_files(minuend_dir) + subtrahend_profdatas = get_profdata_files(subtrahend_dir) + generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas, + difference_dir) + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.mod b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.mod new file mode 100644 index 0000000000000000000000000000000000000000..c82e4565f29aa96933c22df8ffdd37af57f98eed --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.mod @@ -0,0 +1,10 @@ +module oss-fuzz.com/gocoverage/convertcorpus + +go 1.19 + +require github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20221110144148-3ffc89b74f84 + +require ( + github.com/AdaLogics/go-fuzz-headers v0.0.0-20220824214621-3c06a36a6952 // indirect + github.com/cyphar/filepath-securejoin v0.2.3 // indirect +) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.sum 
b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.sum new file mode 100644 index 0000000000000000000000000000000000000000..7b7d20e202530161b2ca1cee6f36a56e8cc4b422 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/go.sum @@ -0,0 +1,24 @@ +github.com/AdaLogics/go-fuzz-headers v0.0.0-20220824214621-3c06a36a6952 h1:cs1LC1MGKD1O4neR89Rc24t0u15Vs5ASfUQ2tLr/KbY= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20220824214621-3c06a36a6952/go.mod h1:i9fr2JpcEcY/IHEvzCM3qXUZYOQHgR89dt4es1CgMhc= +github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20221110144148-3ffc89b74f84 h1:a0NR83n+t4XyUh32ifxu6XsmeLMKyOx5Lxub9IeBM7k= +github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20221110144148-3ffc89b74f84/go.mod h1:pXIs8t4wo19ehhsffZsAZxSQ+oPUF41iiDrUaIDWKFU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.2.3 h1:YX6ebbZCZP7VkM3scTTokDgBL2TY741X51MTk3ycuNI= +github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/main.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/main.go new file mode 100644 index 0000000000000000000000000000000000000000..43e2c7ea7dd0042307f86213ecb46c4928cb6c51 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/convertcorpus/main.go @@ -0,0 +1,54 @@ +package main + +import ( + "fmt" + "log" + "os" + "path/filepath" + + "github.com/AdamKorcz/go-118-fuzz-build/coverage" +) + +// reads all corpus files in a directory and converts +// them from libFuzzer format to native Go format. 
+func main() { + if len(os.Args) != 3 { + fmt.Println(os.Args) + log.Fatalf("need exactly two argument") + } + FUZZERNAME := os.Args[1] + CORPUS_PATH := os.Args[2] + + filepath.Walk(CORPUS_PATH, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + if !info.Mode().IsRegular() { + return nil + } + libFuzzerSeed, err := os.ReadFile(path) + if err != nil { + panic(err) + } + out := os.Getenv("OUT") + fuzzerContents, err := os.ReadFile(filepath.Join(out, "rawfuzzers", FUZZERNAME)) + if err != nil { + panic(err) + } + goSeed := coverage.ConvertLibfuzzerSeedToGoSeed(fuzzerContents, libFuzzerSeed, FUZZERNAME) + err = os.Remove(path) + if err != nil { + panic(err) + } + f, err := os.Create(path) + if err != nil { + panic(err) + } + defer f.Close() + _, err = f.Write([]byte(goSeed)) + if err != nil { + panic(err) + } + return nil + }) +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.mod b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.mod new file mode 100644 index 0000000000000000000000000000000000000000..b0b57216ea3083a5451fb862138bebba90b81f80 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.mod @@ -0,0 +1,8 @@ +module oss-fuzz.com/gocoverage + +go 1.14 + +require ( + github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 + golang.org/x/tools v0.1.0 +) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.sum b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.sum new file mode 100644 index 0000000000000000000000000000000000000000..3279af3ba87d8613c26f6353bdbf8b2310affe13 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/go.sum @@ -0,0 +1,30 @@ +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline 
v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 h1:zIaiqGYDQwa4HVx5wGRTXbx38Pqxjemn4BP98wpzpXo= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/LICENSE b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..455fb10875ed00a441170736f454c383a80ad7b1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2015, Wade Simmons +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/gocovmerge.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/gocovmerge.go new file mode 100644 index 0000000000000000000000000000000000000000..e8099839e13aa6ba5504bf77af9969fc48b30d29 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovmerge/gocovmerge.go @@ -0,0 +1,111 @@ +// gocovmerge takes the results from multiple `go test -coverprofile` runs and +// merges them into one profile +package main + +import ( + "flag" + "fmt" + "io" + "log" + "os" + "sort" + + "golang.org/x/tools/cover" +) + +func mergeProfiles(p *cover.Profile, merge *cover.Profile) { + if p.Mode != merge.Mode { + log.Fatalf("cannot merge profiles with different modes") + } + // Since the blocks are sorted, we can keep track of where the last block + // was inserted and only look at the blocks after that as targets for merge + 
startIndex := 0 + for _, b := range merge.Blocks { + startIndex = mergeProfileBlock(p, b, startIndex) + } +} + +func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int { + sortFunc := func(i int) bool { + pi := p.Blocks[i+startIndex] + return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol) + } + + i := 0 + if sortFunc(i) != true { + i = sort.Search(len(p.Blocks)-startIndex, sortFunc) + } + i += startIndex + if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol { + if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol { + log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb) + } + switch p.Mode { + case "set": + p.Blocks[i].Count |= pb.Count + case "count", "atomic": + p.Blocks[i].Count += pb.Count + default: + log.Fatalf("unsupported covermode: '%s'", p.Mode) + } + } else { + if i > 0 { + pa := p.Blocks[i-1] + if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) { + log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb) + } + } + if i < len(p.Blocks)-1 { + pa := p.Blocks[i+1] + if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) { + log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb) + } + } + p.Blocks = append(p.Blocks, cover.ProfileBlock{}) + copy(p.Blocks[i+1:], p.Blocks[i:]) + p.Blocks[i] = pb + } + return i + 1 +} + +func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile { + i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName }) + if i < len(profiles) && profiles[i].FileName == p.FileName { + mergeProfiles(profiles[i], p) + } else { + profiles = append(profiles, nil) + copy(profiles[i+1:], profiles[i:]) + profiles[i] = p + } + return profiles +} + +func dumpProfiles(profiles []*cover.Profile, out io.Writer) { + if len(profiles) == 0 { + return + } + fmt.Fprintf(out, 
"mode: %s\n", profiles[0].Mode) + for _, p := range profiles { + for _, b := range p.Blocks { + fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count) + } + } +} + +func main() { + flag.Parse() + + var merged []*cover.Profile + + for _, file := range flag.Args() { + profiles, err := cover.ParseProfiles(file) + if err != nil { + log.Fatalf("failed to parse profiles: %v", err) + } + for _, p := range profiles { + merged = addProfile(merged, p) + } + } + + dumpProfiles(merged, os.Stdout) +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovsum/gocovsum.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovsum/gocovsum.go new file mode 100644 index 0000000000000000000000000000000000000000..660c4d13a364e073e4cad39d542fe56cd611b3e9 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/gocovsum/gocovsum.go @@ -0,0 +1,171 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "log" + + "go/ast" + "go/parser" + "go/token" + + "golang.org/x/tools/cover" +) + +type CoverageTotal struct { + Count int `json:"count"` + Covered int `json:"covered"` + Uncovered int `json:"notcovered"` + Percent float64 `json:"percent"` +} + +type CoverageTotals struct { + Functions CoverageTotal `json:"functions,omitempty"` + Lines CoverageTotal `json:"lines,omitempty"` + Regions CoverageTotal `json:"regions,omitempty"` + Instantiations CoverageTotal `json:"instantiations,omitempty"` + Branches CoverageTotal `json:"branches,omitempty"` +} + +type CoverageFile struct { + Summary CoverageTotals `json:"summary,omitempty"` + Filename string `json:"filename,omitempty"` +} + +type CoverageData struct { + Totals CoverageTotals `json:"totals,omitempty"` + Files []CoverageFile `json:"files,omitempty"` +} + +type PositionInterval struct { + start token.Position + end token.Position +} + +type CoverageSummary struct { + Data []CoverageData `json:"data,omitempty"` + Type string `json:"type,omitempty"` + Version string `json:"version,omitempty"` +} + +func isFunctionCovered(s token.Position, e token.Position, blocks []cover.ProfileBlock) bool { + for _, b := range blocks { + if b.StartLine >= s.Line && b.StartLine <= e.Line && b.EndLine >= s.Line && b.EndLine <= e.Line { + if b.Count > 0 { + return true + } + } + } + return false +} + +func computePercent(s *CoverageTotals) { + if s.Regions.Count > 0 { + s.Regions.Percent = float64(100*s.Regions.Covered) / float64(s.Regions.Count) + } + if s.Lines.Count > 0 { + s.Lines.Percent = float64(100*s.Lines.Covered) / float64(s.Lines.Count) + } + if s.Functions.Count > 0 { + s.Functions.Percent = float64(100*s.Functions.Covered) / float64(s.Functions.Count) + } +} + +func main() { + flag.Parse() + + if len(flag.Args()) != 1 { + log.Fatalf("needs exactly one argument") + } + profiles, err := cover.ParseProfiles(flag.Args()[0]) + if err != nil { + log.Fatalf("failed to 
parse profiles: %v", err) + } + r := CoverageSummary{} + r.Type = "oss-fuzz.go.coverage.json.export" + r.Version = "2.0.1" + r.Data = make([]CoverageData, 1) + for _, p := range profiles { + fset := token.NewFileSet() // positions are relative to fset + f, err := parser.ParseFile(fset, p.FileName, nil, 0) + if err != nil { + log.Printf("failed to parse go file: %v", err) + continue + } + fileCov := CoverageFile{} + fileCov.Filename = p.FileName + ast.Inspect(f, func(n ast.Node) bool { + switch x := n.(type) { + case *ast.FuncLit: + startf := fset.Position(x.Pos()) + endf := fset.Position(x.End()) + fileCov.Summary.Functions.Count++ + if isFunctionCovered(startf, endf, p.Blocks) { + fileCov.Summary.Functions.Covered++ + } else { + fileCov.Summary.Functions.Uncovered++ + } + case *ast.FuncDecl: + startf := fset.Position(x.Pos()) + endf := fset.Position(x.End()) + fileCov.Summary.Functions.Count++ + if isFunctionCovered(startf, endf, p.Blocks) { + fileCov.Summary.Functions.Covered++ + } else { + fileCov.Summary.Functions.Uncovered++ + } + } + return true + }) + + for _, b := range p.Blocks { + fileCov.Summary.Regions.Count++ + if b.Count > 0 { + fileCov.Summary.Regions.Covered++ + } else { + fileCov.Summary.Regions.Uncovered++ + } + + fileCov.Summary.Lines.Count += b.NumStmt + if b.Count > 0 { + fileCov.Summary.Lines.Covered += b.NumStmt + } else { + fileCov.Summary.Lines.Uncovered += b.NumStmt + } + } + r.Data[0].Totals.Regions.Count += fileCov.Summary.Regions.Count + r.Data[0].Totals.Regions.Covered += fileCov.Summary.Regions.Covered + r.Data[0].Totals.Regions.Uncovered += fileCov.Summary.Regions.Uncovered + r.Data[0].Totals.Lines.Count += fileCov.Summary.Lines.Count + r.Data[0].Totals.Lines.Covered += fileCov.Summary.Lines.Covered + r.Data[0].Totals.Lines.Uncovered += fileCov.Summary.Lines.Uncovered + r.Data[0].Totals.Functions.Count += fileCov.Summary.Functions.Count + r.Data[0].Totals.Functions.Covered += fileCov.Summary.Functions.Covered + 
r.Data[0].Totals.Functions.Uncovered += fileCov.Summary.Functions.Uncovered + + computePercent(&fileCov.Summary) + r.Data[0].Files = append(r.Data[0].Files, fileCov) + } + + computePercent(&r.Data[0].Totals) + o, err := json.Marshal(r) + if err != nil { + log.Fatalf("failed to generate json: %v", err) + } + fmt.Printf(string(o)) +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/LICENSE b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..8dada3edaf50dbc082c9a125058f25def75e625a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/main.go b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/main.go new file mode 100644 index 0000000000000000000000000000000000000000..f351564034df276c774fb7a58b0a9c481e700b42 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/gocoverage/pprof-merge/main.go @@ -0,0 +1,68 @@ +// Copyright 2019 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + "flag" + "log" + "os" + + "github.com/google/pprof/profile" +) + +var ( + output string +) + +func main() { + flag.StringVar(&output, "o", "merged.data", "") + flag.Parse() + + files := os.Args[1:] + if len(files) == 0 { + log.Fatal("Give profiles files as arguments") + } + + var profiles []*profile.Profile + for _, fname := range files { + f, err := os.Open(fname) + if err != nil { + log.Fatalf("Cannot open profile file at %q: %v", fname, err) + } + p, err := profile.Parse(f) + if err != nil { + log.Fatalf("Cannot parse profile at %q: %v", fname, err) + } + profiles = append(profiles, p) + } + + merged, err := profile.Merge(profiles) + if err != nil { + log.Fatalf("Cannot merge profiles: %v", err) + } + + out, err := os.OpenFile(output, os.O_RDWR|os.O_CREATE, 0755) + if err != nil { + log.Fatalf("Cannot open output to write: %v", err) + } + + if err := merged.Write(out); err != nil { + log.Fatalf("Cannot write merged profile to file: %v", err) + } + + if err := out.Close(); err != nil { + log.Printf("Error when closing the output file: %v", err) + } +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh new file mode 100644 index 0000000000000000000000000000000000000000..fc0569b339add75ce09c0772fbaa938a4aca0db1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh @@ -0,0 +1,37 @@ +#!/bin/bash -eux +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install dependencies in a platform-aware way.

# The script runs under -e, so each apt-get failure aborts immediately.
apt-get update

# Base packages required by the runner image on every architecture
# (listed alphabetically).
apt-get install -y --no-install-recommends \
  binutils \
  ca-certificates \
  file \
  fonts-dejavu \
  git \
  libcap2 \
  rsync \
  unzip \
  wget \
  zip

# 32-bit compatibility libraries are only relevant on x86_64.
if [ "$(uname -m)" = "x86_64" ]; then
  # We only need to worry about i386 if we are on x86_64.
  apt-get install -y lib32gcc1 libc6-i386
fi
#
################################################################################

# Install go on x86_64, don't do anything on ARM.

case $(uname -m) in
  x86_64)
    # Download and install Go 1.19.
    wget -q https://storage.googleapis.com/golang/getgo/installer_linux -O $SRC/installer_linux
    chmod +x $SRC/installer_linux
    SHELL="bash" $SRC/installer_linux -version 1.19
    rm $SRC/installer_linux
    # Set up Golang coverage modules.
    # Bug fix: the original `printf $(find ...)` word-splits the result and
    # uses the first word as a printf FORMAT string, so any '%' in a path
    # would corrupt the output. Quote the substitution and pass an explicit
    # format instead.
    printf '%s\n' "$(find . -name gocoverage)"
    cd $GOPATH/gocoverage && /root/.go/bin/go install ./...
    cd convertcorpus && /root/.go/bin/go install .
    cd /root/.go/src/cmd/cover && /root/.go/bin/go build && mv cover $GOPATH/bin/gotoolcover
    ;;
  aarch64)
    # Don't install go because installer is not provided.
    echo "Not installing go: aarch64."
    ;;
  *)
    echo "Error: unsupported architecture: $(uname -m)"
    exit 1
    ;;
esac
#
################################################################################

# Install java in a platform-aware way.

# Map the machine architecture to the JDK download naming scheme.
JDK_ARCH=
case $(uname -m) in
  x86_64)
    JDK_ARCH=x64
    ;;
  aarch64)
    JDK_ARCH=aarch64
    ;;
  *)
    echo "Error: unsupported architecture: $(uname -m)"
    exit 1
    ;;
esac

# Fetch both JDK tarballs up front.
wget -q https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-"$JDK_ARCH"_bin.tar.gz -O /tmp/openjdk-17.0.2_linux-"$JDK_ARCH"_bin.tar.gz
wget -q https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-"$JDK_ARCH"_bin.tar.gz -O /tmp/openjdk-15.0.2_linux-"$JDK_ARCH"_bin.tar.gz
cd /tmp

# unpack_jdk <tarball> <dest>: extract into <dest>, drop the tarball, then
# trim unused components (jmods, bundled sources) to keep the image small.
unpack_jdk() {
  mkdir -p "$2"
  tar -xz --strip-components=1 -f "$1" --directory "$2"
  rm -f "$1"
  rm -rf "$2"/jmods "$2"/lib/src.zip
}

unpack_jdk openjdk-17.0.2_linux-"$JDK_ARCH"_bin.tar.gz "$JAVA_HOME"

# Install OpenJDK 15 as well. Some projects only run with Java 15.
unpack_jdk openjdk-15.0.2_linux-"$JDK_ARCH"_bin.tar.gz "$JAVA_15_HOME"
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# see installation instructions: https://github.com/nodesource/distributions#available-architectures

# Prerequisites for adding the NodeSource apt repository.
apt-get update
apt-get install -y ca-certificates curl gnupg

# Import the NodeSource signing key into a dedicated keyring.
mkdir -p /etc/apt/keyrings
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg

# Register the Node.js 20.x repository and install nodejs from it.
NODE_MAJOR=20
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

apt-get update
apt-get install -y nodejs

# Install latest versions of nyc for source-based coverage reporting
npm install --global nyc
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML
report."""
import json
import os
import sys
import xml.etree.ElementTree as ET


def convert(xml):
  """Turns a JaCoCo XML report into an llvm-cov JSON summary.

  Args:
    xml: the JaCoCo XML report as a string.

  Returns:
    The llvm-cov style JSON summary serialized to a string.
  """
  summary = {
      'type': 'oss-fuzz.java.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': {},
          'files': [],
      }],
  }

  report = ET.fromstring(xml)
  totals = make_element_summary(report)
  summary['data'][0]['totals'] = totals

  # Since Java compilation does not track source file location, we match
  # coverage info to source files via the full class name, e.g. we search for
  # a path in /out/src ending in foo/bar/Baz.java for the class foo.bar.Baz.
  # Under the assumptions that a given project only ever contains a single
  # version of a class and that no class name appears as a suffix of another
  # class name, we can assign coverage info to every source file matched in
  # that way.
  src_files = list_src_files()

  for class_element in report.findall('./package/class'):
    # Skip fuzzer classes
    if is_fuzzer_class(class_element):
      continue

    # Skip non class elements
    if 'sourcefilename' not in class_element.attrib:
      continue

    class_name = class_element.attrib['name']
    package_name = os.path.dirname(class_name)
    basename = class_element.attrib['sourcefilename']
    # This path is 'foo/Bar.java' for a class element with name 'foo/Bar'
    # and sourcefilename 'Bar.java'.
    canonical_path = os.path.join(package_name, basename)

    class_summary = make_element_summary(class_element)
    for src_file in relative_to_src_path(src_files, canonical_path):
      summary['data'][0]['files'].append({
          'filename': src_file,
          'summary': class_summary,
      })

  return json.dumps(summary)


def list_src_files():
  """Returns a map from basename to full path for all files in $OUT/$SRC."""
  filename_to_paths = {}
  out_path = os.environ['OUT'] + '/'
  src_path = os.environ['SRC']
  src_in_out = out_path + src_path
  for dirpath, _, filenames in os.walk(src_in_out):
    for filename in filenames:
      full_path = dirpath + '/' + filename
      # Map /out//src/... to /src/...
      file_path = full_path[len(out_path):]
      filename_to_paths.setdefault(filename, []).append(file_path)
  return filename_to_paths


def is_fuzzer_class(class_element):
  """Check if the class declares a fuzzerTestOneInput entry point."""
  method_elements = class_element.find('./method[@name="fuzzerTestOneInput"]')
  # Bug fix: compare against None explicitly. ElementTree elements with no
  # children are falsy, so the original `if method_elements:` silently failed
  # to recognize a fuzzer method element that has no child counter elements.
  return method_elements is not None


def relative_to_src_path(src_files, canonical_path):
  """Returns all paths in src_files ending in canonical_path."""
  basename = os.path.basename(canonical_path)
  if basename not in src_files:
    return []
  candidate_paths = src_files[basename]
  return [
      path for path in candidate_paths if path.endswith("/" + canonical_path)
  ]


def make_element_summary(element):
  """Returns a coverage summary for an element in the XML report."""
  summary = {}

  function_counter = element.find('./counter[@type=\'METHOD\']')
  summary['functions'] = make_counter_summary(function_counter)

  line_counter = element.find('./counter[@type=\'LINE\']')
  summary['lines'] = make_counter_summary(line_counter)

  # JaCoCo tracks branch coverage, which counts the covered control-flow edges
  # between llvm-cov's regions instead of the covered regions themselves. For
  # non-trivial code parts, the difference is usually negligible. However, if
  # all methods of a class consist of a single region only (no branches),
  # JaCoCo does not report any branch coverage even if there is instruction
  # coverage. Since this would give incorrect results for CI Fuzz purposes, we
  # increase the regions counter by 1 if there is any amount of instruction
  # coverage.
  instruction_counter = element.find('./counter[@type=\'INSTRUCTION\']')
  has_some_coverage = instruction_counter is not None and int(
      instruction_counter.attrib["covered"]) > 0
  branch_covered_adjustment = 1 if has_some_coverage else 0
  region_counter = element.find('./counter[@type=\'BRANCH\']')
  summary['regions'] = make_counter_summary(
      region_counter, covered_adjustment=branch_covered_adjustment)

  return summary


def make_counter_summary(counter_element, covered_adjustment=0):
  """Turns a JaCoCo counter element into an llvm-cov totals entry."""
  summary = {}
  covered = covered_adjustment
  missed = 0
  # counter_element is None when JaCoCo emitted no counter of this type.
  if counter_element is not None:
    covered += int(counter_element.attrib['covered'])
    missed += int(counter_element.attrib['missed'])
  summary['covered'] = covered
  summary['notcovered'] = missed
  summary['count'] = summary['covered'] + summary['notcovered']
  if summary['count'] != 0:
    summary['percent'] = (100.0 * summary['covered']) / summary['count']
  else:
    summary['percent'] = 0
  return summary


def main():
  """Produces an llvm-cov style JSON summary from a JaCoCo XML report."""
  if len(sys.argv) != 3:
    # Restored the argument placeholders that were lost from the original
    # usage string.
    sys.stderr.write('Usage: %s <jacoco_xml_path> <out_json_path>\n' %
                     sys.argv[0])
    return 1

  with open(sys.argv[1], 'r') as xml_file:
    xml_report = xml_file.read()
  json_summary = convert(xml_report)
  with open(sys.argv[2], 'w') as json_file:
    json_file.write(json_summary)

  return 0


if __name__ == '__main__':
  sys.exit(main())
# --- patch metadata (continued): nyc_report_converter.py (new file) ---
# b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py
# new file mode 100644
# index 0000000000000000000000000000000000000000..53044754c26b2a74a6b7afa7f5f22b52881767d1

#!/usr/bin/env python3
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Helper script for creating a llvm-cov style JSON summary from a nyc
JSON summary."""
import json
import sys


def convert(nyc_json_summary):
  """Turns a nyc JSON report into a llvm-cov JSON summary.

  Args:
    nyc_json_summary: the parsed nyc summary; the 'total' key holds the
        aggregate numbers, every other key is a source file path.

  Returns:
    The llvm-cov style summary serialized as a JSON string.
  """
  summary = {
      'type': 'oss-fuzz.javascript.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': file_summary(nyc_json_summary['total']),
          'files': [{
              'filename': src_file,
              'summary': file_summary(nyc_json_summary[src_file])
          } for src_file in nyc_json_summary if src_file != 'total'],
      }],
  }

  return json.dumps(summary)


def file_summary(nyc_file_summary):
  """Returns a summary for a given file in the nyc JSON summary report."""
  # nyc 'branches' map onto llvm-cov 'regions'.
  return {
      'functions': element_summary(nyc_file_summary['functions']),
      'lines': element_summary(nyc_file_summary['lines']),
      'regions': element_summary(nyc_file_summary['branches'])
  }


def element_summary(element):
  """Returns a summary of a coverage element in the nyc JSON summary
  of the file.

  'skipped' entries count toward neither covered nor notcovered; nyc reports
  'pct' as the string 'Unknown' when there is nothing to measure.
  """
  return {
      'count': element['total'],
      'covered': element['covered'],
      'notcovered': element['total'] - element['covered'] - element['skipped'],
      'percent': element['pct'] if element['pct'] != 'Unknown' else 0
  }


def main():
  """Produces a llvm-cov style JSON summary from a nyc JSON summary."""
  if len(sys.argv) != 3:
    # BUG FIX: the argument placeholders had been lost from the usage string.
    sys.stderr.write('Usage: %s <nyc_json_summary_file> <llvm_cov_summary_json>\n' %
                     sys.argv[0])
    return 1

  with open(sys.argv[1], 'r') as nyc_json_summary_file:
    nyc_json_summary = json.load(nyc_json_summary_file)
  json_summary = convert(nyc_json_summary)
  with open(sys.argv[2], 'w') as json_output_file:
    json_output_file.write(json_summary)

  return 0


if __name__ == '__main__':
  sys.exit(main())

# --- patch metadata for the next file in this diff ---
# diff --git a/.../parse_options.py b/.../parse_options.py
# new file mode 100644
# index 0000000000000000000000000000000000000000..6612a309587f82269f9af5f00d178940d6932e06

#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +################################################################################ +"""Helper script for parsing custom fuzzing options.""" +import configparser +import sys + + +def parse_options(options_file_path, options_section): + """Parses the given file and returns options from the given section.""" + parser = configparser.ConfigParser() + parser.read(options_file_path) + + if not parser.has_section(options_section): + return None + + options = parser[options_section] + + if options_section == 'libfuzzer': + options_string = ' '.join( + '-%s=%s' % (key, value) for key, value in options.items()) + else: + # Sanitizer options. + options_string = ':'.join( + '%s=%s' % (key, value) for key, value in options.items()) + + return options_string + + +def main(): + """Processes the arguments and prints the options in the correct format.""" + if len(sys.argv) < 3: + sys.stderr.write('Usage: %s \n' % + sys.argv[0]) + return 1 + + options = parse_options(sys.argv[1], sys.argv[2]) + if options is not None: + print(options) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py new file mode 100644 index 0000000000000000000000000000000000000000..d1ce1fd86396ed02a03fbeb6a54de1d595c2855b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Helper script for upgrading a profraw file to latest version."""

from collections import namedtuple
import struct
import subprocess
import sys

# First 16 bytes of every profraw file: magic + format version.
HeaderGeneric = namedtuple('HeaderGeneric', 'magic version')
# Version 9 header layout (fields after magic/version), 12 uint64 values.
HeaderVersion9 = namedtuple(
    'HeaderVersion9',
    'BinaryIdsSize DataSize PaddingBytesBeforeCounters CountersSize \
    PaddingBytesAfterCounters NumBitmapBytes PaddingBytesAfterBitmapBytes NamesSize CountersDelta BitmapDelta NamesDelta ValueKindLast'
)

PROFRAW_MAGIC = 0xff6c70726f667281


def relativize_address(data, offset, databegin, sect_prf_cnts, sect_prf_data):
  """Turns an absolute address at |offset| in |data| into a relative one.

  Only addresses inside [sect_prf_cnts, sect_prf_data) are rewritten; the
  value is replaced in place by (value - databegin) as an unsigned 64-bit.

  Returns:
    True if the address was rewritten, False if it was left untouched.
  """
  value = struct.unpack('Q', data[offset:offset + 8])[0]
  if sect_prf_cnts <= value < sect_prf_data:
    # If the value is an address in the right section, make it relative.
    value = (value - databegin) & 0xffffffffffffffff
    value = struct.pack('Q', value)
    for i in range(8):
      data[offset + i] = value[i]
    # address was made relative
    return True
  # no changes done
  return False


def upgrade(data, sect_prf_cnts, sect_prf_data):
  """Upgrades profraw data to the version 9 layout.

  Args:
    data: bytearray with the raw profile contents.
    sect_prf_cnts: load address of the __llvm_prf_cnts section in the binary.
    sect_prf_data: load address of the __llvm_prf_data section in the binary.

  Returns:
    The upgraded profile data.

  Raises:
    Exception: on a bad magic number or an unsupported source version.
  """
  generic_header = HeaderGeneric._make(struct.unpack('QQ', data[:16]))
  if generic_header.magic != PROFRAW_MAGIC:
    raise Exception('Bad magic.')
  base_version = generic_header.version

  if base_version >= 9:
    # Nothing to do.
    return data
  if base_version < 5 or base_version == 6:
    raise Exception('Unhandled version.')

  if generic_header.version == 5:
    generic_header = generic_header._replace(version=7)
    # Upgrade from version 5 to 7 by adding a zeroed BinaryIdsSize field.
    data = data[:8] + struct.pack('Q', generic_header.version) + struct.pack(
        'Q', 0) + data[16:]
  if generic_header.version == 7:
    # cf https://reviews.llvm.org/D111123
    generic_header = generic_header._replace(version=8)
    data = data[:8] + struct.pack('Q', generic_header.version) + data[16:]
  if generic_header.version == 8:
    # see https://reviews.llvm.org/D138846
    generic_header = generic_header._replace(version=9)
    # Upgrade from version 8 to 9 by adding the NumBitmapBytes,
    # PaddingBytesAfterBitmapBytes and BitmapDelta header fields.
    data = data[:8] + struct.pack(
        'Q', generic_header.version) + data[16:56] + struct.pack(
            'QQ', 0, 0) + data[56:72] + struct.pack('Q', 0) + data[72:]

  v9_header = HeaderVersion9._make(struct.unpack('QQQQQQQQQQQQ', data[16:112]))

  if base_version <= 8 and v9_header.BinaryIdsSize % 8 != 0:
    # Adds padding for binary ids.
    # cf commit b9f547e8e51182d32f1912f97a3e53f4899ea6be
    # cf https://reviews.llvm.org/D110365
    padlen = 8 - (v9_header.BinaryIdsSize % 8)
    old_ids_size = v9_header.BinaryIdsSize
    # BUG FIX: the original computed the padded size into an unused
    # `v7_header` and then wrote the *stale* BinaryIdsSize back into the
    # header, so every offset computed below was short by `padlen` bytes.
    # Keep the updated header and persist the padded size.
    v9_header = v9_header._replace(BinaryIdsSize=old_ids_size + padlen)
    data = data[:16] + struct.pack('Q', v9_header.BinaryIdsSize) + data[24:]
    data = data[:112 + old_ids_size] + bytes(padlen) + data[112 +
                                                            old_ids_size:]

  if base_version <= 8:
    offset = 112 + v9_header.BinaryIdsSize
    for d in range(v9_header.DataSize):
      # Add BitmapPtr and aligned u32(NumBitmapBytes) to each data record.
      data = data[:offset + 3 * 8] + struct.pack(
          'Q', 0) + data[offset + 3 * 8:offset + 6 * 8] + struct.pack(
              'Q', 0) + data[offset + 6 * 8:]
      # Compensate CounterPtr for the 16 bytes inserted per preceding record.
      value = struct.unpack('Q',
                            data[offset + 2 * 8:offset + 3 * 8])[0] - 16 * d
      data = data[:offset + 2 * 8] + struct.pack('Q',
                                                 value) + data[offset + 3 * 8:]
      offset += 8 * 8

  if base_version >= 8:
    # Nothing more to do.
    return data

  # Last changes are related to the bump from 7 to version 8 making
  # CountersPtr relative.
  dataref = sect_prf_data
  # 80 is offset of CountersDelta.
  if not relativize_address(data, 80, dataref, sect_prf_cnts, sect_prf_data):
    return data

  offset = 112 + v9_header.BinaryIdsSize
  # This also works for C+Rust binaries compiled with
  # clang-14/rust-nightly-clang-13.
  for _ in range(v9_header.DataSize):
    # 16 is the offset of CounterPtr in ProfrawData structure.
    relativize_address(data, offset + 16, dataref, sect_prf_cnts, sect_prf_data)
    # We need this because of CountersDelta -= sizeof(*SrcData);
    # seen in __llvm_profile_merge_from_buffer.
    dataref += 44 + 2 * (v9_header.ValueKindLast + 1)
    # BUG FIX: the original gated a `dataref -= 16` adjustment on an undefined
    # name `was8` here (a NameError at runtime). This path is only reachable
    # for base_version <= 7 — version 8 profiles returned above — so the
    # v8-only adjustment was dead code and has been removed.
    # This is the size of one ProfrawData structure.
    offset += 44 + 2 * (v9_header.ValueKindLast + 1)

  return data


def main():
  """Helper script for upgrading a profraw file to latest version."""
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: %s <binary> options? <profraw files...>\n' %
                     sys.argv[0])
    return 1

  # First find llvm profile sections addresses in the elf, quick and dirty.
  process = subprocess.Popen(['readelf', '-S', sys.argv[1]],
                             stdout=subprocess.PIPE)
  output, err = process.communicate()
  # NOTE(review): stderr is not piped, so `err` is always None here and this
  # check never fires — kept for parity with the original; confirm intent.
  if err:
    print('readelf failed')
    return 2
  sect_prf_cnts = None
  sect_prf_data = None
  for line in output.split(b'\n'):
    if b'__llvm_prf_cnts' in line:
      sect_prf_cnts = int(line.split()[3], 16)
    elif b'__llvm_prf_data' in line:
      sect_prf_data = int(line.split()[3], 16)
  # ROBUSTNESS FIX: the original raised NameError below when either section
  # was missing from the binary; fail with a clear message instead.
  if sect_prf_cnts is None or sect_prf_data is None:
    sys.stderr.write('Could not find llvm profile sections in %s\n' %
                     sys.argv[1])
    return 2

  out_name = "default.profup"
  in_place = False
  start = 2
  if sys.argv[2] == "-i":
    in_place = True
    start = start + 1
  elif sys.argv[2] == "-o":
    out_name = sys.argv[3]
    start = 4

  if len(sys.argv) < start:
    sys.stderr.write('Usage: %s options <profraw files...>\n' % sys.argv[0])
    return 1

  for i in range(start, len(sys.argv)):
    # Then open and read the input profraw file.
    with open(sys.argv[i], 'rb') as input_file:
      profraw_base = bytearray(input_file.read())
    # Do the upgrade, returning a bytes object.
    profraw_latest = upgrade(profraw_base, sect_prf_cnts, sect_prf_data)
    # Write the output to the file given to the command line.
    if in_place:
      out_name = sys.argv[i]
    with open(out_name, 'wb') as output_file:
      output_file.write(profraw_latest)

  return 0


if __name__ == '__main__':
  sys.exit(main())

# --- patch metadata for the next file in this diff ---
# diff --git a/.../python_coverage_runner_help.py b/.../python_coverage_runner_help.py
# new file mode 100644
# index 0000000000000000000000000000000000000000..5d5f627128455ed4d5d70c166ef8c7fe218ace99

# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# (Module docstring of python_coverage_runner_help.py begins here in the
# original: helper to manage coverage.py related operations — passes commands
# into coverage.py and translates pyinstaller .coverage paths to local files
# for html report creation.)
+""" +import os +import re +import json +import sys +from coverage.cmdline import main as coverage_main +from coverage.data import CoverageData + + +def should_exclude_file(filepath): + """Returns whether the path should be excluded from the coverage report.""" + # Skip all atheris code + if "atheris" in filepath: + return True + + # Filter out all standard python libraries + if ('/usr/local/lib/python' in filepath and + 'site-packages' not in filepath and 'dist-packages' not in filepath): + return True + + # Avoid all PyInstaller modules. + if 'PyInstaller' in filepath: + return True + + return False + + +def translate_lines(cov_data, new_cov_data, all_file_paths): + """ + Translate lines in a .coverage file created by coverage.py such that + the file paths points to local files instead. This is needed when collecting + coverage from executables created by pyinstaller. + """ + for pyinstaller_file_path in cov_data.measured_files(): + stripped_py_file_path = pyinstaller_file_path + if stripped_py_file_path.startswith('/tmp/_MEI'): + stripped_py_file_path = '/'.join(stripped_py_file_path.split('/')[3:]) + if stripped_py_file_path.startswith('/out/'): + stripped_py_file_path = stripped_py_file_path.replace('/out/', '') + + # Check if this file exists in our file paths: + for local_file_path in all_file_paths: + if should_exclude_file(local_file_path): + continue + if local_file_path.endswith(stripped_py_file_path): + print('Found matching: %s' % (local_file_path)) + new_cov_data.add_lines( + {local_file_path: cov_data.lines(pyinstaller_file_path)}) + + +def translate_coverage(all_file_paths): + """ + Translate pyinstaller-generated file paths in .coverage (produced by + coverage.py) into local file paths. Place result in .new_coverage. 
+ """ + covdata_pre_translation = CoverageData('.coverage') + covdata_post_translation = CoverageData('.new_coverage') + + covdata_pre_translation.read() + translate_lines(covdata_pre_translation, covdata_post_translation, + all_file_paths) + covdata_post_translation.write() + + +def convert_coveragepy_cov_to_summary_json(src, dst): + """ + Converts a json file produced by coveragepy into a summary.json file + similary to llvm-cov output. `src` is the source coveragepy json file, + `dst` is the destination json file, which will be overwritten. + """ + dst_dict = {'data': [{'files': {}}]} + lines_covered = 0 + lines_count = 0 + with open(src, "r") as src_f: + src_json = json.loads(src_f.read()) + if 'files' in src_json: + for elem in src_json.get('files'): + if 'summary' not in src_json['files'][elem]: + continue + src_dict = src_json['files'][elem]['summary'] + count = src_dict['covered_lines'] + src_dict['missing_lines'] + covered = src_dict['covered_lines'] + notcovered = src_dict['missing_lines'] + percent = src_dict['percent_covered'] + + # Accumulate line coverage + lines_covered += covered + lines_count += count + + dst_dict['data'][0]['files'][elem] = { + 'summary': { + 'lines': { + 'count': count, + 'covered': covered, + 'notcovered': notcovered, + 'percent': percent + } + } + } + if lines_count > 0: + lines_covered_percent = lines_covered / lines_count + else: + lines_covered_percent = 0.0 + dst_dict['data'][0]['totals'] = { + 'branches': { + 'count': 0, + 'covered': 0, + 'notcovered': 0, + 'percent': 0.0 + }, + 'functions': { + 'count': 0, + 'covered': 0, + 'percent': 0.0 + }, + 'instantiations': { + 'count': 0, + 'covered': 0, + 'percent': 0.0 + }, + 'lines': { + 'count': lines_count, + 'covered': lines_covered, + 'percent': lines_covered_percent + }, + 'regions': { + 'count': 0, + 'covered': 0, + 'notcovered': 0, + 'percent': 0.0 + } + } + + with open(dst, 'w') as dst_f: + dst_f.write(json.dumps(dst_dict)) + + +def main(): + """ + Main handler. 
+ """ + if sys.argv[1] == 'translate': + print('Translating the coverage') + files_path = sys.argv[2] + all_file_paths = list() + for root, _, files in os.walk(files_path): + for relative_file_path in files: + abs_file_path = os.path.abspath(os.path.join(root, relative_file_path)) + all_file_paths.append(abs_file_path) + print('Done with path walk') + translate_coverage(all_file_paths) + elif sys.argv[1] == 'convert-to-summary-json': + src = sys.argv[2] + dst = sys.argv[3] + convert_coveragepy_cov_to_summary_json(src, dst) + else: + # Pass commands into coverage package + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(coverage_main()) + + +if __name__ == '__main__': + main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt new file mode 100644 index 0000000000000000000000000000000000000000..1c621100c3be0ae4f50d0e4cf817120b8f169b5a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt @@ -0,0 +1,21 @@ +#!/bin/bash -u +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# Symbol demangling for both C++ and Rust
#
################################################################################

# simply pipe
rustfilt | c++filt -n

# --- patch metadata for the next file in this diff ---
# diff --git a/.../reproduce b/.../reproduce
# new file mode 100644
# index 0000000000000000000000000000000000000000..2c074d05e51bd0068f5ed4795159cc69eda0426f

#!/bin/bash -eux
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Reproduces a crash: runs the given fuzzer once, interactively, on the
# testcase mounted at $TESTCASE (default /testcase).
FUZZER=$1
shift

if [ ! -v TESTCASE ]; then
  TESTCASE="/testcase"
fi

if [ ! -f $TESTCASE ]; then
  # NOTE(review): the mount source placeholder appears to have been lost from
  # this message (originally "docker run -v <path>:$TESTCASE") — confirm.
  echo "Error: $TESTCASE not found, use: docker run -v :$TESTCASE ..."
  exit 1
fi

export RUN_FUZZER_MODE="interactive"
export FUZZING_ENGINE="libfuzzer"
export SKIP_SEED_CORPUS="1"

run_fuzzer $FUZZER $@ $TESTCASE

# --- patch metadata for the next file in this diff ---
# diff --git a/.../run_fuzzer b/.../run_fuzzer
# new file mode 100644
# index 0000000000000000000000000000000000000000..6e55917cb62bf43b524eed257f585f54154c3362

#!/bin/bash -eu
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Fuzzer runner. Appends .options arguments and seed corpus to users args.
# Usage: $0

sysctl -w vm.mmap_rnd_bits=28

export PATH=$OUT:$PATH
cd $OUT

DEBUGGER=${DEBUGGER:-}

FUZZER=$1
shift

# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus
# from ClusterFuzz.
CORPUS_DIR=${CORPUS_DIR:-}
if [ -z "$CORPUS_DIR" ]
then
  CORPUS_DIR="/tmp/${FUZZER}_corpus"
  rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
fi

SANITIZER=${SANITIZER:-}
if [ -z $SANITIZER ]; then
  # If $SANITIZER is not specified (e.g. calling from `reproduce` command), it
  # is not important and can be set to any value.
  SANITIZER="default"
fi

if [[ "$RUN_FUZZER_MODE" = interactive ]]; then
  FUZZER_OUT="$OUT/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
else
  FUZZER_OUT="/tmp/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
fi

# Emits the engine-specific dictionary flag, from either the `dict=` entry in
# $FUZZER.options or a $FUZZER.dict file; emits nothing if neither exists.
function get_dictionary() {
  local options_file="$FUZZER.options"
  local dict_file="$FUZZER.dict"
  local dict=""
  if [[ -f "$options_file" ]]; then
    dict=$(sed -n 's/^\s*dict\s*=\s*\(.*\)/\1/p' "$options_file" | tail -1)
  fi
  if [[ -z "$dict" && -f "$dict_file" ]]; then
    dict="$dict_file"
  fi
  [[ -z "$dict" ]] && return
  if [[ "$FUZZING_ENGINE" = "libfuzzer" ]]; then
    printf -- "-dict=%s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "afl" ]]; then
    printf -- "-x %s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "honggfuzz" ]]; then
    printf -- "--dict %s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "centipede" ]]; then
    printf -- "--dictionary %s" "$dict"
  else
    printf "Unexpected FUZZING_ENGINE: $FUZZING_ENGINE, ignoring\n" >&2
  fi
}

# Emits Centipede's --extra_binaries flag for the sanitized companion binary,
# if present; emits nothing for all other engines.
function get_extra_binaries() {
  [[ "$FUZZING_ENGINE" != "centipede" ]] && return

  extra_binaries="$OUT/__centipede_${SANITIZER}/${FUZZER}"
  if compgen -G "$extra_binaries" >> /dev/null; then
    printf -- "--extra_binaries %s" \""$extra_binaries\""
  fi

}

rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT

SEED_CORPUS="${FUZZER}_seed_corpus.zip"

# TODO: Investigate why this code block is skipped
# by all default fuzzers in bad_build_check.
# They all set SKIP_SEED_CORPUS=1.
if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then
  echo "Using seed corpus: $SEED_CORPUS"
  unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
fi

OPTIONS_FILE="${FUZZER}.options"
CUSTOM_LIBFUZZER_OPTIONS=""

if [ -f $OPTIONS_FILE ]; then
  custom_asan_options=$(parse_options.py $OPTIONS_FILE asan)
  if [ ! -z $custom_asan_options ]; then
    export ASAN_OPTIONS="$ASAN_OPTIONS:$custom_asan_options"
  fi

  custom_msan_options=$(parse_options.py $OPTIONS_FILE msan)
  if [ ! -z $custom_msan_options ]; then
    export MSAN_OPTIONS="$MSAN_OPTIONS:$custom_msan_options"
  fi

  custom_ubsan_options=$(parse_options.py $OPTIONS_FILE ubsan)
  if [ ! -z $custom_ubsan_options ]; then
    export UBSAN_OPTIONS="$UBSAN_OPTIONS:$custom_ubsan_options"
  fi

  CUSTOM_LIBFUZZER_OPTIONS=$(parse_options.py $OPTIONS_FILE libfuzzer)
fi

if [[ "$FUZZING_ENGINE" = afl ]]; then

  # Set afl++ environment options.
  export ASAN_OPTIONS="$ASAN_OPTIONS:abort_on_error=1:symbolize=0:detect_odr_violation=0:"
  export MSAN_OPTIONS="$MSAN_OPTIONS:exit_code=86:symbolize=0"
  export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0"
  export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
  export AFL_SKIP_CPUFREQ=1
  export AFL_TRY_AFFINITY=1
  export AFL_FAST_CAL=1
  export AFL_CMPLOG_ONLY_NEW=1
  export AFL_FORKSRV_INIT_TMOUT=30000
  export AFL_IGNORE_PROBLEMS=1
  export AFL_IGNORE_UNKNOWN_ENVS=1

  # If $OUT/afl_cmplog.txt is present this means the target was compiled for
  # CMPLOG. So we have to add the proper parameters to afl-fuzz.
  test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER"

  # If $OUT/afl++.dict we load it as a dictionary for afl-fuzz.
  test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict"

  # Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers
  # are slower than this.
  AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+"

  # AFL expects at least 1 file in the input dir.
  echo input > ${CORPUS_DIR}/input

  CMD_LINE="$OUT/afl-fuzz $AFL_FUZZER_ARGS -i $CORPUS_DIR -o $FUZZER_OUT $(get_dictionary) $* -- $OUT/$FUZZER"

  echo afl++ setup:
  env|grep AFL_
  cat "$OUT/afl_options.txt"

elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then

  # Honggfuzz expects at least 1 file in the input dir.
  echo input > $CORPUS_DIR/input
  # --exit_upon_crash: exit whith a first crash seen
  # -V: verify crashes
  # -R (report): save report file to this location
  # -W (working dir): where the crashes go
  # -v (verbose): don't use VTE UI, just stderr
  # -z: use software-instrumentation of clang (trace-pc-guard....)
  # -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput)
  # -f: location of the initial (and destination) file corpus
  # -n: number of fuzzing threads (and processes)
  CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -V -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\""

  if [[ $(LC_ALL=C grep -P "\x01_LIBHFUZZ_NETDRIVER_BINARY_SIGNATURE_\x02\xFF" "$FUZZER" ) ]]; then
    # Honggfuzz Netdriver port. This must match the port in Clusterfuzz.
    export HFND_TCP_PORT=8666
  fi
elif [[ "$FUZZING_ENGINE" = centipede ]]; then

  # Create the work and corpus directory for Centipede.
  CENTIPEDE_WORKDIR="${CENTIPEDE_WORKDIR:-$OUT}"

  # Centipede only saves crashes to crashes/ in workdir.
  rm -rf $FUZZER_OUT

  # --workdir: Dir that stores corpus&features in Centipede's own format.
  # --corpus_dir: Location of the initial (and destination) file corpus.
  # --fork_server: Execute the target(s) via the fork server.
  #   Run in fork mode to continue fuzzing indefinitely in case of
  #   OOMs, timeouts, and crashes.
  # --exit_on_crash=0: Do not exit on crash.
  # --timeout=1200: The process that executes target binary will abort
  #   if an input runs >= 1200s.
  # --rss_limit_mb=0: Disable RSS limit.
  # --address_space_limit_mb=0: No address space limit.
  # --binary: The target binary under test without sanitizer.
  # --extra_binary: The target binaries under test with sanitizers.
  CMD_LINE="$OUT/centipede --workdir=$CENTIPEDE_WORKDIR --corpus_dir=\"$CORPUS_DIR\" --fork_server=1 --exit_on_crash=1 --timeout=1200 --rss_limit_mb=4096 --address_space_limit_mb=5120 $(get_dictionary) --binary=\"$OUT/${FUZZER}\" $(get_extra_binaries) $*"
else

  CMD_LINE="$OUT/$FUZZER $FUZZER_ARGS $*"

  if [ -z ${SKIP_SEED_CORPUS:-} ]; then
    CMD_LINE="$CMD_LINE $CORPUS_DIR"
  fi

  if [[ ! -z ${CUSTOM_LIBFUZZER_OPTIONS} ]]; then
    CMD_LINE="$CMD_LINE $CUSTOM_LIBFUZZER_OPTIONS"
  fi

  if [[ ! "$CMD_LINE" =~ "-dict=" ]]; then
    if [ -f "$FUZZER.dict" ]; then
      CMD_LINE="$CMD_LINE -dict=$FUZZER.dict"
    fi
  fi

  CMD_LINE="$CMD_LINE < /dev/null"

fi

echo $CMD_LINE

# Unset OUT so the fuzz target can't rely on it.
unset OUT

if [ ! -z "$DEBUGGER" ]; then
  CMD_LINE="$DEBUGGER $CMD_LINE"
fi

bash -c "$CMD_LINE"

# --- patch metadata for the next file in this diff ---
# diff --git a/.../ruzzy b/.../ruzzy
# new file mode 100644
# index 0000000000000000000000000000000000000000..663e8f5b9c4288cde4e1c422343ca595e4afaab6

#!/usr/bin/env bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

ASAN_OPTIONS="allocator_may_return_null=1:detect_leaks=0:use_sigaltstack=0" LD_PRELOAD=$(ruby -e 'require "ruzzy"; print Ruzzy::ASAN_PATH') \
  ruby $@

# --- patch metadata for the next file in this diff ---
# diff --git a/.../targets_list b/.../targets_list
# new file mode 100644
# index 0000000000000000000000000000000000000000..95615c81118c7991cea1c3e800ea9b5c304040ed

#!/bin/bash

for binary in $(find $OUT/ -executable -type f); do
  [[ "$binary" != *.so ]] || continue
  [[ $(basename "$binary") != jazzer_driver* ]] || continue
  file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue
  grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue

  basename "$binary"
done

# --- patch metadata for the next file in this diff ---
# diff --git a/.../test_all.py b/.../test_all.py
# new file mode 100644
# index 0000000000000000000000000000000000000000..428e49ac0b6d7179dcab5003e6104e7a88c98906

#!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Does bad_build_check on all fuzz targets in $OUT."""

import contextlib
import multiprocessing
import os
import re
import subprocess
import stat
import sys
import tempfile

# Fake $OUT used to catch fuzz targets that hardcode paths (see
# use_different_out_dir below).
BASE_TMP_FUZZER_DIR = '/tmp/not-out'

# Any of the user/group/other execute permission bits.
EXECUTABLE = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH

# Targets exempted from bad build checks.
# NOTE(review): these entries look glob-like; as a regex, `ares_*_fuzzer`
# matches 'ares' + any number of underscores + '_fuzzer' — confirm intent.
IGNORED_TARGETS = [
    r'do_stuff_fuzzer', r'checksum_fuzzer', r'fuzz_dump', r'fuzz_keyring',
    r'xmltest', r'fuzz_compression_sas_rle', r'ares_*_fuzzer'
]

# Single anchored alternation over all ignored target patterns.
IGNORED_TARGETS_RE = re.compile('^' + r'$|^'.join(IGNORED_TARGETS) + '$')


def move_directory_contents(src_directory, dst_directory):
  """Moves contents of |src_directory| to |dst_directory|."""
  # Use mv because mv preserves file permissions. If we don't preserve file
  # permissions that can mess up CheckFuzzerBuildTest in cifuzz_test.py and
  # other cases where one is calling test_all on files not in OSS-Fuzz's real
  # out directory.
  src_contents = [
      os.path.join(src_directory, filename)
      for filename in os.listdir(src_directory)
  ]
  command = ['mv'] + src_contents + [dst_directory]
  subprocess.check_call(command)


def is_elf(filepath):
  """Returns True if |filepath| is an ELF file (per the `file` utility)."""
  result = subprocess.run(['file', filepath],
                          stdout=subprocess.PIPE,
                          check=False)
  return b'ELF' in result.stdout


def is_shell_script(filepath):
  """Returns True if |filepath| is a shell script (per the `file` utility)."""
  result = subprocess.run(['file', filepath],
                          stdout=subprocess.PIPE,
                          check=False)
  return b'shell script' in result.stdout


def find_fuzz_targets(directory):
  """Returns paths to fuzz targets in |directory|."""
  # TODO(https://github.com/google/oss-fuzz/issues/4585): Use libClusterFuzz for
  # this.
  fuzz_targets = []
  for filename in os.listdir(directory):
    path = os.path.join(directory, filename)
    # Skip tooling binaries shipped alongside the targets.
    if filename == 'llvm-symbolizer':
      continue
    if filename.startswith('afl-'):
      continue
    if filename.startswith('jazzer_'):
      continue
    if not os.path.isfile(path):
      continue
    # Require at least one execute bit.
    if not os.stat(path).st_mode & EXECUTABLE:
      continue
    # Fuzz targets can either be ELF binaries or shell scripts (e.g. wrapper
    # scripts for Python and JVM targets or rules_fuzzing builds with runfiles
    # trees).
    if not is_elf(path) and not is_shell_script(path):
      continue
    # For engine-backed builds, a real target must embed the libFuzzer
    # entry-point symbol name.
    if os.getenv('FUZZING_ENGINE') not in {'none', 'wycheproof'}:
      with open(path, 'rb') as file_handle:
        binary_contents = file_handle.read()
      if b'LLVMFuzzerTestOneInput' not in binary_contents:
        continue
    fuzz_targets.append(path)
  return fuzz_targets


def do_bad_build_check(fuzz_target):
  """Runs bad_build_check on |fuzz_target|. Returns a
  Subprocess.ProcessResult."""
  print('INFO: performing bad build checks for', fuzz_target)
  # find_centipede_auxiliary is presumably defined later in this file (not
  # visible in this chunk).
  if centipede_needs_auxiliaries():
    print('INFO: Finding Centipede\'s auxiliary for target', fuzz_target)
    auxiliary_path = find_centipede_auxiliary(fuzz_target)
    print('INFO: Using auxiliary binary:', auxiliary_path)
    auxiliary = [auxiliary_path]
  else:
    auxiliary = []

  command = ['bad_build_check', fuzz_target] + auxiliary
  with tempfile.TemporaryDirectory() as temp_centipede_workdir:
    # Do this so that centipede doesn't fill up the disk during bad build check
    env = os.environ.copy()
    env['CENTIPEDE_WORKDIR'] = temp_centipede_workdir
    return subprocess.run(command,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          env=env,
                          check=False)


def get_broken_fuzz_targets(bad_build_results, fuzz_targets):
  """Returns a list of broken fuzz targets and their process results in
  |fuzz_targets| where each item in |bad_build_results| is the result of
  bad_build_check on the corresponding element in |fuzz_targets|."""
  broken = []
  for result, fuzz_target in zip(bad_build_results, fuzz_targets):
    if result.returncode != 0:
      broken.append((fuzz_target, result))
  return broken


def has_ignored_targets(out_dir):
  """Returns True if |out_dir| has any fuzz targets we are supposed to ignore
  bad build checks of."""
  out_files = set(os.listdir(out_dir))
  for filename in out_files:
    if re.match(IGNORED_TARGETS_RE, filename):
      return True
  return False


@contextlib.contextmanager
def use_different_out_dir():
  """Context manager that moves OUT to subdirectory of BASE_TMP_FUZZER_DIR. This
  is useful for catching hardcoding. Note that this sets the environment
  variable OUT and therefore must be run before multiprocessing.Pool is created.
  Resets OUT at the end."""
  # Use a fake OUT directory to catch path hardcoding that breaks on
  # ClusterFuzz.
  initial_out = os.getenv('OUT')
  os.makedirs(BASE_TMP_FUZZER_DIR, exist_ok=True)
  # Use a random subdirectory of BASE_TMP_FUZZER_DIR to allow running multiple
  # instances of test_all in parallel (useful for integration testing).
  with tempfile.TemporaryDirectory(dir=BASE_TMP_FUZZER_DIR) as out:
    # Set this so that run_fuzzer which is called by bad_build_check works
    # properly.
    os.environ['OUT'] = out
    # We move the contents of the directory because we can't move the
    # directory itself because it is a mount.
    move_directory_contents(initial_out, out)
    try:
      yield out
    finally:
      # Always restore the real $OUT contents and value, even on failure.
      move_directory_contents(out, initial_out)
      os.environ['OUT'] = initial_out


def test_all_outside_out(allowed_broken_targets_percentage):
  """Wrapper around test_all that changes OUT and returns the result."""
  with use_different_out_dir() as out:
    return test_all(out, allowed_broken_targets_percentage)


def centipede_needs_auxiliaries():
  """Checks if auxiliaries are needed for Centipede."""
  # Centipede always requires unsanitized binaries as the main fuzz targets,
  # and separate sanitized binaries as auxiliaries.
  # 1.
Building sanitized binaries with helper.py (i.e., local or GitHub CI): + # Unsanitized ones will be built automatically into the same docker container. + # Script bad_build_check tests both + # a) If main fuzz targets can run with the auxiliaries, and + # b) If the auxiliaries are built with the correct sanitizers. + # 2. In Trial build and production build: + # Two kinds of binaries will be in separated buckets / docker containers. + # Script bad_build_check tests either + # a) If the unsanitized binaries can run without the sanitized ones, or + # b) If the sanitized binaries are built with the correct sanitizers. + return (os.getenv('FUZZING_ENGINE') == 'centipede' and + os.getenv('SANITIZER') != 'none' and os.getenv('HELPER') == 'True') + + +def find_centipede_auxiliary(main_fuzz_target_path): + """Finds the sanitized binary path that corresponds to |main_fuzz_target| for + bad_build_check.""" + target_dir, target_name = os.path.split(main_fuzz_target_path) + sanitized_binary_dir = os.path.join(target_dir, + f'__centipede_{os.getenv("SANITIZER")}') + sanitized_binary_path = os.path.join(sanitized_binary_dir, target_name) + + if os.path.isfile(sanitized_binary_path): + return sanitized_binary_path + + # Neither of the following two should ever happen, returns None to indicate + # an error. + if os.path.isdir(sanitized_binary_dir): + print('ERROR: Unable to identify Centipede\'s sanitized target' + f'{sanitized_binary_path} in {os.listdir(sanitized_binary_dir)}') + else: + print('ERROR: Unable to identify Centipede\'s sanitized target directory' + f'{sanitized_binary_dir} in {os.listdir(target_dir)}') + return None + + +def test_all(out, allowed_broken_targets_percentage): # pylint: disable=too-many-return-statements + """Do bad_build_check on all fuzz targets.""" + # TODO(metzman): Refactor so that we can convert test_one to python. 
+ fuzz_targets = find_fuzz_targets(out) + if not fuzz_targets: + print('ERROR: No fuzz targets found.') + return False + + if centipede_needs_auxiliaries(): + for fuzz_target in fuzz_targets: + if not find_centipede_auxiliary(fuzz_target): + print(f'ERROR: Couldn\'t find auxiliary for {fuzz_target}.') + return False + + pool = multiprocessing.Pool() + bad_build_results = pool.map(do_bad_build_check, fuzz_targets) + pool.close() + pool.join() + broken_targets = get_broken_fuzz_targets(bad_build_results, fuzz_targets) + broken_targets_count = len(broken_targets) + if not broken_targets_count: + return True + + print('Retrying failed fuzz targets sequentially', broken_targets_count) + pool = multiprocessing.Pool(1) + retry_targets = [] + for broken_target, result in broken_targets: + retry_targets.append(broken_target) + bad_build_results = pool.map(do_bad_build_check, retry_targets) + pool.close() + pool.join() + broken_targets = get_broken_fuzz_targets(bad_build_results, broken_targets) + broken_targets_count = len(broken_targets) + if not broken_targets_count: + return True + + print('Broken fuzz targets', broken_targets_count) + total_targets_count = len(fuzz_targets) + broken_targets_percentage = 100 * broken_targets_count / total_targets_count + for broken_target, result in broken_targets: + print(broken_target) + # Use write because we can't print binary strings. + sys.stdout.buffer.write(result.stdout + result.stderr + b'\n') + + if broken_targets_percentage > allowed_broken_targets_percentage: + print('ERROR: {broken_targets_percentage}% of fuzz targets seem to be ' + 'broken. 
See the list above for a detailed information.'.format( + broken_targets_percentage=broken_targets_percentage)) + if has_ignored_targets(out): + print('Build check automatically passing because of ignored targets.') + return True + return False + print('{total_targets_count} fuzzers total, {broken_targets_count} ' + 'seem to be broken ({broken_targets_percentage}%).'.format( + total_targets_count=total_targets_count, + broken_targets_count=broken_targets_count, + broken_targets_percentage=broken_targets_percentage)) + return True + + +def get_allowed_broken_targets_percentage(): + """Returns the value of the environment value + 'ALLOWED_BROKEN_TARGETS_PERCENTAGE' as an int or returns a reasonable + default.""" + return int(os.getenv('ALLOWED_BROKEN_TARGETS_PERCENTAGE') or '10') + + +def main(): + """Does bad_build_check on all fuzz targets in parallel. Returns 0 on success. + Returns 1 on failure.""" + allowed_broken_targets_percentage = get_allowed_broken_targets_percentage() + if not test_all_outside_out(allowed_broken_targets_percentage): + return 1 + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b3077ec1e4f6d1509b27caab141180a27202b872 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py @@ -0,0 +1,38 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Tests test_all.py"""
import unittest
from unittest import mock

import test_all


class TestTestAll(unittest.TestCase):
  """Tests for the test_all function."""

  @mock.patch('test_all.find_fuzz_targets', return_value=[])
  @mock.patch('builtins.print')
  def test_test_all_no_fuzz_targets(self, mock_print, _):
    """Tests that test_all returns False when there are no fuzz targets."""
    # With no targets discovered, test_all must fail and report the error.
    self.assertFalse(test_all.test_all('/out', 0))
    mock_print.assert_called_with('ERROR: No fuzz targets found.')


if __name__ == '__main__':
  unittest.main()
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Does bad_build_check on a fuzz target in $OUT."""
import os
import sys

import test_all


def test_one(fuzz_target):
  """Does bad_build_check on one fuzz target. Returns True on success."""
  with test_all.use_different_out_dir():
    # OUT is swapped by use_different_out_dir, so the target path must be
    # computed inside the context manager.
    fuzz_target_path = os.path.join(os.environ['OUT'], fuzz_target)
    result = test_all.do_bad_build_check(fuzz_target_path)
    if result.returncode != 0:
      # Surface the check's output; write bytes directly because stdout and
      # stderr of the check are binary strings.
      sys.stdout.buffer.write(result.stdout + result.stderr + b'\n')
      return False
    return True


def main():
  """Does bad_build_check on one fuzz target. Returns 1 on failure, 0 on
  success."""
  if len(sys.argv) != 2:
    # BUG FIX: the original 'print('Usage: %d ', sys.argv[0])' used the %d
    # specifier on a string and passed the program name as a second print()
    # argument, so the format was never applied.
    print('Usage: %s <fuzz_target>' % sys.argv[0])
    return 1

  fuzz_target_binary = sys.argv[1]
  return 0 if test_one(fuzz_target_binary) else 1


if __name__ == '__main__':
  sys.exit(main())
+ required: true + language: + description: 'Programming language project is written in.' + required: false + default: 'c++' + dry-run: + description: 'If set, run the action without actually reporting a failure.' + default: false + allowed-broken-targets-percentage: + description: 'The percentage of broken targets allowed in bad_build_check.' + required: false + sanitizer: + description: 'The sanitizer to build the fuzzers with.' + default: 'address' + architecture: + description: 'The architecture used to build the fuzzers.' + default: 'x86_64' + project-src-path: + description: "The path to the project's source code checkout." + required: false + bad-build-check: + description: "Whether or not OSS-Fuzz's check for bad builds should be done." + required: false + default: true + keep-unaffected-fuzz-targets: + description: "Whether to keep unaffected fuzzers or delete them." + required: false + default: false + output-sarif: + description: "Whether to output fuzzing results to SARIF." + required: false + default: false +runs: + using: 'docker' + image: '../../../build_fuzzers.Dockerfile' + env: + OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }} + LANGUAGE: ${{ inputs.language }} + DRY_RUN: ${{ inputs.dry-run}} + ALLOWED_BROKEN_TARGETS_PERCENTAGE: ${{ inputs.allowed-broken-targets-percentage}} + SANITIZER: ${{ inputs.sanitizer }} + ARCHITECTURE: ${{ inputs.architecture }} + PROJECT_SRC_PATH: ${{ inputs.project-src-path }} + LOW_DISK_SPACE: 'True' + BAD_BUILD_CHECK: ${{ inputs.bad-build-check }} + CIFUZZ_DEBUG: 'True' + CFL_PLATFORM: 'github' + KEEP_UNAFFECTED_FUZZ_TARGETS: ${{ inputs.keep-unaffected-fuzz-targets }} + OUTPUT_SARIF: ${{ inputs.output-sarif }} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/actions/run_fuzzers/action.yml b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/actions/run_fuzzers/action.yml new file mode 100644 index 0000000000000000000000000000000000000000..197aed2b02e4af2b3725040a5ce46f258337af82 --- 
/dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/actions/run_fuzzers/action.yml @@ -0,0 +1,82 @@ +# action.yml +name: 'run-fuzzers' +description: 'Runs fuzz target binaries for a specified length of time.' +inputs: + oss-fuzz-project-name: + description: 'The OSS-Fuzz project name.' + required: true + language: + description: 'Programming language project is written in.' + required: false + default: 'c++' + fuzz-seconds: + description: 'The total time allotted for fuzzing in seconds.' + required: true + default: 600 + dry-run: + description: 'If set, run the action without actually reporting a failure.' + default: false + sanitizer: + description: 'The sanitizer to run the fuzzers with.' + default: 'address' + mode: + description: | + The mode to run the fuzzers with ("code-change", "batch", "coverage", or "prune"). + "code-change" is for fuzzing a pull request or commit. + "batch" is for non-interactive fuzzing of an entire project. + "coverage" is for coverage generation. + "prune" is for corpus pruning. + required: false + default: 'code-change' + github-token: + description: | + Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET + You should use "secrets.GITHUB_TOKEN" in your workflow file, do not + hardcode the token. + TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361): + Document locking this down. + required: false + report-unreproducible-crashes: + description: 'If True, then unreproducible crashes will be reported.' + required: false + default: False + minimize-crashes: + description: 'If True, reportable crashes will be minimized.' + required: false + default: False + parallel-fuzzing: + description: "Whether to use all available cores for fuzzing." + required: false + default: false + output-sarif: + description: "Whether to output fuzzing results to SARIF." + required: false + default: false + report-timeouts: + description: "Whether to report fails due to timeout." 
+ required: false + default: true + report-ooms: + description: "Whether to report fails due to OOM." + required: false + default: true +runs: + using: 'docker' + image: '../../../run_fuzzers.Dockerfile' + env: + OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }} + LANGUAGE: ${{ inputs.language }} + FUZZ_SECONDS: ${{ inputs.fuzz-seconds }} + DRY_RUN: ${{ inputs.dry-run}} + SANITIZER: ${{ inputs.sanitizer }} + MODE: ${{ inputs.mode }} + GITHUB_TOKEN: ${{ inputs.github-token }} + LOW_DISK_SPACE: 'True' + REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }} + MINIMIZE_CRASHES: ${{ inputs.minimize-crashes }} + CIFUZZ_DEBUG: 'True' + CFL_PLATFORM: 'github' + PARALLEL_FUZZING: ${{ inputs.parallel-fuzzing }} + OUTPUT_SARIF: ${{ inputs.output-sarif }} + REPORT_TIMEOUTS: ${{ inputs.report-timeouts }} + REPORT_OOMS: ${{ inputs.report-ooms}} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3197ebbf1ae89dd327a2d4ea191f082360165389 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py @@ -0,0 +1,113 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for affected_fuzz_targets.py""" +import os +import shutil +import tempfile +import unittest +from unittest import mock + +import parameterized + +import affected_fuzz_targets +import clusterfuzz_deployment +import get_coverage +import test_helpers +import workspace_utils + +# pylint: disable=protected-access + +# NOTE: This integration test relies on +# https://github.com/google/oss-fuzz/tree/master/projects/example project. +EXAMPLE_PROJECT = 'example' + +EXAMPLE_FILE_CHANGED = 'test.txt' + +TEST_DATA_OUT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), + 'test_data', 'build-out') + + +class RemoveUnaffectedFuzzTargetsTest(unittest.TestCase): + """Tests remove_unaffected_fuzzers.""" + + TEST_FUZZER_1 = os.path.join(TEST_DATA_OUT_PATH, 'example_crash_fuzzer') + TEST_FUZZER_2 = os.path.join(TEST_DATA_OUT_PATH, 'example_nocrash_fuzzer') + + # yapf: disable + @parameterized.parameterized.expand([ + # Tests specific affected fuzzers are kept. + ([[EXAMPLE_FILE_CHANGED], None], 2,), + + # Tests specific affected fuzzer is kept. + ([[EXAMPLE_FILE_CHANGED], ['not/a/real/file']], 1), + + # Tests all fuzzers are kept if none are deemed affected. + ([None, None], 2), + + # Tests that multiple fuzzers are kept if multiple fuzzers are affected. + ([[EXAMPLE_FILE_CHANGED], [EXAMPLE_FILE_CHANGED]], 2), + ]) + # yapf: enable + def test_remove_unaffected_fuzz_targets(self, side_effect, expected_dir_len): + """Tests that remove_unaffected_fuzzers has the intended effect.""" + config = test_helpers.create_run_config( + cfl_platform='github', + oss_fuzz_project_name=EXAMPLE_PROJECT, + workspace='/workspace') + workspace = workspace_utils.Workspace(config) + deployment = clusterfuzz_deployment.get_clusterfuzz_deployment( + config, workspace) + # We can't use fakefs in this test because this test executes + # utils.is_fuzz_target_local. This function relies on the executable bit + # being set, which doesn't work properly in fakefs. 
+ with tempfile.TemporaryDirectory() as tmp_dir, mock.patch( + 'get_coverage.OSSFuzzCoverage.get_files_covered_by_target' + ) as mock_get_files: + with mock.patch('get_coverage._get_oss_fuzz_fuzzer_stats_dir_url', + return_value=1): + mock_get_files.side_effect = side_effect + shutil.copy(self.TEST_FUZZER_1, tmp_dir) + shutil.copy(self.TEST_FUZZER_2, tmp_dir) + + affected_fuzz_targets.remove_unaffected_fuzz_targets( + deployment, tmp_dir, [EXAMPLE_FILE_CHANGED], '') + self.assertEqual(expected_dir_len, len(os.listdir(tmp_dir))) + + +class IsFuzzTargetAffectedTest(unittest.TestCase): + """Tests for is_fuzz_target_affected.""" + + def setUp(self): + self.fuzz_target_path = '/fuzz_target' + + def test_relative_paths(self): + """Tests that is_fuzz_target_affected works as intended when the covered + files are relative paths.""" + with mock.patch.object( + get_coverage.FilesystemCoverage, + 'get_files_covered_by_target', + ) as get_files_covered_by_target: + get_files_covered_by_target.return_value = [ + '/work/build/../../src/systemd/src/basic/alloc-util.c' + ] + coverage = get_coverage.FilesystemCoverage('/', '/') + + self.assertTrue( + affected_fuzz_targets.is_fuzz_target_affected( + coverage, self.fuzz_target_path, + ['/src/systemd/src/basic/alloc-util.c'])) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/base_runner_utils.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/base_runner_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d04a54adb18a2bcf7faf5402a93d5b078367d2e9 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/base_runner_utils.py @@ -0,0 +1,33 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for scripts from ghcr.io/aixcc-finals/base-runner."""

import os

import config_utils


def get_env(config, workspace):
  """Returns a dictionary containing the current environment with additional env
  vars set to values needed to run a fuzzer."""
  env = os.environ.copy()
  env.update({
      'SANITIZER': config.sanitizer,
      'FUZZING_LANGUAGE': config.language,
      'OUT': workspace.out,
      'CIFUZZ': 'True',
      'FUZZING_ENGINE': config_utils.DEFAULT_ENGINE,
      'ARCHITECTURE': config.architecture,
      # Do this so we don't fail in tests.
      'FUZZER_ARGS': '-rss_limit_mb=2560 -timeout=25',
  })
  return env
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module used by CI tools in order to interact with fuzzers. This module helps
CI tools to build fuzzers."""

import logging
import os
import sys

import affected_fuzz_targets
import base_runner_utils
import clusterfuzz_deployment
import continuous_integration
import docker
import logs
import workspace_utils

# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import helper
import utils

logs.init()


def check_project_src_path(project_src_path):
  """Returns True if |project_src_path| exists."""
  if not os.path.exists(project_src_path):
    logging.error(
        'PROJECT_SRC_PATH: %s does not exist. '
        'Are you mounting it correctly?', project_src_path)
    return False
  return True


# pylint: disable=too-many-arguments


class Builder:  # pylint: disable=too-many-instance-attributes
  """Class for fuzzer builders."""

  def __init__(self, config, ci_system):
    self.config = config
    self.ci_system = ci_system
    self.workspace = workspace_utils.Workspace(config)
    self.workspace.initialize_dir(self.workspace.out)
    self.workspace.initialize_dir(self.workspace.work)
    self.clusterfuzz_deployment = (
        clusterfuzz_deployment.get_clusterfuzz_deployment(
            self.config, self.workspace))
    self.image_repo_path = None
    self.host_repo_path = None
    self.repo_manager = None

  def build_image_and_checkout_src(self):
    """Builds the project builder image and checkout source code for the patch
    we want to fuzz (if necessary). Returns True on success."""
    result = self.ci_system.prepare_for_fuzzer_build()
    if not result.success:
      return False
    self.image_repo_path = result.image_repo_path
    self.repo_manager = result.repo_manager
    if self.config.output_sarif:
      self.workspace.make_repo_for_sarif(self.repo_manager)
    logging.info('repo_dir: %s.', self.repo_manager.repo_dir)
    self.host_repo_path = self.repo_manager.repo_dir
    return True

  def build_fuzzers(self):
    """Moves the source code we want to fuzz into the project builder and
    builds the fuzzers from that source code. Returns True on success."""
    docker_args, docker_container = docker.get_base_docker_run_args(
        self.workspace, self.config.sanitizer, self.config.language,
        self.config.architecture, self.config.docker_in_docker)
    if not docker_container:
      docker_args.extend(
          _get_docker_build_fuzzers_args_not_container(self.host_repo_path))

    build_command = self.ci_system.get_build_command(self.host_repo_path,
                                                     self.image_repo_path)

    # Set extra environment variables so that they are visible to the build.
    for key in self.config.extra_environment_variables:
      # Don't specify their value in case they get echoed.
      docker_args.extend(['-e', key])

    docker_args.extend([
        docker.get_project_image_name(self.config.oss_fuzz_project_name),
        '/bin/bash',
        '-c',
        build_command,
    ])
    logging.info('Building with %s sanitizer.', self.config.sanitizer)

    # TODO(metzman): Stop using helper.docker_run so we can get rid of
    # docker.get_base_docker_run_args and merge its contents into
    # docker.get_base_docker_run_command.
    if not helper.docker_run(docker_args):
      logging.error('Building fuzzers failed.')
      return False

    return True

  def upload_build(self):
    """Uploads the build to ClusterFuzz if the config requests it. Always
    returns True so it can be chained in build()."""
    if self.config.upload_build:
      self.clusterfuzz_deployment.upload_build(
          self.repo_manager.get_current_commit())

    return True

  def check_fuzzer_build(self):
    """Checks the fuzzer build. Returns True on success or if config specifies
    to skip check."""
    if not self.config.bad_build_check:
      return True

    return check_fuzzer_build(self.config)

  def build(self):
    """Builds the image, checkouts the source (if needed), builds the fuzzers
    and then removes the unaffected fuzzers. Returns True on success."""
    # Each step returns False on failure; stop at the first failing step.
    methods = [
        self.build_image_and_checkout_src,
        self.build_fuzzers,
        self.remove_unaffected_fuzz_targets,
        self.upload_build,
        self.check_fuzzer_build,
    ]
    for method in methods:
      if not method():
        return False
    return True

  def remove_unaffected_fuzz_targets(self):
    """Removes the fuzzers unaffected by the patch."""
    if self.config.keep_unaffected_fuzz_targets:
      logging.info('Not removing unaffected fuzz targets.')
      return True

    logging.info('Removing unaffected fuzz targets.')
    changed_files = self.ci_system.get_changed_code_under_test(
        self.repo_manager)
    affected_fuzz_targets.remove_unaffected_fuzz_targets(
        self.clusterfuzz_deployment, self.workspace.out, changed_files,
        self.image_repo_path)
    return True


def build_fuzzers(config):
  """Builds all of the fuzzers for a specific OSS-Fuzz project.

  Args:
    config: The configuration object for building fuzzers.

  Returns:
    True if build succeeded.
  """
  # Do some quick validation.
  if config.project_src_path and not check_project_src_path(
      config.project_src_path):
    return False

  # Get the builder and then build the fuzzers.
  ci_system = continuous_integration.get_ci(config)
  logging.info('ci_system: %s.', ci_system)
  builder = Builder(config, ci_system)
  return builder.build()


def check_fuzzer_build(config):
  """Checks the integrity of the built fuzzers.

  Args:
    config: The config object.

  Returns:
    True if fuzzers pass OSS-Fuzz's build check.
  """
  workspace = workspace_utils.Workspace(config)
  if not os.path.exists(workspace.out):
    logging.error('Invalid out directory: %s.', workspace.out)
    return False
  if not os.listdir(workspace.out):
    logging.error('No fuzzers found in out directory: %s.', workspace.out)
    return False

  env = base_runner_utils.get_env(config, workspace)
  if config.allowed_broken_targets_percentage is not None:
    # Robustness fix: environment variable values must be strings, but the
    # config value may arrive as an int; subprocess rejects non-string env
    # values, so convert explicitly.
    env['ALLOWED_BROKEN_TARGETS_PERCENTAGE'] = str(
        config.allowed_broken_targets_percentage)

  stdout, stderr, retcode = utils.execute('test_all.py', env=env)
  print(f'Build check: stdout: {stdout}\nstderr: {stderr}')
  if retcode == 0:
    logging.info('Build check passed.')
    return True
  logging.error('Build check failed.')
  return False


def _get_docker_build_fuzzers_args_not_container(host_repo_path):
  """Returns arguments to the docker build arguments that are needed to use
  |host_repo_path| when the host of the OSS-Fuzz builder container is not
  another container."""
  return ['-v', f'{host_repo_path}:{host_repo_path}']
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests the functionality of the cifuzz module.""" +import os +import shutil +import sys +import tempfile +import unittest +from unittest import mock + +import parameterized + +# pylint: disable=wrong-import-position +INFRA_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(INFRA_DIR) + +OSS_FUZZ_DIR = os.path.dirname(INFRA_DIR) + +import build_fuzzers +import continuous_integration +import repo_manager +import test_helpers + +# NOTE: This integration test relies on +# https://github.com/google/oss-fuzz/tree/master/projects/example project. +EXAMPLE_PROJECT = 'example' + +# Location of data used for testing. +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), + 'test_data') + +# An example fuzzer that triggers an crash. +# Binary is a copy of the example project's do_stuff_fuzzer and can be +# generated by running "python3 infra/helper.py build_fuzzers example". +EXAMPLE_CRASH_FUZZER = 'example_crash_fuzzer' + +# An example fuzzer that does not trigger a crash. +# Binary is a modified version of example project's do_stuff_fuzzer. It is +# created by removing the bug in my_api.cpp. +EXAMPLE_NOCRASH_FUZZER = 'example_nocrash_fuzzer' + +# A fuzzer to be built in build_fuzzers integration tests. +EXAMPLE_BUILD_FUZZER = 'do_stuff_fuzzer' + +# pylint: disable=no-self-use,protected-access,too-few-public-methods,unused-argument + + +def docker_command_has_env_var_arg(command, env_var_arg): + """Returns True if a docker command has a specific env var argument.""" + for idx, element in enumerate(command): + if idx == 0: + # We're looking for the variable which can't be the first argument. 
+ continue + + if element == env_var_arg and command[idx - 1] == '-e': + return True + return False + + +class BuildFuzzersTest(unittest.TestCase): + """Unit tests for build_fuzzers.""" + + @mock.patch('build_specified_commit.detect_main_repo', + return_value=('example.com', '/path')) + @mock.patch('repo_manager._clone', return_value=None) + @mock.patch('continuous_integration.checkout_specified_commit') + @mock.patch('helper.docker_run', return_value=False) # We want to quit early. + def test_cifuzz_env_var(self, mock_docker_run, _, __, ___): + """Tests that the CIFUZZ env var is set.""" + + with tempfile.TemporaryDirectory() as tmp_dir: + build_fuzzers.build_fuzzers( + test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name=EXAMPLE_PROJECT, + workspace=tmp_dir, + pr_ref='refs/pull/1757/merge')) + + docker_run_command = mock_docker_run.call_args_list[0][0][0] + + self.assertTrue( + docker_command_has_env_var_arg(docker_run_command, 'CIFUZZ=True')) + + @mock.patch('build_specified_commit.detect_main_repo', + return_value=('example.com', '/path')) + @mock.patch('repo_manager._clone', return_value=None) + @mock.patch('continuous_integration.checkout_specified_commit') + @mock.patch('helper.docker_run', return_value=False) # We want to quit early. 
+ def test_extra_env_var(self, mock_docker_run, _, __, ___): + """Tests that the CIFUZZ env var is set.""" + + extra_env_var = 'CFL_EXTRA_TOKEN' + with tempfile.TemporaryDirectory() as tmp_dir: + with mock.patch.dict(os.environ, {extra_env_var: 'BLAH'}): + build_fuzzers.build_fuzzers( + test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name=EXAMPLE_PROJECT, + workspace=tmp_dir, + pr_ref='refs/pull/1757/merge')) + + docker_run_command = mock_docker_run.call_args_list[0][0][0] + self.assertTrue( + docker_command_has_env_var_arg(docker_run_command, extra_env_var)) + + +class InternalGithubBuildTest(unittest.TestCase): + """Tests for building OSS-Fuzz projects on GitHub actions.""" + PROJECT_REPO_NAME = 'myproject' + SANITIZER = 'address' + GIT_SHA = 'fake' + PR_REF = 'fake' + + def _create_builder(self, tmp_dir, oss_fuzz_project_name='myproject'): + """Creates an InternalGithubBuilder and returns it.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=oss_fuzz_project_name, + project_repo_name=self.PROJECT_REPO_NAME, + workspace=tmp_dir, + sanitizer=self.SANITIZER, + git_sha=self.GIT_SHA, + pr_ref=self.PR_REF, + cfl_platform='github') + cfl_platform = continuous_integration.get_ci(config) + builder = build_fuzzers.Builder(config, cfl_platform) + builder.repo_manager = repo_manager.RepoManager('/fake') + return builder + + @mock.patch('helper.docker_run', return_value=True) + @mock.patch('continuous_integration.checkout_specified_commit', + side_effect=None) + def test_correct_host_repo_path(self, _, __): + """Tests that the correct self.host_repo_path is set by + build_image_and_checkout_src. 
Specifically, we want the name of the + directory the repo is in to match the name used in the docker + image/container, so that it will replace the host's copy properly.""" + image_repo_path = '/src/repo_dir' + with tempfile.TemporaryDirectory() as tmp_dir, mock.patch( + 'build_specified_commit.detect_main_repo', + return_value=('inferred_url', image_repo_path)): + builder = self._create_builder(tmp_dir) + builder.build_image_and_checkout_src() + + self.assertEqual(os.path.basename(builder.host_repo_path), + os.path.basename(image_repo_path)) + + @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build', + return_value=True) + def test_upload_build_disabled(self, mock_upload_build): + """Test upload build (disabled).""" + with tempfile.TemporaryDirectory() as tmp_dir: + builder = self._create_builder(tmp_dir) + builder.upload_build() + + mock_upload_build.assert_not_called() + + @mock.patch('repo_manager.RepoManager.get_current_commit', + return_value='commit') + @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build', + return_value=True) + def test_upload_build(self, mock_upload_build, mock_get_current_commit): + """Test upload build.""" + with tempfile.TemporaryDirectory() as tmp_dir: + builder = self._create_builder(tmp_dir, oss_fuzz_project_name='') + builder.config.upload_build = True + builder.upload_build() + + mock_upload_build.assert_called_with('commit') + + +@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'), + 'INTEGRATION_TESTS=1 not set') +class BuildFuzzersIntegrationTest(unittest.TestCase): + """Integration tests for build_fuzzers.""" + + def setUp(self): + self.temp_dir_ctx_manager = test_helpers.docker_temp_dir() + self.workspace = self.temp_dir_ctx_manager.__enter__() + self.out_dir = os.path.join(self.workspace, 'build-out') + test_helpers.patch_environ(self) + + base_runner_path = os.path.join(INFRA_DIR, 'base-images', 'base-runner') + os.environ['PATH'] = os.environ['PATH'] + os.pathsep + base_runner_path + + def 
 tearDown(self): + self.temp_dir_ctx_manager.__exit__(None, None, None) + + def test_external_github_project(self): + """Tests building fuzzers from an external project on Github.""" + project_repo_name = 'external-project' + git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git' + # This test is dependent on the state of + # github.com/jonathanmetzman/cifuzz-external-example. + config = test_helpers.create_build_config( + project_repo_name=project_repo_name, + workspace=self.workspace, + git_url=git_url, + git_sha='HEAD', + cfl_platform='github', + base_commit='HEAD^1') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_external_generic_project(self): + """Tests building fuzzers from an external project not on Github.""" + project_repo_name = 'cifuzz-external-example' + git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git' + # This test is dependent on the state of + # github.com/jonathanmetzman/cifuzz-external-example. 
+ manager = repo_manager.clone_repo_and_get_manager( + 'https://github.com/jonathanmetzman/cifuzz-external-example', + self.workspace) + project_src_path = manager.repo_dir + config = test_helpers.create_build_config( + project_repo_name=project_repo_name, + workspace=self.workspace, + git_url=git_url, + filestore='no_filestore', + git_sha='HEAD', + project_src_path=project_src_path, + base_commit='HEAD^1') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_valid_commit(self): + """Tests building fuzzers with valid inputs.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523', + base_commit='da0746452433dc18bae699e355a9821285d863c8', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_valid_pull_request(self): + """Tests building fuzzers with valid pull request.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + pr_ref='refs/pull/1757/merge', + base_ref='master', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_invalid_pull_request(self): + """Tests building fuzzers with invalid pull request.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + pr_ref='ref-1/merge', + base_ref='master', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + + def test_invalid_oss_fuzz_project_name(self): + """Tests building fuzzers with invalid project 
name.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name='not_a_valid_project', + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + def test_invalid_repo_name(self): + """Tests building fuzzers with invalid repo name.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='not-real-repo', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + def test_invalid_git_sha(self): + """Tests building fuzzers with invalid commit SHA.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='', + cfl_platform='github') + with self.assertRaises(AssertionError): + build_fuzzers.build_fuzzers(config) + + def test_invalid_workspace(self): + """Tests building fuzzers with invalid workspace.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=os.path.join(self.workspace, 'not', 'a', 'dir'), + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + +class CheckFuzzerBuildTest(unittest.TestCase): + """Tests the check_fuzzer_build function in the cifuzz module.""" + + SANITIZER = 'address' + LANGUAGE = 'c++' + + def setUp(self): + self.temp_dir_obj = tempfile.TemporaryDirectory() + workspace_path = os.path.join(self.temp_dir_obj.name, 'workspace') + self.config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + sanitizer=self.SANITIZER, + language=self.LANGUAGE, + workspace=workspace_path, + pr_ref='refs/pull/1757/merge') + self.workspace = test_helpers.create_workspace(workspace_path) + shutil.copytree(TEST_DATA_PATH, workspace_path) + 
test_helpers.patch_environ(self, runner=True) + + def tearDown(self): + self.temp_dir_obj.cleanup() + + def test_correct_fuzzer_build(self): + """Checks check_fuzzer_build function returns True for valid fuzzers.""" + self.assertTrue(build_fuzzers.check_fuzzer_build(self.config)) + + def test_not_a_valid_path(self): + """Tests that False is returned when a nonexistent path is given.""" + self.config.workspace = 'not/a/valid/path' + self.assertFalse(build_fuzzers.check_fuzzer_build(self.config)) + + def test_no_valid_fuzzers(self): + """Tests that False is returned when an empty directory is given.""" + with tempfile.TemporaryDirectory() as tmp_dir: + self.config.workspace = tmp_dir + os.mkdir(os.path.join(self.config.workspace, 'build-out')) + self.assertFalse(build_fuzzers.check_fuzzer_build(self.config)) + + @mock.patch('utils.execute', return_value=(None, None, 0)) + def test_allow_broken_fuzz_targets_percentage(self, mock_execute): + """Tests that ALLOWED_BROKEN_TARGETS_PERCENTAGE is set when running + docker if passed to check_fuzzer_build.""" + percentage = '0' + self.config.allowed_broken_targets_percentage = percentage + build_fuzzers.check_fuzzer_build(self.config) + self.assertEqual( + mock_execute.call_args[1]['env']['ALLOWED_BROKEN_TARGETS_PERCENTAGE'], + percentage) + + +@unittest.skip('Test is too long to be run with presubmit.') +class BuildSantizerIntegrationTest(unittest.TestCase): + """Integration tests for the build_fuzzers. 
+ Note: This test relies on "curl" being an OSS-Fuzz project.""" + PROJECT_NAME = 'curl' + PR_REF = 'fake_pr' + + @classmethod + def _create_config(cls, tmp_dir, sanitizer): + return test_helpers.create_build_config( + oss_fuzz_project_name=cls.PROJECT_NAME, + project_repo_name=cls.PROJECT_NAME, + workspace=tmp_dir, + pr_ref=cls.PR_REF, + sanitizer=sanitizer) + + @parameterized.parameterized.expand([('memory',), ('undefined',)]) + def test_valid_project_curl(self, sanitizer): + """Tests that MSAN can be detected from project.yaml""" + with tempfile.TemporaryDirectory() as tmp_dir: + self.assertTrue( + build_fuzzers.build_fuzzers(self._create_config(tmp_dir, sanitizer))) + + +class GetDockerBuildFuzzersArgsNotContainerTest(unittest.TestCase): + """Tests that _get_docker_build_fuzzers_args_not_container works as + intended.""" + + def test_get_docker_build_fuzzers_args_no_container(self): + """Tests that _get_docker_build_fuzzers_args_not_container works + as intended.""" + host_repo_path = '/host/repo' + result = build_fuzzers._get_docker_build_fuzzers_args_not_container( + host_repo_path) + expected_result = ['-v', '/host/repo:/host/repo'] + self.assertEqual(result, expected_result) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz-base/Dockerfile b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz-base/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..6f3ada4f2762c6e6ddf058b6be9a67b31c154594 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz-base/Dockerfile @@ -0,0 +1,44 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +FROM ghcr.io/aixcc-finals/base-runner + +RUN apt-get update && \ + apt-get install -y systemd && \ + wget https://download.docker.com/linux/ubuntu/dists/focal/pool/stable/amd64/docker-ce-cli_20.10.8~3-0~ubuntu-focal_amd64.deb -O /tmp/docker-ce.deb && \ + dpkg -i /tmp/docker-ce.deb && \ + rm /tmp/docker-ce.deb + +ENV PATH=/opt/gcloud/google-cloud-sdk/bin/:$PATH +ENV OSS_FUZZ_ROOT=/opt/oss-fuzz + +# Do this step before copying to make rebuilding faster when developing. +COPY ./infra/cifuzz/requirements.txt /tmp/requirements.txt +RUN python3 -m pip install -r /tmp/requirements.txt && rm /tmp/requirements.txt + +ADD . ${OSS_FUZZ_ROOT} +# Don't use the default npm location since jazzer.js can break us. +# This means javascript needed by cifuzz/clusterfuzzlite must be executed in +# OSS_FUZZ_ROOT. +RUN cd ${OSS_FUZZ_ROOT} && npm install ${OSS_FUZZ_ROOT}/infra/cifuzz + + +ENV PYTHONUNBUFFERED=1 + +# Python file to execute when the docker container starts up. +# We can't use the env var $OSS_FUZZ_ROOT here. Since it's a constant env var, +# just expand to '/opt/oss-fuzz'. 
+ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/cifuzz_combined_entrypoint.py"] diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py new file mode 100644 index 0000000000000000000000000000000000000000..920e32e42965c314c7fcc5c7808315bb68e051a0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py @@ -0,0 +1,40 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Builds fuzzers and runs fuzzers. Entrypoint used for external users""" +import logging +import sys + +import build_fuzzers_entrypoint +import run_fuzzers_entrypoint + + +def main(): + """Builds and runs fuzzers for CI tools. + + NOTE: Any crash report will be in the filepath: $WORKSPACE/out/testcase + This can be used with GitHub's upload-artifact action to surface the logs. + + Required environment variables: + Returns: + 0 on success or 1 on failure. 
 + """ + logging.debug("Using cifuzz_combined_entrypoint.") + result = build_fuzzers_entrypoint.build_fuzzers_entrypoint() + if result != 0: + return result + return run_fuzzers_entrypoint.run_fuzzers_entrypoint() + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_end_to_end_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_end_to_end_test.py new file mode 100644 index 0000000000000000000000000000000000000000..30e28beda330c621ac3a0f5b9d1ee2dc11a5e463 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_end_to_end_test.py @@ -0,0 +1,48 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""End-to-End tests for CIFuzz.""" +import os +import unittest + +import run_cifuzz +import test_helpers + +CIFUZZ_DIR = os.path.dirname(os.path.abspath(__file__)) +EXTERNAL_PROJECT_PATH = os.path.join(CIFUZZ_DIR, 'test_data', + 'external-project') + + +# This test will fail if not run as root because the fuzzer build process +# creates binaries that only root can write to. +# Use a separate env var to keep this separate from integration tests which +# don't have this annoying property. 
+@unittest.skipIf(not os.getenv('END_TO_END_TESTS'), + 'END_TO_END_TESTS=1 not set') +class EndToEndTest(unittest.TestCase): + """End-to-End tests for CIFuzz.""" + + def setUp(self): + test_helpers.patch_environ(self, runner=True) + + def test_simple(self): + """Simple end-to-end test using run_cifuzz.main().""" + os.environ['REPOSITORY'] = 'external-project' + os.environ['PROJECT_SRC_PATH'] = EXTERNAL_PROJECT_PATH + os.environ['FILESTORE'] = 'no_filestore' + os.environ['NO_CLUSTERFUZZ_DEPLOYMENT'] = 'True' + + with test_helpers.docker_temp_dir() as temp_dir: + os.environ['WORKSPACE'] = temp_dir + # TODO(metzman): Verify the crash, affected fuzzers, and other things. + self.assertEqual(run_cifuzz.main(), 1) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cloudbuild.yaml b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cloudbuild.yaml new file mode 100644 index 0000000000000000000000000000000000000000..03b1961e50ff9c701133753e918179177323726e --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/cloudbuild.yaml @@ -0,0 +1,52 @@ +# TODO(metzman): Get rid of cifuzz-build-fuzzers and cifuzz-run-fuzzers. +steps: +- name: 'gcr.io/cloud-builders/docker' + args: + - build + - '-t' + - ghcr.io/aixcc-finals/cifuzz-base + - '-t' + - ghcr.io/aixcc-finals/cifuzz-base:v1 + - '-f' + - infra/cifuzz/cifuzz-base/Dockerfile + - . 
+- name: 'gcr.io/cloud-builders/docker' + args: + - build + - '-t' + - ghcr.io/aixcc-finals/cifuzz-build-fuzzers + - '-t' + - ghcr.io/aixcc-finals/cifuzz-build-fuzzers:v1 + - '-t' + - ghcr.io/aixcc-finals/clusterfuzzlite-build-fuzzers + - '-t' + - ghcr.io/aixcc-finals/clusterfuzzlite-build-fuzzers:v1 + - '-f' + - infra/build_fuzzers.Dockerfile + - infra +- name: 'gcr.io/cloud-builders/docker' + args: + - build + - '-t' + - ghcr.io/aixcc-finals/cifuzz-run-fuzzers + - '-t' + - ghcr.io/aixcc-finals/cifuzz-run-fuzzers:v1 + - '-t' + - ghcr.io/aixcc-finals/clusterfuzzlite-run-fuzzers + - '-t' + - ghcr.io/aixcc-finals/clusterfuzzlite-run-fuzzers:v1 + - '-f' + - infra/run_fuzzers.Dockerfile + - infra +images: +- ghcr.io/aixcc-finals/cifuzz-base +- ghcr.io/aixcc-finals/cifuzz-base:v1 +- ghcr.io/aixcc-finals/cifuzz-run-fuzzers +- ghcr.io/aixcc-finals/cifuzz-run-fuzzers:v1 +- ghcr.io/aixcc-finals/cifuzz-build-fuzzers +- ghcr.io/aixcc-finals/cifuzz-build-fuzzers:v1 +- ghcr.io/aixcc-finals/clusterfuzzlite-build-fuzzers +- ghcr.io/aixcc-finals/clusterfuzzlite-build-fuzzers:v1 +- ghcr.io/aixcc-finals/clusterfuzzlite-run-fuzzers +- ghcr.io/aixcc-finals/clusterfuzzlite-run-fuzzers:v1 +timeout: 1800s diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/config_utils.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/config_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a4a3cb8f7ad9265f3eaaac9af4a810d412e3fc00 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/config_utils.py @@ -0,0 +1,283 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for getting the configuration CIFuzz needs to run.""" + +import enum +import importlib +import logging +import os +import sys + +import environment + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import platform_config +import constants + +SANITIZERS = ['address', 'memory', 'undefined', 'coverage'] + +# TODO(metzman): Set these on config objects so there's one source of truth. +DEFAULT_ENGINE = 'libfuzzer' + +# This module deals a lot with env variables. Many of these will be set by users +# and others beyond CIFuzz's control. Thus, you should be careful about using +# the environment.py helpers for getting env vars, since it can cause values +# that should be interpreted as strings to be returned as other types (bools or +# ints for example). The environment.py helpers should not be used for values +# that are supposed to be strings. + + +def _get_sanitizer(): + return os.getenv('SANITIZER', constants.DEFAULT_SANITIZER).lower() + + +def _get_architecture(): + return os.getenv('ARCHITECTURE', constants.DEFAULT_ARCHITECTURE).lower() + + +def _is_dry_run(): + """Returns True if configured to do a dry run.""" + return environment.get_bool('DRY_RUN', False) + + +def _get_language(): + """Returns the project language.""" + # Get language from environment. 
 We took this approach because the convenience + # given to OSS-Fuzz users by not making them specify the language again (and + # getting it from the project.yaml) is outweighed by the complexity in + # implementing this. A lot of the complexity comes from our unittests not + # setting a proper project at this point. + return os.getenv('LANGUAGE', constants.DEFAULT_LANGUAGE) + + +def _get_extra_environment_variables(): + """Gets extra environment variables specified by the user with + CFL_EXTRA_$NAME=$VALUE.""" + return [key for key in os.environ if key.startswith('CFL_EXTRA_')] + + +# pylint: disable=too-many-instance-attributes + + +class ConfigError(Exception): + """Error for invalid configuration.""" + + +class BaseConfig: + """Object containing constant configuration for CIFuzz.""" + + class Platform(enum.Enum): + """Enum representing the different platforms CIFuzz runs on.""" + EXTERNAL_GITHUB = 0 # Non-OSS-Fuzz on GitHub actions. + INTERNAL_GITHUB = 1 # OSS-Fuzz on GitHub actions. + INTERNAL_GENERIC_CI = 2 # OSS-Fuzz on any CI. + EXTERNAL_GENERIC_CI = 3 # Non-OSS-Fuzz on any CI. + + @property + def is_github(self): + """Returns True if running on GitHub.""" + return self.cfl_platform == 'github' + + def __init__(self): + # Need to set these before calling self.platform. 
+ self.oss_fuzz_project_name = os.getenv('OSS_FUZZ_PROJECT_NAME') + self.cfl_platform = os.getenv('CFL_PLATFORM') + logging.debug('Is github: %s.', self.is_github) + + self.platform_conf = _get_platform_config(self.cfl_platform) + self.base_commit = self.platform_conf.base_commit + self.base_ref = self.platform_conf.base_ref + self.pr_ref = self.platform_conf.pr_ref + self.workspace = self.platform_conf.workspace + self.project_src_path = self.platform_conf.project_src_path + self.actor = self.platform_conf.actor + self.token = self.platform_conf.token + self.project_repo_owner = self.platform_conf.project_repo_owner + self.project_repo_name = self.platform_conf.project_repo_name + self.filestore = self.platform_conf.filestore + + # This determines if builds are done using docker in docker + # rather than the normal method which is sibling containers. + self.docker_in_docker = self.platform_conf.docker_in_docker + + self.dry_run = _is_dry_run() # Check if failures should not be reported. + self.sanitizer = _get_sanitizer() + self.architecture = _get_architecture() + self.language = _get_language() + self.low_disk_space = environment.get_bool('LOW_DISK_SPACE', False) + + self.git_store_repo = os.environ.get('GIT_STORE_REPO') + self.git_store_branch = os.environ.get('GIT_STORE_BRANCH') + self.git_store_branch_coverage = os.environ.get('GIT_STORE_BRANCH_COVERAGE', + self.git_store_branch) + self.cloud_bucket = os.environ.get('CLOUD_BUCKET') + self.no_clusterfuzz_deployment = environment.get_bool( + 'NO_CLUSTERFUZZ_DEPLOYMENT', False) + self.build_integration_path = ( + constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH) + + self.parallel_fuzzing = environment.get_bool('PARALLEL_FUZZING', False) + self.extra_environment_variables = _get_extra_environment_variables() + self.output_sarif = environment.get_bool('OUTPUT_SARIF', False) + + # TODO(metzman): Fix tests to create valid configurations and get rid of + # CIFUZZ_TEST here and in presubmit.py. 
 + if not os.getenv('CIFUZZ_TEST') and not self.validate(): + raise ConfigError('Invalid Configuration.') + + def validate(self): + """Returns False if the configuration is invalid.""" + # Do validation here so that unittests don't need to make a fully-valid + # config. + # pylint: disable=too-many-return-statements + if not self.workspace: + logging.error('Must set WORKSPACE.') + return False + + if self.sanitizer not in SANITIZERS: + logging.error('Invalid SANITIZER: %s. Must be one of: %s.', + self.sanitizer, SANITIZERS) + return False + + if self.architecture not in constants.ARCHITECTURES: + logging.error('Invalid ARCHITECTURE: %s. Must be one of: %s.', + self.architecture, constants.ARCHITECTURES) + return False + + if self.architecture == 'i386' and self.sanitizer != 'address': + logging.error( + 'ARCHITECTURE=i386 can be used with SANITIZER=address only.') + return False + + if self.language not in constants.LANGUAGES: + logging.error('Invalid LANGUAGE: %s. Must be one of: %s.', self.language, + constants.LANGUAGES) + return False + + if not self.project_repo_name: + logging.error('Must set REPOSITORY.') + return False + + return True + + @property + def is_internal(self): + """Returns True if this is an OSS-Fuzz project.""" + return bool(self.oss_fuzz_project_name) + + @property + def platform(self): + """Returns the platform CIFuzz is running on.""" + if not self.is_internal: + if not self.is_github: + return self.Platform.EXTERNAL_GENERIC_CI + return self.Platform.EXTERNAL_GITHUB + + if self.is_github: + return self.Platform.INTERNAL_GITHUB + return self.Platform.INTERNAL_GENERIC_CI + + @property + def is_coverage(self): + """Returns True if this CIFuzz run (building fuzzers and running them) is for + generating a coverage report.""" + return self.sanitizer == 'coverage' + + +def _get_platform_config(cfl_platform): + """Returns the CI environment object for |cfl_platform|.""" + module_name = f'platform_config.{cfl_platform}' + try: + cls = 
importlib.import_module(module_name).PlatformConfig + except ImportError: + cls = platform_config.BasePlatformConfig + return cls() + + +class RunFuzzersConfig(BaseConfig): + """Class containing constant configuration for running fuzzers in CIFuzz.""" + + MODES = ['batch', 'code-change', 'coverage', 'prune'] + + def __init__(self): + super().__init__() + # TODO(metzman): Pick a better default for pruning. + self.fuzz_seconds = int(os.environ.get('FUZZ_SECONDS', 600)) + self.mode = os.environ.get('MODE', 'code-change').lower() + if self.is_coverage: + self.mode = 'coverage' + + self.report_unreproducible_crashes = environment.get_bool( + 'REPORT_UNREPRODUCIBLE_CRASHES', False) + + self.minimize_crashes = environment.get_bool('MINIMIZE_CRASHES', False) + if self.mode == 'batch': + logging.warning( + 'Minimizing crashes reduces fuzzing time in batch fuzzing.') + self.report_timeouts = environment.get_bool('REPORT_TIMEOUTS', False) + self.report_ooms = environment.get_bool('REPORT_OOMS', True) + self.upload_all_crashes = environment.get_bool('UPLOAD_ALL_CRASHES', False) + + # TODO(metzman): Fix tests to create valid configurations and get rid of + # CIFUZZ_TEST here and in presubmit.py. + if not os.getenv('CIFUZZ_TEST') and not self._run_config_validate(): + raise ConfigError('Invalid Run Configuration.') + + def _run_config_validate(self): + """Do extra validation on RunFuzzersConfig.__init__(). Do not name this + validate or else it will be called when using the parent's __init__ and will + fail. Returns True if valid.""" + if self.mode not in self.MODES: + logging.error('Invalid MODE: %s. Must be one of %s.', self.mode, + self.MODES) + return False + + return True + + +class BuildFuzzersConfig(BaseConfig): + """Class containing constant configuration for building fuzzers in CIFuzz.""" + + def __init__(self): + """Get the configuration from CIFuzz from the environment. 
These variables + are set by GitHub or the user.""" + super().__init__() + self.git_sha = self.platform_conf.git_sha + self.git_url = self.platform_conf.git_url + + self.allowed_broken_targets_percentage = os.getenv( + 'ALLOWED_BROKEN_TARGETS_PERCENTAGE') + self.bad_build_check = environment.get_bool('BAD_BUILD_CHECK', True) + + self.keep_unaffected_fuzz_targets = environment.get_bool( + 'KEEP_UNAFFECTED_FUZZ_TARGETS') + + self.upload_build = environment.get_bool('UPLOAD_BUILD', False) + if not self.keep_unaffected_fuzz_targets: + has_base_for_diff = (self.base_ref or self.base_commit) + if not has_base_for_diff: + logging.info( + 'Keeping all fuzzers because there is nothing to diff against.') + self.keep_unaffected_fuzz_targets = True + elif self.upload_build: + logging.info('Keeping all fuzzers because we are uploading build.') + self.keep_unaffected_fuzz_targets = True + elif self.sanitizer == 'coverage': + logging.info('Keeping all fuzzers because we are doing coverage.') + self.keep_unaffected_fuzz_targets = True + + if self.sanitizer == 'coverage': + self.bad_build_check = False diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5c4fa32895773b9a742509bc46d5f5df008e5a98 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py @@ -0,0 +1,88 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for continuous_integration_module.""" +import os +import sys +import unittest +from unittest import mock + +import continuous_integration +import docker + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import repo_manager + +# pylint: disable=no-self-use + + +class FixGitRepoForDiffTest(unittest.TestCase): + """Tests for fix_git_repo_for_diff.""" + + @mock.patch('utils.execute') + def test_fix_git_repo_for_diff(self, mock_execute): + """Tests that fix_git_repo_for_diff works as intended.""" + repo_dir = '/dir' + repo_manager_obj = repo_manager.RepoManager(repo_dir) + continuous_integration.fix_git_repo_for_diff(repo_manager_obj) + expected_command = [ + 'git', 'symbolic-ref', 'refs/remotes/origin/HEAD', + 'refs/remotes/origin/master' + ] + + mock_execute.assert_called_with(expected_command, location=repo_dir) + + +class GetBuildCommand(unittest.TestCase): + """Tests for get_build_command.""" + + def test_build_command(self): + """Tests that get_build_command works as intended.""" + self.assertEqual(continuous_integration.get_build_command(), 'compile') + + +class GetReplaceRepoAndBuildCommand(unittest.TestCase): + """Tests for get_replace_repo_and_build_command.""" + + def test_get_replace_repo_and_build_command(self): + """Tests that get_replace_repo_and_build_command works as intended.""" + host_repo_path = '/path/on/host/to/repo' + image_repo_path = '/src/repo' + command = continuous_integration.get_replace_repo_and_build_command( + host_repo_path, image_repo_path) + expected_command = ('cd / && rm -rf /src/repo/* && ' + 'cp -r /path/on/host/to/repo /src && cd - ' + '&& compile') + self.assertEqual(command, expected_command) + + +class BuildExternalProjetDockerImage(unittest.TestCase): + """Tests for build_external_project_docker_image.""" + + 
"""Module for dealing with env vars."""

import ast
import os


def _eval_value(value_string):
  """Interprets |value_string| as a Python literal and returns the result.

  Falls back to returning the raw string when it is not a valid literal
  (e.g. an arbitrary path or name).
  """
  try:
    return ast.literal_eval(value_string)
  except:  # pylint: disable=bare-except
    # Not a Python literal: treat the value as a plain string.
    return value_string


def get(env_var, default_value=None):
  """Returns the literal-evaluated value of |env_var|.

  Returns |default_value| unchanged when the variable is unset.
  """
  raw_value = os.getenv(env_var)
  return default_value if raw_value is None else _eval_value(raw_value)


def get_bool(env_var, default_value=None):
  """Returns a boolean environment variable value. This is needed because a lot
  of CIFuzz users specified 'false' for dry-run. So we need to special case
  this."""
  value = get(env_var, default_value)
  if isinstance(value, str):
    # Strings that didn't parse as literals (e.g. 'false', 'TRUE') are
    # accepted case-insensitively; anything else is a configuration error.
    lower_value = value.lower()
    allowed_values = {'true', 'false'}
    if lower_value not in allowed_values:
      raise Exception(f'Bool env var {env_var} value {value} is invalid. '
                      f'Must be one of {allowed_values}.')
    return lower_value == 'true'
  # Non-string values (ints, bools, None defaults) coerce via truthiness.
  return bool(value)
cifuzz-sarif/results.sarif + checkout_path: cifuzz-sarif diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/external-actions/build_fuzzers/action.yml b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/external-actions/build_fuzzers/action.yml new file mode 100644 index 0000000000000000000000000000000000000000..e5b782e46d4d3116b628143666db8f3f6406ace0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/external-actions/build_fuzzers/action.yml @@ -0,0 +1,75 @@ +# action.yml +name: 'build-fuzzers' +description: "Builds an OSS-Fuzz project's fuzzers." +inputs: + language: + description: 'Programming language project is written in.' + required: false + default: 'c++' + dry-run: + description: 'If set, run the action without actually reporting a failure.' + default: false + allowed-broken-targets-percentage: + description: 'The percentage of broken targets allowed in bad_build_check.' + required: false + sanitizer: + description: 'The sanitizer to build the fuzzers with.' + default: 'address' + project-src-path: + description: "The path to the project's source code checkout." + required: false + bad-build-check: + description: "Whether or not OSS-Fuzz's check for bad builds should be done." + required: false + default: true + keep-unaffected-fuzz-targets: + description: "Whether to keep unaffected fuzzers or delete them." + required: false + default: false + storage-repo: + description: | + The git repo to use for storing certain artifacts from fuzzing. + required: false + storage-repo-branch: + description: | + The branch of the git repo to use for storing certain artifacts from + fuzzing. + required: false + storage-repo-branch-coverage: + description: | + The branch of the git repo to use for storing coverage reports. + required: false + upload-build: + description: | + If set, will upload the build. + default: false + github-token: + description: | + Token for GitHub API. 
# action.yml
# Composite Docker action that runs already-built fuzz targets for a fixed
# time budget. Inputs are forwarded to the container as environment variables
# in the `env` block below.
name: 'run-fuzzers'
description: 'Runs fuzz target binaries for a specified length of time.'
inputs:
  language:
    description: 'Programming language project is written in.'
    required: false
    default: 'c++'
  fuzz-seconds:
    description: 'The total time allotted for fuzzing in seconds.'
    required: true
    default: 600
  dry-run:
    description: 'If set, run the action without actually reporting a failure.'
    default: false
  sanitizer:
    description: 'The sanitizer to run the fuzzers with.'
    default: 'address'
  mode:
    description: |
      The mode to run the fuzzers with ("code-change", "batch", "coverage", or "prune").
      "code-change" is for fuzzing a pull request or commit.
      "batch" is for non-interactive fuzzing of an entire project.
      "coverage" is for coverage generation.
      "prune" is for corpus pruning.
    required: false
    default: 'code-change'
  github-token:
    description: |
      Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
      You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
      hardcode the token.
      TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
      Document locking this down.
    required: true
  storage-repo:
    description: |
      The git repo to use for storing certain artifacts from fuzzing.
    required: false
  storage-repo-branch:
    description: |
      The branch of the git repo to use for storing certain artifacts from
      fuzzing.
    default: main
    required: false
  storage-repo-branch-coverage:
    description: |
      The branch of the git repo to use for storing coverage reports.
    default: gh-pages
    required: false
  report-unreproducible-crashes:
    description: 'If True, then unreproducible crashes will be reported.'
    required: false
    default: false
  minimize-crashes:
    description: 'If True, reportable crashes will be minimized.'
    required: false
    default: False
  parallel-fuzzing:
    description: "Whether to use all available cores for fuzzing."
    required: false
    default: false
  output-sarif:
    description: "Whether to output fuzzing results to SARIF."
    required: false
    default: false
  report-timeouts:
    description: "Whether to report fails due to timeout."
    required: false
    default: true
  report-ooms:
    description: "Whether to report fails due to OOM."
    required: false
    default: true
runs:
  using: 'docker'
  image: '../../../run_fuzzers.Dockerfile'
  env:
    # NOTE(review): references an `oss-fuzz-project-name` input that is not
    # declared above — presumably intentionally empty for ClusterFuzzLite
    # (CFL_PLATFORM: github) external projects; confirm before adding it.
    OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
    LANGUAGE: ${{ inputs.language }}
    FUZZ_SECONDS: ${{ inputs.fuzz-seconds }}
    DRY_RUN: ${{ inputs.dry-run}}
    SANITIZER: ${{ inputs.sanitizer }}
    MODE: ${{ inputs.mode }}
    GITHUB_TOKEN: ${{ inputs.github-token }}
    LOW_DISK_SPACE: 'True'
    GIT_STORE_REPO: ${{ inputs.storage-repo }}
    GIT_STORE_BRANCH: ${{ inputs.storage-repo-branch }}
    GIT_STORE_BRANCH_COVERAGE: ${{ inputs.storage-repo-branch-coverage }}
    REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }}
    MINIMIZE_CRASHES: ${{ inputs.minimize-crashes }}
    CIFUZZ_DEBUG: 'True'
    CFL_PLATFORM: 'github'
    PARALLEL_FUZZING: ${{ inputs.parallel-fuzzing }}
    OUTPUT_SARIF: ${{ inputs.output-sarif }}
    REPORT_TIMEOUTS: ${{ inputs.report-timeouts }}
    REPORT_OOMS: ${{ inputs.report-ooms}}
+"""Module for a generic filestore.""" + + +class FilestoreError(Exception): + """Error using the filestore.""" + + +# pylint: disable=unused-argument,no-self-use +class BaseFilestore: + """Base class for a filestore.""" + + def __init__(self, config): + self.config = config + + def upload_crashes(self, name, directory): + """Uploads the crashes at |directory| to |name|.""" + raise NotImplementedError('Child class must implement method.') + + def upload_corpus(self, name, directory, replace=False): + """Uploads the corpus at |directory| to |name|.""" + raise NotImplementedError('Child class must implement method.') + + def upload_build(self, name, directory): + """Uploads the build at |directory| to |name|.""" + raise NotImplementedError('Child class must implement method.') + + def upload_coverage(self, name, directory): + """Uploads the coverage report at |directory| to |name|.""" + raise NotImplementedError('Child class must implement method.') + + def download_corpus(self, name, dst_directory): + """Downloads the corpus located at |name| to |dst_directory|.""" + raise NotImplementedError('Child class must implement method.') + + def download_build(self, name, dst_directory): + """Downloads the build with |name| to |dst_directory|.""" + raise NotImplementedError('Child class must implement method.') + + def download_coverage(self, name, dst_directory): + """Downloads the latest project coverage report.""" + raise NotImplementedError('Child class must implement method.') diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/filesystem/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/filesystem/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7ddea132661e3acae27eb5abff61f1432eb65ba5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/filesystem/__init__.py @@ -0,0 +1,107 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Filestore implementation using a filesystem directory.""" +import logging +import os +import shutil +import subprocess +import sys + +from distutils import dir_util + +# pylint: disable=wrong-import-position,import-error +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, os.pardir)) +import filestore + + +def recursive_list_dir(directory): + """Returns list of all files in |directory|, including those in + subdirectories.""" + files = [] + for root, _, filenames in os.walk(directory): + for filename in filenames: + files.append(os.path.join(root, filename)) + return files + + +class FilesystemFilestore(filestore.BaseFilestore): + """Filesystem implementation using a filesystem directory.""" + BUILD_DIR = 'build' + CRASHES_DIR = 'crashes' + CORPUS_DIR = 'corpus' + COVERAGE_DIR = 'coverage' + + def __init__(self, config): + super().__init__(config) + self._filestore_root_dir = self.config.platform_conf.filestore_root_dir + + def _get_filestore_path(self, name, prefix_dir): + """Returns the filesystem path in the filestore for |name| and + |prefix_dir|.""" + return os.path.join(self._filestore_root_dir, prefix_dir, name) + + def _upload_directory(self, name, directory, prefix, delete=False): + filestore_path = self._get_filestore_path(name, prefix) + if os.path.exists(filestore_path): + initial_files = set(recursive_list_dir(filestore_path)) + else: + initial_files = set() + + # Make directory and any parents. 
+ os.makedirs(filestore_path, exist_ok=True) + copied_files = set(dir_util.copy_tree(directory, filestore_path)) + if not delete: + return True + + files_to_delete = initial_files - copied_files + for file_path in files_to_delete: + os.remove(file_path) + return True + + def _download_directory(self, name, dst_directory, prefix): + filestore_path = self._get_filestore_path(name, prefix) + return dir_util.copy_tree(filestore_path, dst_directory) + + def upload_crashes(self, name, directory): + """Uploads the crashes at |directory| to |name|.""" + return self._upload_directory(name, directory, self.CRASHES_DIR) + + def upload_corpus(self, name, directory, replace=False): + """Uploads the crashes at |directory| to |name|.""" + return self._upload_directory(name, + directory, + self.CORPUS_DIR, + delete=replace) + + def upload_build(self, name, directory): + """Uploads the build located at |directory| to |name|.""" + return self._upload_directory(name, directory, self.BUILD_DIR) + + def upload_coverage(self, name, directory): + """Uploads the coverage report at |directory| to |name|.""" + return self._upload_directory(name, directory, self.COVERAGE_DIR) + + def download_corpus(self, name, dst_directory): + """Downloads the corpus located at |name| to |dst_directory|.""" + return self._download_directory(name, dst_directory, self.CORPUS_DIR) + + def download_build(self, name, dst_directory): + """Downloads the build with |name| to |dst_directory|.""" + return self._download_directory(name, dst_directory, self.BUILD_DIR) + + def download_coverage(self, name, dst_directory): + """Downloads the latest project coverage report.""" + return self._download_directory(name, dst_directory, self.COVERAGE_DIR) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/git/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/git/__init__.py new file mode 100644 index 
"""Module for a git based filestore."""

import logging
import os
import shutil
import subprocess
import sys
import tempfile

import filestore

# pylint: disable=wrong-import-position
# infra/ is four levels up from this file; add it so `retry` can be imported.
INFRA_DIR = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.dirname(
        os.path.abspath(__file__)))))
sys.path.append(INFRA_DIR)

import retry

# Number of attempts and backoff (seconds) for retrying a failed git push.
_PUSH_RETRIES = 3
_PUSH_BACKOFF = 1
# Committer identity used for commits made by this filestore.
_GIT_EMAIL = 'cifuzz@clusterfuzz.com'
_GIT_NAME = 'CIFuzz'
# Top-level directories in the storage repo for each artifact kind.
_CORPUS_DIR = 'corpus'
_COVERAGE_DIR = 'coverage'


def git_runner(repo_path):
  """Returns a callable that runs a git command inside |repo_path|.

  The callable takes the git arguments positionally and raises
  subprocess.CalledProcessError when git exits nonzero.
  """

  def func(*args):
    return subprocess.check_call(('git', '-C', repo_path) + args)

  return func


# pylint: disable=unused-argument,no-self-use
class GitFilestore(filestore.BaseFilestore):
  """Generic git filestore. This still relies on another filestore provided by
  the CI for larger artifacts or artifacts which make sense to be included as
  the result of a workflow run.

  Corpora and coverage reports are committed to a git repo; crashes and
  builds are delegated to |ci_filestore|.
  """

  def __init__(self, config, ci_filestore):
    super().__init__(config)
    # Local working clone of the storage repo, created fresh per instance.
    self.repo_path = tempfile.mkdtemp()
    self._git = git_runner(self.repo_path)
    self._clone(self.config.git_store_repo)

    # Fallback filestore (e.g. GitHub actions artifacts) for crashes/builds.
    self._ci_filestore = ci_filestore

  def __del__(self):
    # Best-effort cleanup of the temporary clone when the object is collected.
    shutil.rmtree(self.repo_path)

  def _clone(self, repo_url):
    """Clones |repo_url| into the temp dir and sets the commit identity."""
    self._git('clone', repo_url, '.')
    self._git('config', '--local', 'user.email', _GIT_EMAIL)
    self._git('config', '--local', 'user.name', _GIT_NAME)

  def _reset_git(self, branch):
    """Puts the working tree on a clean, up-to-date |branch|.

    Falls back to creating an orphan branch when |branch| does not exist on
    origin yet (first upload to that branch).
    """
    self._git('fetch', 'origin')
    try:
      self._git('checkout', '-B', branch, 'origin/' + branch)
      self._git('reset', '--hard', 'HEAD')
    except subprocess.CalledProcessError:
      self._git('checkout', '--orphan', branch)

    # Remove untracked and ignored files left over from previous operations.
    self._git('clean', '-fxd')

  # pylint: disable=too-many-arguments
  @retry.wrap(_PUSH_RETRIES, _PUSH_BACKOFF)
  def _upload_to_git(self,
                     message,
                     branch,
                     upload_path,
                     local_path,
                     replace=False):
    """Commits |local_path| to |upload_path| on |branch| and pushes.

    If `replace` is True, then existing contents in the upload_path are
    deleted first. A no-op commit (no changes) is skipped without pushing.
    Retried on failure via the retry.wrap decorator (e.g. push races).
    """
    self._reset_git(branch)

    full_repo_path = os.path.join(self.repo_path, upload_path)
    if replace and os.path.exists(full_repo_path):
      shutil.rmtree(full_repo_path)

    shutil.copytree(local_path, full_repo_path, dirs_exist_ok=True)
    self._git('add', '.')
    try:
      self._git('commit', '-m', message)
    except subprocess.CalledProcessError:
      # git commit exits nonzero when there is nothing to commit.
      logging.debug('No changes, skipping git push.')
      return

    self._git('push', 'origin', branch)

  def upload_crashes(self, name, directory):
    """Uploads the crashes at |directory| to |name| via the CI filestore."""
    return self._ci_filestore.upload_crashes(name, directory)

  def upload_corpus(self, name, directory, replace=False):
    """Uploads the corpus at |directory| to |name| on the corpus branch."""
    self._upload_to_git('Corpus upload',
                        self.config.git_store_branch,
                        os.path.join(_CORPUS_DIR, name),
                        directory,
                        replace=replace)

  def upload_build(self, name, directory):
    """Uploads the build at |directory| to |name| via the CI filestore."""
    return self._ci_filestore.upload_build(name, directory)

  def upload_coverage(self, name, directory):
    """Uploads the coverage report at |directory| to |name| on the coverage
    branch, replacing the previous report."""
    self._upload_to_git('Coverage upload',
                        self.config.git_store_branch_coverage,
                        os.path.join(_COVERAGE_DIR, name),
                        directory,
                        replace=True)

  def download_corpus(self, name, dst_directory):
    """Downloads the corpus located at |name| to |dst_directory|. Returns
    False when no corpus has been uploaded yet."""
    self._reset_git(self.config.git_store_branch)
    path = os.path.join(self.repo_path, _CORPUS_DIR, name)
    if not os.path.exists(path):
      logging.debug('Corpus does not exist at %s.', path)
      return False

    shutil.copytree(path, dst_directory, dirs_exist_ok=True)
    return True

  def download_build(self, name, dst_directory):
    """Downloads the build with |name| to |dst_directory| via the CI
    filestore."""
    return self._ci_filestore.download_build(name, dst_directory)

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report. Returns False when no
    report has been uploaded yet."""
    self._reset_git(self.config.git_store_branch_coverage)
    path = os.path.join(self.repo_path, _COVERAGE_DIR, name)
    if not os.path.exists(path):
      logging.debug('Coverage does not exist at %s.', path)
      return False

    shutil.copytree(path, dst_directory, dirs_exist_ok=True)
    return True
+"""Tests for git.""" +import filecmp +import os +import tempfile +import subprocess +import sys +import unittest +from unittest import mock + +# pylint: disable=wrong-import-position +INFRA_DIR = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname( + os.path.abspath(__file__))))) +sys.path.append(INFRA_DIR) + +from filestore import git +import test_helpers + +# pylint: disable=protected-access,no-self-use + + +class GitFilestoreTest(unittest.TestCase): + """Tests for GitFilestore.""" + + def setUp(self): + self.git_dir = tempfile.TemporaryDirectory() + self.addCleanup(self.git_dir.cleanup) + + self.local_dir = tempfile.TemporaryDirectory() + self.addCleanup(self.local_dir.cleanup) + + self.download_dir = tempfile.TemporaryDirectory() + self.addCleanup(self.download_dir.cleanup) + + with open(os.path.join(self.local_dir.name, 'a'), 'w') as handle: + handle.write('') + + os.makedirs(os.path.join(self.local_dir.name, 'b')) + + with open(os.path.join(self.local_dir.name, 'b', 'c'), 'w') as handle: + handle.write('') + + self.git_repo = git.git_runner(self.git_dir.name) + self.git_repo('init', '--bare') + + self.config = test_helpers.create_run_config( + git_store_repo='file://' + self.git_dir.name, + git_store_branch='main', + git_store_branch_coverage='cov-branch') + + self.mock_ci_filestore = mock.MagicMock() + self.git_store = git.GitFilestore(self.config, self.mock_ci_filestore) + + def assert_dirs_same(self, first, second): + """Asserts two dirs are the same.""" + dcmp = filecmp.dircmp(first, second) + if dcmp.diff_files or dcmp.left_only or dcmp.right_only: + return False + + return all( + self.assert_dirs_same(os.path.join(first, subdir), + os.path.join(second, subdir)) + for subdir in dcmp.common_dirs) + + def get_repo_filelist(self, branch): + """Get files in repo.""" + return subprocess.check_output([ + 'git', '-C', self.git_dir.name, 'ls-tree', '-r', '--name-only', branch + ]).decode().splitlines() + + def test_upload_download_corpus(self): + 
"""Tests uploading and downloading corpus.""" + self.git_store.upload_corpus('target', self.local_dir.name) + self.git_store.download_corpus('target', self.download_dir.name) + self.assert_dirs_same(self.local_dir.name, self.download_dir.name) + + self.assertCountEqual([ + 'corpus/target/a', + 'corpus/target/b/c', + ], self.get_repo_filelist('main')) + + def test_upload_download_coverage(self): + """Tests uploading and downloading corpus.""" + self.git_store.upload_coverage('latest', self.local_dir.name) + self.git_store.download_coverage('latest', self.download_dir.name) + self.assert_dirs_same(self.local_dir.name, self.download_dir.name) + + self.assertCountEqual([ + 'coverage/latest/a', + 'coverage/latest/b/c', + ], self.get_repo_filelist('cov-branch')) + + def test_upload_crashes(self): + """Tests uploading crashes.""" + self.git_store.upload_crashes('current', self.local_dir.name) + self.mock_ci_filestore.upload_crashes.assert_called_with( + 'current', self.local_dir.name) + + def test_upload_build(self): + """Tests uploading build.""" + self.git_store.upload_build('sanitizer', self.local_dir.name) + self.mock_ci_filestore.upload_build.assert_called_with( + 'sanitizer', self.local_dir.name) + + def test_download_build(self): + """Tests downloading build.""" + self.git_store.download_build('sanitizer', self.download_dir.name) + self.mock_ci_filestore.download_build.assert_called_with( + 'sanitizer', self.download_dir.name) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4750a39b9709d49b9f207321480147b1e1d75476 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/__init__.py @@ -0,0 +1,179 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Implementation of a filestore using Github actions artifacts.""" +import logging +import os +import shutil +import sys +import tarfile +import tempfile + +# pylint: disable=wrong-import-position,import-error +INFRA_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir, + os.path.pardir)) +sys.path.append(INFRA_DIR) +OSS_FUZZ_ROOT_DIR = os.path.dirname(INFRA_DIR) + +import utils +import http_utils +import filestore +from filestore.github_actions import github_api + +UPLOAD_JS = os.path.join(os.path.dirname(__file__), 'upload.js') + + +def tar_directory(directory, archive_path): + """Tars a |directory| and stores archive at |archive_path|. |archive_path| + must end in .tar""" + assert archive_path.endswith('.tar') + # Do this because make_archive will append the extension to archive_path. + archive_path = os.path.splitext(archive_path)[0] + + root_directory = os.path.abspath(directory) + shutil.make_archive(archive_path, + 'tar', + root_dir=root_directory, + base_dir='./') + + +class GithubActionsFilestore(filestore.BaseFilestore): + """Implementation of BaseFilestore using Github actions artifacts. Relies on + github_actions_toolkit for using the GitHub actions API and the github_api + module for using GitHub's standard API. We need to use both because the GitHub + actions API is the only way to upload an artifact but it does not support + downloading artifacts from other runs. 
The standard GitHub API does support + this however.""" + + ARTIFACT_PREFIX = 'cifuzz-' + BUILD_PREFIX = 'build-' + CRASHES_PREFIX = 'crashes-' + CORPUS_PREFIX = 'corpus-' + COVERAGE_PREFIX = 'coverage-' + + def __init__(self, config): + super().__init__(config) + self.github_api_http_headers = github_api.get_http_auth_headers(config) + + def _get_artifact_name(self, name): + """Returns |name| prefixed with |self.ARITFACT_PREFIX| if it isn't already + prefixed. Otherwise returns |name|.""" + if name.startswith(self.ARTIFACT_PREFIX): + return name + return f'{self.ARTIFACT_PREFIX}{name}' + + def _upload_directory(self, name, directory): # pylint: disable=no-self-use + """Uploads |directory| as artifact with |name|.""" + name = self._get_artifact_name(name) + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, name + '.tar') + tar_directory(directory, archive_path) + _raw_upload_directory(name, temp_dir) + + def upload_crashes(self, name, directory): + """Uploads the crashes at |directory| to |name|.""" + return _raw_upload_directory(self.CRASHES_PREFIX + name, directory) + + def upload_corpus(self, name, directory, replace=False): + """Uploads the corpus at |directory| to |name|.""" + # Not applicable as the the entire corpus is uploaded under a single + # artifact name. 
+ del replace + return self._upload_directory(self.CORPUS_PREFIX + name, directory) + + def upload_build(self, name, directory): + """Uploads the build at |directory| to |name|.""" + return self._upload_directory(self.BUILD_PREFIX + name, directory) + + def upload_coverage(self, name, directory): + """Uploads the coverage report at |directory| to |name|.""" + return self._upload_directory(self.COVERAGE_PREFIX + name, directory) + + def download_corpus(self, name, dst_directory): # pylint: disable=unused-argument,no-self-use + """Downloads the corpus located at |name| to |dst_directory|.""" + return self._download_artifact(self.CORPUS_PREFIX + name, dst_directory) + + def _find_artifact(self, name): + """Finds an artifact using the GitHub API and returns it.""" + logging.debug('Listing artifacts.') + artifacts = self._list_artifacts() + artifact = github_api.find_artifact(name, artifacts) + logging.debug('Artifact: %s.', artifact) + return artifact + + def _download_artifact(self, name, dst_directory): + """Downloads artifact with |name| to |dst_directory|. Returns True on + success.""" + name = self._get_artifact_name(name) + + with tempfile.TemporaryDirectory() as temp_dir: + if not self._raw_download_artifact(name, temp_dir): + logging.warning('Could not download artifact: %s.', name) + return False + + artifact_tarfile_path = os.path.join(temp_dir, name + '.tar') + if not os.path.exists(artifact_tarfile_path): + logging.error('Artifact zip did not contain a tarfile.') + return False + + # TODO(jonathanmetzman): Replace this with archive.unpack from + # libClusterFuzz so we can avoid path traversal issues. + with tarfile.TarFile(artifact_tarfile_path) as artifact_tarfile: + artifact_tarfile.extractall(dst_directory) + return True + + def _raw_download_artifact(self, name, dst_directory): + """Downloads the artifact with |name| to |dst_directory|. Returns True on + success. 
    Does not do any untarring or adding prefix to |name|."""
    artifact = self._find_artifact(name)
    if not artifact:
      logging.warning('Could not find artifact: %s.', name)
      return False
    # The GitHub API serves the artifact's contents as a zip archive at this
    # URL.
    download_url = artifact['archive_download_url']
    return http_utils.download_and_unpack_zip(
        download_url, dst_directory, headers=self.github_api_http_headers)

  def _list_artifacts(self):
    """Returns a list of artifacts."""
    # NOTE(review): github_api.list_artifacts actually returns a generator,
    # not a list -- confirm callers only iterate it once.
    return github_api.list_artifacts(self.config.project_repo_owner,
                                     self.config.project_repo_name,
                                     self.github_api_http_headers)

  def download_build(self, name, dst_directory):
    """Downloads the build with name |name| to |dst_directory|."""
    return self._download_artifact(self.BUILD_PREFIX + name, dst_directory)

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report."""
    return self._download_artifact(self.COVERAGE_PREFIX + name, dst_directory)


def _upload_artifact_with_upload_js(name, artifact_paths, directory):
  """Uploads the artifacts in |artifact_paths| that are located in |directory|
  to |name|, using the upload.js script."""
  command = [UPLOAD_JS, name, directory] + artifact_paths
  _, _, retcode = utils.execute(command, location=OSS_FUZZ_ROOT_DIR)
  # upload.js exits 0 on success, so a zero return code means the upload
  # succeeded.
  return retcode == 0


def _raw_upload_directory(name, directory):
  """Uploads the artifacts located in |directory| to |name|. Does not do any
  tarring or adding prefixes to |name|."""
  # Get file paths.
  artifact_paths = []
  for root, _, curr_file_paths in os.walk(directory):
    for file_path in curr_file_paths:
      artifact_paths.append(os.path.join(root, file_path))
  logging.debug('Artifact paths: %s.', artifact_paths)
  return _upload_artifact_with_upload_js(name, artifact_paths, directory)
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_actions_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_actions_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..90a0b070db61df690ad0781c2fdd63009d1af6a8
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_actions_test.py
@@ -0,0 +1,283 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for github_actions.""" +import os +import shutil +import sys +import tarfile +import tempfile +import unittest +from unittest import mock + +from pyfakefs import fake_filesystem_unittest + +# pylint: disable=wrong-import-position +INFRA_DIR = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +sys.path.append(INFRA_DIR) + +from filestore import github_actions +import test_helpers + +# pylint: disable=protected-access,no-self-use + + +class GithubActionsFilestoreTest(fake_filesystem_unittest.TestCase): + """Tests for GithubActionsFilestore.""" + + @mock.patch('platform_config.github._get_event_data', return_value={}) + def setUp(self, _): # pylint: disable=arguments-differ + test_helpers.patch_environ(self) + self.token = 'example githubtoken' + self.owner = 'exampleowner' + self.repo = 'examplerepo' + os.environ['GITHUB_REPOSITORY'] = f'{self.owner}/{self.repo}' + os.environ['GITHUB_EVENT_PATH'] = '/fake' + os.environ['CFL_PLATFORM'] = 'github' + os.environ['GITHUB_WORKSPACE'] = '/workspace' + self.config = test_helpers.create_run_config(token=self.token) + self.local_dir = '/local-dir' + self.testcase = os.path.join(self.local_dir, 'testcase') + + def _get_expected_http_headers(self): + return { + 'Authorization': f'token {self.token}', + 'Accept': 'application/vnd.github.v3+json', + } + + @mock.patch('filestore.github_actions.github_api.list_artifacts') + def test_list_artifacts(self, mock_list_artifacts): + """Tests that _list_artifacts works as intended.""" + filestore = github_actions.GithubActionsFilestore(self.config) + filestore._list_artifacts() + mock_list_artifacts.assert_called_with(self.owner, self.repo, + self._get_expected_http_headers()) + + @mock.patch('logging.warning') + @mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts', + return_value=None) + @mock.patch('filestore.github_actions.github_api.find_artifact', + return_value=None) + def test_download_build_no_artifact(self, _, __, 
mock_warning): + """Tests that download_build returns None and doesn't exception when + find_artifact can't find an artifact.""" + filestore = github_actions.GithubActionsFilestore(self.config) + name = 'name' + build_dir = 'build-dir' + self.assertFalse(filestore.download_build(name, build_dir)) + mock_warning.assert_called_with('Could not download artifact: %s.', + 'cifuzz-build-' + name) + + @mock.patch('logging.warning') + @mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts', + return_value=None) + @mock.patch('filestore.github_actions.github_api.find_artifact', + return_value=None) + def test_download_corpus_no_artifact(self, _, __, mock_warning): + """Tests that download_corpus_build returns None and doesn't exception when + find_artifact can't find an artifact.""" + filestore = github_actions.GithubActionsFilestore(self.config) + name = 'name' + dst_dir = 'local-dir' + self.assertFalse(filestore.download_corpus(name, dst_dir)) + mock_warning.assert_called_with('Could not download artifact: %s.', + 'cifuzz-corpus-' + name) + + @mock.patch('filestore.github_actions.tar_directory') + @mock.patch('filestore.github_actions._upload_artifact_with_upload_js') + def test_upload_corpus(self, mock_upload_artifact, mock_tar_directory): + """Test uploading corpus.""" + self._create_local_dir() + + def mock_tar_directory_impl(_, archive_path): + self.fs.create_file(archive_path) + + mock_tar_directory.side_effect = mock_tar_directory_impl + + filestore = github_actions.GithubActionsFilestore(self.config) + filestore.upload_corpus('target', self.local_dir) + self.assert_upload(mock_upload_artifact, mock_tar_directory, + 'corpus-target') + + @mock.patch('filestore.github_actions._upload_artifact_with_upload_js') + def test_upload_crashes(self, mock_upload_artifact): + """Test uploading crashes.""" + self._create_local_dir() + + filestore = github_actions.GithubActionsFilestore(self.config) + filestore.upload_crashes('current', self.local_dir) + 
mock_upload_artifact.assert_has_calls( + [mock.call('crashes-current', ['/local-dir/testcase'], '/local-dir')]) + + @mock.patch('filestore.github_actions.tar_directory') + @mock.patch('filestore.github_actions._upload_artifact_with_upload_js') + def test_upload_build(self, mock_upload_artifact, mock_tar_directory): + """Test uploading build.""" + self._create_local_dir() + + def mock_tar_directory_impl(_, archive_path): + self.fs.create_file(archive_path) + + mock_tar_directory.side_effect = mock_tar_directory_impl + + filestore = github_actions.GithubActionsFilestore(self.config) + filestore.upload_build('sanitizer', self.local_dir) + self.assert_upload(mock_upload_artifact, mock_tar_directory, + 'build-sanitizer') + + @mock.patch('filestore.github_actions.tar_directory') + @mock.patch('filestore.github_actions._upload_artifact_with_upload_js') + def test_upload_coverage(self, mock_upload_artifact, mock_tar_directory): + """Test uploading coverage.""" + self._create_local_dir() + + def mock_tar_directory_impl(_, archive_path): + self.fs.create_file(archive_path) + + mock_tar_directory.side_effect = mock_tar_directory_impl + + filestore = github_actions.GithubActionsFilestore(self.config) + filestore.upload_coverage('latest', self.local_dir) + self.assert_upload(mock_upload_artifact, mock_tar_directory, + 'coverage-latest') + + def assert_upload(self, mock_upload_artifact, mock_tar_directory, + expected_artifact_name): + """Tests that upload_directory invokes tar_directory and + artifact_client.upload_artifact properly.""" + # Don't assert what second argument will be since it's a temporary + # directory. + self.assertEqual(mock_tar_directory.call_args_list[0][0][0], self.local_dir) + + # Don't assert what second and third arguments will be since they are + # temporary directories. 
+ expected_artifact_name = 'cifuzz-' + expected_artifact_name + self.assertEqual(mock_upload_artifact.call_args_list[0][0][0], + expected_artifact_name) + + # Assert artifacts list contains one tarfile. + artifacts_list = mock_upload_artifact.call_args_list[0][0][1] + self.assertEqual(len(artifacts_list), 1) + self.assertEqual(os.path.basename(artifacts_list[0]), + expected_artifact_name + '.tar') + + def _create_local_dir(self): + """Sets up pyfakefs and creates a corpus directory containing + self.testcase.""" + self.setUpPyfakefs() + self.fs.create_file(self.testcase, contents='hi') + + @mock.patch('filestore.github_actions.GithubActionsFilestore._find_artifact') + @mock.patch('http_utils.download_and_unpack_zip') + def test_download_artifact(self, mock_download_and_unpack_zip, + mock_find_artifact): + """Tests that _download_artifact works as intended.""" + artifact_download_url = 'http://example.com/download' + artifact_listing = { + 'expired': False, + 'name': 'corpus', + 'archive_download_url': artifact_download_url + } + mock_find_artifact.return_value = artifact_listing + + self._create_local_dir() + with tempfile.TemporaryDirectory() as temp_dir: + # Create a tarball. 
+ archive_path = os.path.join(temp_dir, 'cifuzz-corpus.tar') + github_actions.tar_directory(self.local_dir, archive_path) + + artifact_download_dst_dir = os.path.join(temp_dir, 'dst') + os.mkdir(artifact_download_dst_dir) + + def mock_download_and_unpack_zip_impl(url, download_artifact_temp_dir, + headers): + self.assertEqual(url, artifact_download_url) + self.assertEqual(headers, self._get_expected_http_headers()) + shutil.copy( + archive_path, + os.path.join(download_artifact_temp_dir, + os.path.basename(archive_path))) + return True + + mock_download_and_unpack_zip.side_effect = ( + mock_download_and_unpack_zip_impl) + filestore = github_actions.GithubActionsFilestore(self.config) + self.assertTrue( + filestore._download_artifact('corpus', artifact_download_dst_dir)) + mock_find_artifact.assert_called_with('cifuzz-corpus') + self.assertTrue( + os.path.exists( + os.path.join(artifact_download_dst_dir, + os.path.basename(self.testcase)))) + + @mock.patch('filestore.github_actions.github_api.list_artifacts') + def test_find_artifact(self, mock_list_artifacts): + """Tests that _find_artifact works as intended.""" + artifact_listing_1 = { + 'expired': False, + 'name': 'other', + 'archive_download_url': 'http://download1' + } + artifact_listing_2 = { + 'expired': False, + 'name': 'artifact', + 'archive_download_url': 'http://download2' + } + artifact_listing_3 = { + 'expired': True, + 'name': 'artifact', + 'archive_download_url': 'http://download3' + } + artifact_listing_4 = { + 'expired': False, + 'name': 'artifact', + 'archive_download_url': 'http://download4' + } + artifacts = [ + artifact_listing_1, artifact_listing_2, artifact_listing_3, + artifact_listing_4 + ] + mock_list_artifacts.return_value = artifacts + filestore = github_actions.GithubActionsFilestore(self.config) + # Test that find_artifact will return the most recent unexpired artifact + # with the correct name. 
+ self.assertEqual(filestore._find_artifact('artifact'), artifact_listing_2) + mock_list_artifacts.assert_called_with(self.owner, self.repo, + self._get_expected_http_headers()) + + +class TarDirectoryTest(unittest.TestCase): + """Tests for tar_directory.""" + + def test_tar_directory(self): + """Tests that tar_directory writes the archive to the correct location and + archives properly.""" + with tempfile.TemporaryDirectory() as temp_dir: + archive_path = os.path.join(temp_dir, 'myarchive.tar') + archived_dir = os.path.join(temp_dir, 'toarchive') + os.mkdir(archived_dir) + archived_filename = 'file1' + archived_file_path = os.path.join(archived_dir, archived_filename) + with open(archived_file_path, 'w') as file_handle: + file_handle.write('hi') + github_actions.tar_directory(archived_dir, archive_path) + self.assertTrue(os.path.exists(archive_path)) + + # Now check it archives correctly. + unpacked_directory = os.path.join(temp_dir, 'unpacked') + with tarfile.TarFile(archive_path) as artifact_tarfile: + artifact_tarfile.extractall(unpacked_directory) + unpacked_archived_file_path = os.path.join(unpacked_directory, + archived_filename) + self.assertTrue(os.path.exists(unpacked_archived_file_path)) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api.py new file mode 100644 index 0000000000000000000000000000000000000000..35e92eafb2cf64159c372721598e2a084a573ad4 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api.py @@ -0,0 +1,109 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for dealing with the GitHub API. This is different from +github_actions_toolkit which only deals with the actions API. We need to use +both.""" +import logging +import os +import sys + +import requests + +import filestore + +# pylint: disable=wrong-import-position,import-error + +sys.path.append( + os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir, + os.path.pardir)) +import retry + +_MAX_ITEMS_PER_PAGE = 100 + +_GET_ATTEMPTS = 3 +_GET_BACKOFF = 1 + + +def get_http_auth_headers(config): + """Returns HTTP headers for authentication to the API.""" + authorization = f'token {config.token}' + return { + 'Authorization': authorization, + 'Accept': 'application/vnd.github.v3+json' + } + + +def _get_artifacts_list_api_url(repo_owner, repo_name): + """Returns the artifacts_api_url for |repo_name| owned by |repo_owner|.""" + github_api_url = os.getenv('GITHUB_API_URL', 'https://api.github.com') + return (f'{github_api_url}/repos/{repo_owner}/' + f'{repo_name}/actions/artifacts') + + +@retry.wrap(_GET_ATTEMPTS, _GET_BACKOFF) +def _do_get_request(*args, **kwargs): + """Wrapped version of requests.get that does retries.""" + return requests.get(*args, **kwargs) + + +def _get_items(url, headers): + """Generator that gets and yields items from a GitHub API endpoint (specified + by |URL|) sending |headers| with the get request.""" + # Github API response pages are 1-indexed. + page_counter = 1 + + # Set to infinity so we run loop at least once. 
+ total_num_items = float('inf') + + item_num = 0 + while item_num < total_num_items: + params = {'per_page': _MAX_ITEMS_PER_PAGE, 'page': str(page_counter)} + response = _do_get_request(url, params=params, headers=headers) + response_json = response.json() + if not response.status_code == 200: + # Check that request was successful. + logging.error('Request to %s failed. Code: %d. Response: %s', + response.request.url, response.status_code, response_json) + raise filestore.FilestoreError('Github API request failed.') + + if total_num_items == float('inf'): + # Set proper total_num_items + total_num_items = response_json['total_count'] + + # Get the key for the items we are after. + keys = [key for key in response_json.keys() if key != 'total_count'] + assert len(keys) == 1, keys + items_key = keys[0] + + for item in response_json[items_key]: + yield item + item_num += 1 + + page_counter += 1 + + +def find_artifact(artifact_name, artifacts): + """Find the artifact with the name |artifact_name| in |artifacts|.""" + for artifact in artifacts: + # TODO(metzman): Handle multiple by making sure we download the latest. 
    if artifact['name'] == artifact_name and not artifact['expired']:
      return artifact
  return None


def list_artifacts(owner, repo, headers):
  """Returns a generator of all the artifacts for |owner|/|repo|."""
  url = _get_artifacts_list_api_url(owner, repo)
  logging.debug('Getting artifacts from: %s', url)
  # _get_items lazily pages through the API, so nothing is fetched until the
  # returned generator is iterated.
  return _get_items(url, headers)
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d6f54e41c8dad942072dc859e42d9c1f8fd82ad
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api_test.py
@@ -0,0 +1,41 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for github_api.""" +import os +import sys +import unittest + +# pylint: disable=wrong-import-position,import-error +sys.path.append( + os.path.abspath( + os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir, + os.path.pardir))) + +from filestore.github_actions import github_api +import test_helpers + + +class GetHttpAuthHeaders(unittest.TestCase): + """Tests for get_http_auth_headers.""" + + def test_get_http_auth_headers(self): + """Tests that get_http_auth_headers returns the correct result.""" + token = 'example githubtoken' + run_config = test_helpers.create_run_config(token=token) + expected_headers = { + 'Authorization': f'token {token}', + 'Accept': 'application/vnd.github.v3+json', + } + self.assertEqual(expected_headers, + github_api.get_http_auth_headers(run_config)) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/upload.js b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/upload.js new file mode 100644 index 0000000000000000000000000000000000000000..13a805891bf5565ce88e30120a12c0b078c65af5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/github_actions/upload.js @@ -0,0 +1,45 @@ +#!/usr/bin/env node +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Script for uploading an artifact. Returns 0 on success. +// Usage: upload.js ... 
+ +const fs = require('fs'); +const { DefaultArtifactClient } = require('@actions/artifact'); + +const artifactClient = new DefaultArtifactClient(); +const artifactName = process.argv[2]; +const rootDirectory = process.argv[3] +const files = process.argv.slice(4); +const options = { + continueOnError: true +}; + +async function uploadArtifact() { + try { + const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options); + console.log(uploadResult); + if (uploadResult.failedItems.length > 0) { + return 1; + } + return 0; + } catch (error) { + console.error('Error uploading artifact:', error); + return 1; + } +} + +uploadArtifact().then(exitCode => { + process.exit(exitCode); +}); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gitlab/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gitlab/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..43ee0ce8f17a4231bb46f41377d1277cb87a854b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gitlab/__init__.py @@ -0,0 +1,133 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""GitLab filestore implementation.""" +import logging + +import json +import os +import shutil +import tempfile + +import filestore +import http_utils + +# pylint: disable=no-self-use,unused-argument + + +class GitlabFilestore(filestore.BaseFilestore): + """Implementation of BaseFilestore using GitLab. + Needs a cache to upload and download builds. + Needs a git repository for corpus and coverage. + """ + + BUILD_PREFIX = 'build-' + CORPUS_PREFIX = 'corpus-' + COVERAGE_PREFIX = 'coverage-' + CRASHES_PREFIX = 'crashes-' + + def __init__(self, config): + super().__init__(config) + self.artifacts_dir = self.config.platform_conf.artifacts_dir + self.cache_dir = self.config.platform_conf.cache_dir + if self.config.git_store_repo: + self.git_filestore = filestore.git.GitFilestore(config, None) + else: + self.git_filestore = None + + def upload_crashes(self, name, directory): + """GitLab artifacts implementation of upload_crashes.""" + # Upload crashes as job artifacts. + if os.listdir(directory): + dest_dir_artifacts = os.path.join(self.config.project_src_path, + self.artifacts_dir, + self.CRASHES_PREFIX + name) + logging.info('Uploading artifacts to %s.', dest_dir_artifacts) + shutil.copytree(directory, dest_dir_artifacts) + + def upload_corpus(self, name, directory, replace=False): + """GitLab artifacts implementation of upload_corpus.""" + # Use the git filestore if any. + if self.git_filestore: + self.git_filestore.upload_corpus(name, directory, replace) + return + # Fall back to cache. + dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir, + self.CORPUS_PREFIX + name) + logging.info('Copying from %s to cache %s.', directory, dest_dir_cache) + # Remove previous corpus from cache if any. + shutil.rmtree(dest_dir_cache, ignore_errors=True) + shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True) + + def upload_build(self, name, directory): + """GitLab artifacts implementation of upload_build.""" + # Puts build into the cache. 
+ dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir, + self.BUILD_PREFIX + name) + logging.info('Copying from %s to cache %s.', directory, dest_dir_cache) + shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True) + + def upload_coverage(self, name, directory): + """GitLab artifacts implementation of upload_coverage.""" + # Use the git filestore. + if self.git_filestore: + self.git_filestore.upload_coverage(name, directory) + return + # Fall back to cache. + dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir, + self.COVERAGE_PREFIX + name) + logging.info('Copying from %s to cache %s.', directory, dest_dir_cache) + shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True) + # And also updates coverage reports as artifacts + # as it should not be too big. + dest_dir_artifacts = os.path.join(self.config.project_src_path, + self.artifacts_dir, + self.COVERAGE_PREFIX + name) + logging.info('Uploading artifacts to %s.', dest_dir_artifacts) + shutil.copytree(directory, dest_dir_artifacts) + + def _copy_from_cache(self, src_dir_cache, dst_directory): + if not os.path.exists(src_dir_cache): + logging.info('Cache %s does not exist.', src_dir_cache) + return False + logging.info('Copying %s from cache to %s.', src_dir_cache, dst_directory) + shutil.copytree(src_dir_cache, dst_directory, dirs_exist_ok=True) + return True + + def download_corpus(self, name, dst_directory): + """GitLab artifacts implementation of download_corpus.""" + # Use the git filestore if any. + if self.git_filestore: + self.git_filestore.download_corpus(name, dst_directory) + return + # Fall back to cache. + src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir, + self.CORPUS_PREFIX + name) + self._copy_from_cache(src_dir_cache, dst_directory) + + def download_build(self, name, dst_directory): + """GitLab artifacts implementation of download_build.""" + # Gets build from the cache. 
    src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
                                 self.BUILD_PREFIX + name)
    return self._copy_from_cache(src_dir_cache, dst_directory)

  def download_coverage(self, name, dst_directory):
    """GitLab artifacts implementation of download_coverage."""
    # Use the git filestore if any.
    if self.git_filestore:
      return self.git_filestore.download_coverage(name, dst_directory)
    # Fall back to cache.
    src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
                                 self.COVERAGE_PREFIX + name)
    return self._copy_from_cache(src_dir_cache, dst_directory)
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gsutil/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gsutil/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..85f85508d78e8a01d11dcbed7c3f463d830d322b
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/gsutil/__init__.py
@@ -0,0 +1,109 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Filestore implementation using gsutil.""" +import logging +import os +import posixpath +import subprocess +import sys + +# pylint: disable=wrong-import-position,import-error +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, os.pardir)) +import filestore +import utils + + +def _gsutil_execute(*args, parallel=True): + """Executes a gsutil command, passing |*args| to gsutil and returns the + stdout, stderr and returncode. Exceptions on failure.""" + command = ['gsutil'] + if parallel: + command.append('-m') + command += list(args) + logging.info('Executing gsutil command: %s', command) + return utils.execute(command, check_result=True) + + +def _rsync(src, dst, recursive=True, delete=False): + """Executes gsutil rsync on |src| and |dst|""" + args = ['rsync'] + if recursive: + args.append('-r') + if delete: + args.append('-d') + args += [src, dst] + return _gsutil_execute(*args) + + +class GSUtilFilestore(filestore.BaseFilestore): + """Filestore implementation using gsutil.""" + BUILD_DIR = 'build' + CRASHES_DIR = 'crashes' + CORPUS_DIR = 'corpus' + COVERAGE_DIR = 'coverage' + + def __init__(self, config): + super().__init__(config) + self._cloud_bucket = self.config.cloud_bucket + + def _get_gsutil_url(self, name, prefix_dir): + """Returns the gsutil URL for |name| and |prefix_dir|.""" + if not prefix_dir: + return posixpath.join(self._cloud_bucket, name) + return posixpath.join(self._cloud_bucket, prefix_dir, name) + + def _upload_directory(self, name, directory, prefix, delete=False): + gsutil_url = self._get_gsutil_url(name, prefix) + return _rsync(directory, gsutil_url, delete=delete) + + def _download_directory(self, name, dst_directory, prefix): + gsutil_url = self._get_gsutil_url(name, prefix) + return _rsync(gsutil_url, dst_directory) + + def upload_crashes(self, name, directory): + """Uploads the crashes at |directory| to |name|.""" + # Name is going to be "current". 
I don't know if this makes sense outside of + # GitHub Actions. + gsutil_url = self._get_gsutil_url(name, self.CRASHES_DIR) + logging.info('Uploading crashes to %s.', gsutil_url) + return _rsync(directory, gsutil_url) + + def upload_corpus(self, name, directory, replace=False): + """Uploads the crashes at |directory| to |name|.""" + return self._upload_directory(name, + directory, + self.CORPUS_DIR, + delete=replace) + + def upload_build(self, name, directory): + """Uploads the build located at |directory| to |name|.""" + return self._upload_directory(name, directory, self.BUILD_DIR) + + def upload_coverage(self, name, directory): + """Uploads the coverage report at |directory| to |name|.""" + return self._upload_directory(name, directory, self.COVERAGE_DIR) + + def download_corpus(self, name, dst_directory): + """Downloads the corpus located at |name| to |dst_directory|.""" + return self._download_directory(name, dst_directory, self.CORPUS_DIR) + + def download_build(self, name, dst_directory): + """Downloads the build with |name| to |dst_directory|.""" + return self._download_directory(name, dst_directory, self.BUILD_DIR) + + def download_coverage(self, name, dst_directory): + """Downloads the latest project coverage report.""" + return self._download_directory(name, dst_directory, self.COVERAGE_DIR) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/no_filestore/__init__.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/no_filestore/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2d063116ebcffcf2552225a58a824e4e4409be49 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore/no_filestore/__init__.py @@ -0,0 +1,51 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Empty filestore implementation for platforms that haven't implemented it."""
import logging

import filestore

# pylint:disable=no-self-use,unused-argument


class NoFilestore(filestore.BaseFilestore):
  """Empty Filestore implementation.

  Every upload/download method is a no-op that only logs, so platforms
  without a real filestore can still run the rest of the pipeline.
  """

  def upload_crashes(self, name, directory):
    """Noop implementation of upload_crashes."""
    logging.info('Not uploading crashes because no Filestore.')

  def upload_corpus(self, name, directory, replace=False):
    """Noop implementation of upload_corpus."""
    logging.info('Not uploading corpus because no Filestore.')

  def upload_build(self, name, directory):
    """Noop implementation of upload_build."""
    logging.info('Not uploading build because no Filestore.')

  def upload_coverage(self, name, directory):
    """Noop implementation of upload_coverage."""
    logging.info('Not uploading coverage because no Filestore.')

  def download_corpus(self, name, dst_directory):
    """Noop implementation of download_corpus."""
    logging.info('Not downloading corpus because no Filestore.')

  def download_build(self, name, dst_directory):
    """Noop implementation of download_build."""
    logging.info('Not downloading build because no Filestore.')

  def download_coverage(self, name, dst_directory):
    """Noop implementation of download_coverage."""
    logging.info('Not downloading coverage because no Filestore.')
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils_test.py
new file mode 100644
index
0000000000000000000000000000000000000000..d5e1307376548252e0633cb2c2343feee2c0f745 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils_test.py @@ -0,0 +1,50 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for filestore_utils.""" +import unittest +from unittest import mock + +import parameterized + +import platform_config +import filestore +from filestore import github_actions +import filestore_utils +import test_helpers + + +class GetFilestoreTest(unittest.TestCase): + """Tests for get_filestore.""" + + @parameterized.parameterized.expand([ + ({ + 'cfl_platform': 'github', + }, github_actions.GithubActionsFilestore), + ]) + def test_get_filestore(self, config_kwargs, filestore_cls): + """Tests that get_filestore returns the right filestore given a certain + platform.""" + run_config = test_helpers.create_run_config(**config_kwargs) + filestore_impl = filestore_utils.get_filestore(run_config) + self.assertIsInstance(filestore_impl, filestore_cls) + + @mock.patch('config_utils.BaseConfig.platform', return_value='other') + @mock.patch('config_utils._get_platform_config', + return_value=platform_config.BasePlatformConfig()) + def test_get_filestore_unsupported_platform(self, _, __): + """Tests that get_filestore exceptions given a platform it doesn't + support.""" + run_config = test_helpers.create_run_config() + with self.assertRaises(filestore.FilestoreError): + 
filestore_utils.get_filestore(run_config) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/fuzz_target_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/fuzz_target_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e553a25aecacfebc1a15e920fc54f19388d396d6 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/fuzz_target_test.py @@ -0,0 +1,298 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests the functionality of the fuzz_target module.""" + +import os +import shutil +import tempfile +import unittest +from unittest import mock + +import certifi +# Importing this later causes import failures with pytest for some reason. +# TODO(ochang): Figure out why. +import parameterized +import google.cloud.ndb # pylint: disable=unused-import +from pyfakefs import fake_filesystem_unittest +from clusterfuzz.fuzz import engine + +import clusterfuzz_deployment +import fuzz_target +import test_helpers +import workspace_utils + +# NOTE: This integration test relies on +# https://github.com/google/oss-fuzz/tree/master/projects/example project. +EXAMPLE_PROJECT = 'example' + +# An example fuzzer that triggers an error. +EXAMPLE_FUZZER = 'example_crash_fuzzer' + +# Mock return values for engine_impl.reproduce. 
+EXECUTE_SUCCESS_RESULT = engine.ReproduceResult([], 0, 0, '') +EXECUTE_FAILURE_RESULT = engine.ReproduceResult([], 1, 0, '') + +TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'test_data') + + +def _create_config(**kwargs): + """Creates a config object and then sets every attribute that is a key in + |kwargs| to the corresponding value. Asserts that each key in |kwargs| is an + attribute of Config.""" + defaults = { + 'cfl_platform': 'github', + 'oss_fuzz_project_name': EXAMPLE_PROJECT, + 'workspace': '/workspace' + } + for default_key, default_value in defaults.items(): + if default_key not in kwargs: + kwargs[default_key] = default_value + + return test_helpers.create_run_config(**kwargs) + + +def _create_deployment(**kwargs): + config = _create_config(**kwargs) + workspace = workspace_utils.Workspace(config) + return clusterfuzz_deployment.get_clusterfuzz_deployment(config, workspace) + + +@mock.patch('utils.get_container_name', return_value='container') +class IsReproducibleTest(fake_filesystem_unittest.TestCase): + """Tests the is_reproducible method in the fuzz_target.FuzzTarget class.""" + + def setUp(self): + """Sets up example fuzz target to test is_reproducible method.""" + self.fuzz_target_name = 'fuzz-target' + deployment = _create_deployment() + self.config = deployment.config + self.workspace = deployment.workspace + self.fuzz_target_path = os.path.join(self.workspace.out, + self.fuzz_target_name) + self.setUpPyfakefs() + self.fs.create_file(self.fuzz_target_path) + self.testcase_path = '/testcase' + self.fs.create_file(self.testcase_path) + + self.target = fuzz_target.FuzzTarget(self.fuzz_target_path, + fuzz_target.REPRODUCE_ATTEMPTS, + self.workspace, deployment, + deployment.config) + + # ClusterFuzz requires ROOT_DIR. 
+ root_dir = os.environ['ROOT_DIR'] + test_helpers.patch_environ(self, empty=True) + os.environ['ROOT_DIR'] = root_dir + + # There's an extremely bad issue that happens if this test is run: Other tests + # in this file fail in CI with stacktraces using referencing fakefs even if + # the tests do not use fakefs. + # TODO(metzman): Stop using fakefs. + @mock.patch('os.chmod') + @unittest.skip('Skip because of weird failures.') + def test_repro_timed_out(self, mock_chmod, mock_get_container_name): + """Tests that is_reproducible behaves correctly when reproduction times + out.""" + del mock_get_container_name + del mock_chmod + + with mock.patch( + 'clusterfuzz._internal.bot.fuzzers.libFuzzer.engine.LibFuzzerEngine.' + 'reproduce', + side_effect=TimeoutError): + self.assertFalse( + self.target.is_reproducible('/testcase', self.target.target_path, [])) + + def test_reproducible(self, _): + """Tests that is_reproducible returns True if crash is detected and that + is_reproducible uses the correct command to reproduce a crash.""" + all_repro = [EXECUTE_FAILURE_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS + with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine: + mock_get_engine().reproduce.side_effect = all_repro + + result = self.target.is_reproducible(self.testcase_path, + self.fuzz_target_path, []) + mock_get_engine().reproduce.assert_called_once_with( + '/workspace/build-out/fuzz-target', + '/testcase', + arguments=[], + max_time=30) + self.assertTrue(result) + self.assertEqual(1, mock_get_engine().reproduce.call_count) + + def test_flaky(self, _): + """Tests that is_reproducible returns True if crash is detected on the last + attempt.""" + last_time_repro = [EXECUTE_SUCCESS_RESULT] * 9 + [EXECUTE_FAILURE_RESULT] + with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine: + mock_get_engine().reproduce.side_effect = last_time_repro + self.assertTrue( + self.target.is_reproducible(self.testcase_path, self.fuzz_target_path, + [])) + 
self.assertEqual(fuzz_target.REPRODUCE_ATTEMPTS, + mock_get_engine().reproduce.call_count) + + def test_nonexistent_fuzzer(self, _): + """Tests that is_reproducible raises an error if it could not attempt + reproduction because the fuzzer doesn't exist.""" + with self.assertRaises(fuzz_target.ReproduceError): + self.target.is_reproducible(self.testcase_path, '/non-existent-path', []) + + def test_unreproducible(self, _): + """Tests that is_reproducible returns False for a crash that did not + reproduce.""" + all_unrepro = [EXECUTE_SUCCESS_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS + with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine: + mock_get_engine().reproduce.side_effect = all_unrepro + result = self.target.is_reproducible(self.testcase_path, + self.fuzz_target_path, []) + self.assertFalse(result) + + +class IsCrashReportableTest(fake_filesystem_unittest.TestCase): + """Tests the is_crash_reportable method of FuzzTarget.""" + + def setUp(self): + """Sets up example fuzz target to test is_crash_reportable method.""" + self.setUpPyfakefs() + self.fuzz_target_path = '/example/do_stuff_fuzzer' + deployment = _create_deployment() + self.target = fuzz_target.FuzzTarget(self.fuzz_target_path, 100, + deployment.workspace, deployment, + deployment.config) + self.oss_fuzz_build_path = '/oss-fuzz-build' + self.fs.create_file(self.fuzz_target_path) + self.oss_fuzz_target_path = os.path.join( + self.oss_fuzz_build_path, os.path.basename(self.fuzz_target_path)) + self.fs.create_file(self.oss_fuzz_target_path) + self.testcase_path = '/testcase' + self.fs.create_file(self.testcase_path, contents='') + + # Do this to prevent pyfakefs from messing with requests. 
+ self.fs.add_real_directory(os.path.dirname(certifi.__file__)) + + @mock.patch('fuzz_target.FuzzTarget.is_reproducible', + side_effect=[True, False]) + @mock.patch('logging.info') + def test_new_reproducible_crash(self, mock_info, _): + """Tests that a new reproducible crash returns True.""" + with tempfile.TemporaryDirectory() as tmp_dir: + self.target.out_dir = tmp_dir + self.assertTrue(self.target.is_crash_reportable(self.testcase_path, [])) + mock_info.assert_called_with( + 'The crash is not reproducible on previous build. ' + 'Code change (pr/commit) introduced crash.') + + # yapf: disable + @parameterized.parameterized.expand([ + # Reproducible on PR build, but also reproducible on OSS-Fuzz. + ([True, True],), + + # Not reproducible on PR build, but somehow reproducible on OSS-Fuzz. + # Unlikely to happen in real world except if test is flaky. + ([False, True],), + + # Not reproducible on PR build, and not reproducible on OSS-Fuzz. + ([False, False],), + ]) + # yapf: enable + def test_invalid_crash(self, is_reproducible_retvals): + """Tests that a nonreportable crash causes the method to return False.""" + with mock.patch('fuzz_target.FuzzTarget.is_reproducible', + side_effect=is_reproducible_retvals): + with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build', + return_value=self.oss_fuzz_build_path): + self.assertFalse(self.target.is_crash_reportable( + self.testcase_path, [])) + + @mock.patch('logging.info') + @mock.patch('fuzz_target.FuzzTarget.is_reproducible', return_value=[True]) + def test_reproducible_no_oss_fuzz_target(self, _, mock_info): + """Tests that is_crash_reportable returns True when a crash reproduces on + the PR build but the target is not in the OSS-Fuzz build (usually because it + is new).""" + os.remove(self.oss_fuzz_target_path) + + def is_reproducible_side_effect(testcase, target_path, reproduce_arguments): + del testcase + del reproduce_arguments + if os.path.dirname(target_path) == self.oss_fuzz_build_path: + raise 
fuzz_target.ReproduceError() + return True + + with mock.patch( + 'fuzz_target.FuzzTarget.is_reproducible', + side_effect=is_reproducible_side_effect) as mock_is_reproducible: + with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build', + return_value=self.oss_fuzz_build_path): + self.assertTrue(self.target.is_crash_reportable(self.testcase_path, [])) + mock_is_reproducible.assert_any_call(self.testcase_path, + self.oss_fuzz_target_path, []) + mock_info.assert_called_with( + 'Could not run previous build of target to determine if this code ' + 'change (pr/commit) introduced crash. Assuming crash was newly ' + 'introduced.') + + +class FuzzTest(fake_filesystem_unittest.TestCase): + """Fuzz test.""" + + def setUp(self): + """Sets up example fuzz target.""" + self.setUpPyfakefs() + deployment = _create_deployment() + config = deployment.config + workspace = deployment.workspace + self.fuzz_target = fuzz_target.FuzzTarget('/path/fuzz-target', 10, + workspace, deployment, config) + + def test_get_fuzz_target_artifact(self): + """Tests that get_fuzz_target_artifact works as intended.""" + # pylint: disable=protected-access + fuzz_target_artifact = self.fuzz_target._target_artifact_path() + self.assertEqual('/workspace/out/artifacts/fuzz-target/address', + fuzz_target_artifact) + + +class TimeoutIntegrationTest(unittest.TestCase): + """Tests handling of fuzzer timeout (timeout crashes reported by + libFuzzer).""" + TIMEOUT_FUZZER_NAME = 'timeout_fuzzer' + + @parameterized.parameterized.expand([(True, True), (False, False)]) + def test_timeout_reported(self, report_timeouts, expect_crash): + """Tests that timeouts are not reported.""" + with test_helpers.temp_dir_copy(TEST_DATA_PATH) as temp_dir: + fuzz_target_path = os.path.join(temp_dir, 'build-out', + self.TIMEOUT_FUZZER_NAME) + shutil.copy(os.path.join(temp_dir, self.TIMEOUT_FUZZER_NAME), + fuzz_target_path) + deployment = _create_deployment(workspace=temp_dir, + report_timeouts=report_timeouts) + config = 
deployment.config + fuzz_target_obj = fuzz_target.FuzzTarget(fuzz_target_path, + fuzz_target.REPRODUCE_ATTEMPTS, + deployment.workspace, deployment, + config) + with mock.patch('clusterfuzz._internal.bot.fuzzers.libfuzzer.' + 'fix_timeout_argument_for_reproduction') as _: + with mock.patch( + 'clusterfuzz._internal.bot.fuzzers.libFuzzer.fuzzer.get_arguments', + return_value=['-timeout=1', '-rss_limit_mb=2560']): + fuzz_result = fuzz_target_obj.fuzz() + self.assertEqual(bool(fuzz_result.testcase), expect_crash) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report.py new file mode 100644 index 0000000000000000000000000000000000000000..9901c452a0fe536670e5ec6da68b73a902d985e5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report.py @@ -0,0 +1,48 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Module for generating coverage reports.""" +import os + +import base_runner_utils +import fuzz_target +import utils + + +def run_coverage_command(config, workspace): + """Runs the coverage command in base-runner to generate a coverage report.""" + env = base_runner_utils.get_env(config, workspace) + env['HTTP_PORT'] = '' + env['COVERAGE_EXTRA_ARGS'] = '' + env['CORPUS_DIR'] = workspace.corpora + env['COVERAGE_OUTPUT_DIR'] = workspace.coverage_report + command = 'coverage' + return utils.execute(command, env=env) + + +def download_corpora(fuzz_target_paths, clusterfuzz_deployment): + """Downloads corpora for fuzz targets in |fuzz_target_paths| using + |clusterfuzz_deployment| to download corpora from ClusterFuzz/OSS-Fuzz.""" + for target_path in fuzz_target_paths: + target_name = os.path.basename(target_path) + corpus_dir = fuzz_target.get_fuzz_target_corpus_dir( + clusterfuzz_deployment.workspace, target_name) + clusterfuzz_deployment.download_corpus(target_name, corpus_dir) + + +def generate_coverage_report(fuzz_target_paths, workspace, + clusterfuzz_deployment, config): + """Generates a coverage report using Clang's source based coverage.""" + download_corpora(fuzz_target_paths, clusterfuzz_deployment) + run_coverage_command(config, workspace) + clusterfuzz_deployment.upload_coverage() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report_test.py new file mode 100644 index 0000000000000000000000000000000000000000..df2c9b206b84cdc92b5304f2669ea4cadcc71cae --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/generate_coverage_report_test.py @@ -0,0 +1,71 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for generate_coverage_report.""" + +import unittest +from unittest import mock + +import generate_coverage_report +import test_helpers + +OUT_DIR = '/outdir' +PROJECT = 'example-project' +SANITIZER = 'coverage' + + +class TestRunCoverageCommand(unittest.TestCase): + """Tests run_coverage_command""" + + def setUp(self): + test_helpers.patch_environ(self, empty=True) + + @mock.patch('utils.execute') + def test_run_coverage_command(self, mock_execute): # pylint: disable=no-self-use + """Tests that run_coverage_command works as intended.""" + config = test_helpers.create_run_config(oss_fuzz_project_name=PROJECT, + sanitizer=SANITIZER) + workspace = test_helpers.create_workspace() + generate_coverage_report.run_coverage_command(config, workspace) + expected_command = 'coverage' + expected_env = { + 'SANITIZER': config.sanitizer, + 'FUZZING_LANGUAGE': config.language, + 'OUT': workspace.out, + 'CIFUZZ': 'True', + 'FUZZING_ENGINE': 'libfuzzer', + 'ARCHITECTURE': 'x86_64', + 'FUZZER_ARGS': '-rss_limit_mb=2560 -timeout=25', + 'HTTP_PORT': '', + 'COVERAGE_EXTRA_ARGS': '', + 'CORPUS_DIR': workspace.corpora, + 'COVERAGE_OUTPUT_DIR': workspace.coverage_report + } + mock_execute.assert_called_with(expected_command, env=expected_env) + + +class DownloadCorporaTest(unittest.TestCase): + """Tests for download_corpora.""" + + def test_download_corpora(self): # pylint: disable=no-self-use + """Tests that download_corpora works as intended.""" + clusterfuzz_deployment = mock.Mock() + clusterfuzz_deployment.workspace = test_helpers.create_workspace() + 
fuzz_target_paths = ['/path/to/fuzzer1', '/path/to/fuzzer2'] + expected_calls = [ + mock.call('fuzzer1', '/workspace/cifuzz-corpus/fuzzer1'), + mock.call('fuzzer2', '/workspace/cifuzz-corpus/fuzzer2') + ] + generate_coverage_report.download_corpora(fuzz_target_paths, + clusterfuzz_deployment) + clusterfuzz_deployment.download_corpus.assert_has_calls(expected_calls) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..762de7928cb93f074b73b51a37514e3807104d37 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py @@ -0,0 +1,208 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Module for determining coverage of fuzz targets.""" +import json +import logging +import os +import sys + +import http_utils + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +import utils + +# The path to get OSS-Fuzz project's latest report json file.` +OSS_FUZZ_LATEST_COVERAGE_INFO_PATH = 'oss-fuzz-coverage/latest_report_info/' + + +# pylint: disable=too-few-public-methods +class CoverageError(Exception): + """Exceptions for project coverage.""" + + +class BaseCoverage: + """Gets coverage data for a project.""" + + def __init__(self, repo_path): + self.repo_path = _normalize_repo_path(repo_path) + + def get_files_covered_by_target(self, target): + """Returns a list of source files covered by the specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + A list of files that the fuzz target covers or None. + """ + target_cov = self.get_target_coverage(target) + if not target_cov: + logging.info('No coverage available for %s.', target) + return None + + coverage_per_file = get_coverage_per_file(target_cov) + if not coverage_per_file: + logging.info('No files found in coverage report.') + return None + + affected_file_list = [] + for file_cov in coverage_per_file: + norm_file_path = os.path.normpath(file_cov['filename']) + if not norm_file_path.startswith(self.repo_path): + # Exclude files outside of the main repo. + continue + + if not is_file_covered(file_cov): + # Don't consider a file affected if code in it is never executed. + continue + + # TODO(metzman): It's weird to me that we access file_cov['filename'] + # again and not norm_file_path, figure out if this makes sense. + relative_path = utils.remove_prefix(file_cov['filename'], self.repo_path) + affected_file_list.append(relative_path) + + return affected_file_list + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. 
+ + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + raise NotImplementedError('Child class must implement method.') + + +class OSSFuzzCoverage(BaseCoverage): + """Gets coverage data for a project from OSS-Fuzz.""" + + def __init__(self, repo_path, oss_fuzz_project_name): + """Constructor for OSSFuzzCoverage.""" + super().__init__(repo_path) + self.oss_fuzz_project_name = oss_fuzz_project_name + self.fuzzer_stats_url = _get_oss_fuzz_fuzzer_stats_dir_url( + self.oss_fuzz_project_name) + if self.fuzzer_stats_url is None: + raise CoverageError('Could not get latest coverage.') + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + if not self.fuzzer_stats_url: + return None + + target_url = utils.url_join(self.fuzzer_stats_url, target + '.json') + return http_utils.get_json_from_url(target_url) + + +def _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name): + """Gets and returns a dictionary containing the latest coverage report info + for |project|.""" + latest_report_info_url = utils.url_join(utils.GCS_BASE_URL, + OSS_FUZZ_LATEST_COVERAGE_INFO_PATH, + oss_fuzz_project_name + '.json') + latest_cov_info = http_utils.get_json_from_url(latest_report_info_url) + if latest_cov_info is None: + logging.error('Could not get the coverage report json from url: %s.', + latest_report_info_url) + return None + return latest_cov_info + + +def _get_oss_fuzz_fuzzer_stats_dir_url(oss_fuzz_project_name): + """Gets latest coverage report info for a specific OSS-Fuzz project from + GCS. + + Args: + oss_fuzz_project_name: The name of the project. + + Returns: + The projects coverage report info in json dict or None on failure. 
+ """ + latest_cov_info = _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name) + + if not latest_cov_info: + return None + + if 'fuzzer_stats_dir' not in latest_cov_info: + logging.error('fuzzer_stats_dir not in latest coverage info.') + return None + + fuzzer_stats_dir_gs_url = latest_cov_info['fuzzer_stats_dir'] + fuzzer_stats_dir_url = utils.gs_url_to_https(fuzzer_stats_dir_gs_url) + return fuzzer_stats_dir_url + + +class FilesystemCoverage(BaseCoverage): + """Class that gets a project's coverage from the filesystem.""" + + def __init__(self, repo_path, project_coverage_dir): + super().__init__(repo_path) + self.project_coverage_dir = project_coverage_dir + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + logging.info('Getting coverage for %s from filesystem.', target) + fuzzer_stats_json_path = os.path.join(self.project_coverage_dir, + 'fuzzer_stats', target + '.json') + if not os.path.exists(fuzzer_stats_json_path): + logging.warning('%s does not exist.', fuzzer_stats_json_path) + return None + + with open(fuzzer_stats_json_path) as fuzzer_stats_json_file_handle: + try: + return json.load(fuzzer_stats_json_file_handle) + except json.decoder.JSONDecodeError as err: + logging.error('Could not decode: %s. 
Error: %s.', + fuzzer_stats_json_path, err) + return None + + +def is_file_covered(file_cov): + """Returns whether the file is covered.""" + return file_cov['summary']['regions']['covered'] + + +def get_coverage_per_file(target_cov): + """Returns the coverage per file within |target_cov|.""" + try: + return target_cov['data'][0]['files'] + except (IndexError, TypeError, KeyError): + logging.error('target_cov: %s is malformed.', target_cov) + return None + + +def _normalize_repo_path(repo_path): + """Normalizes and returns |repo_path| to make sure cases like /src/curl and + /src/curl/ are both handled.""" + repo_path = os.path.normpath(repo_path) + if not repo_path.endswith('/'): + repo_path += '/' + return repo_path diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3551f22adf5c0f77a2627c986de033bc71a8febb --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/get_coverage_test.py @@ -0,0 +1,253 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for get_coverage.py""" +import os +import json +import unittest +from unittest import mock + +import parameterized +from pyfakefs import fake_filesystem_unittest +import pytest + +import get_coverage + +# pylint: disable=protected-access + +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), + 'test_data') + +PROJECT_NAME = 'curl' +REPO_PATH = '/src/curl' +FUZZ_TARGET = 'curl_fuzzer' +PROJECT_COV_JSON_FILENAME = 'example_curl_cov.json' +FUZZ_TARGET_COV_JSON_FILENAME = 'example_curl_fuzzer_cov.json' +INVALID_TARGET = 'not-a-fuzz-target' + +with open(os.path.join(TEST_DATA_PATH, + PROJECT_COV_JSON_FILENAME),) as cov_file_handle: + PROJECT_COV_INFO = json.loads(cov_file_handle.read()) + + +class GetOssFuzzFuzzerStatsDirUrlTest(unittest.TestCase): + """Tests _get_oss_fuzz_fuzzer_stats_dir_url.""" + + @mock.patch('http_utils.get_json_from_url', + return_value={ + 'fuzzer_stats_dir': + 'gs://oss-fuzz-coverage/systemd/fuzzer_stats/20210303' + }) + def test_get_valid_project(self, mock_get_json_from_url): + """Tests that a project's coverage report can be downloaded and parsed. + + NOTE: This test relies on the PROJECT_NAME repo's coverage report. + The "example" project was not used because it has no coverage reports. 
+ """ + result = get_coverage._get_oss_fuzz_fuzzer_stats_dir_url(PROJECT_NAME) + (url,), _ = mock_get_json_from_url.call_args + self.assertEqual( + 'https://storage.googleapis.com/oss-fuzz-coverage/' + 'latest_report_info/curl.json', url) + + expected_result = ( + 'https://storage.googleapis.com/oss-fuzz-coverage/systemd/fuzzer_stats/' + '20210303') + self.assertEqual(result, expected_result) + + def test_get_invalid_project(self): + """Tests that passing a bad project returns None.""" + self.assertIsNone( + get_coverage._get_oss_fuzz_fuzzer_stats_dir_url('not-a-proj')) + + +class OSSFuzzCoverageGetTargetCoverageTest(unittest.TestCase): + """Tests OSSFuzzCoverage.get_target_coverage.""" + + def setUp(self): + with mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info', + return_value=PROJECT_COV_INFO): + self.oss_fuzz_coverage = get_coverage.OSSFuzzCoverage( + REPO_PATH, PROJECT_NAME) + + @mock.patch('http_utils.get_json_from_url', return_value={}) + def test_valid_target(self, mock_get_json_from_url): + """Tests that a target's coverage report can be downloaded and parsed.""" + self.oss_fuzz_coverage.get_target_coverage(FUZZ_TARGET) + (url,), _ = mock_get_json_from_url.call_args + self.assertEqual( + 'https://storage.googleapis.com/oss-fuzz-coverage/' + 'curl/fuzzer_stats/20200226/curl_fuzzer.json', url) + + def test_invalid_target(self): + """Tests that passing an invalid target coverage report returns None.""" + self.assertIsNone( + self.oss_fuzz_coverage.get_target_coverage(INVALID_TARGET)) + + @mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info', + return_value=None) + def test_invalid_project_json(self, _): # pylint: disable=no-self-use + """Tests an invalid project JSON results in None being returned.""" + with pytest.raises(get_coverage.CoverageError): + get_coverage.OSSFuzzCoverage(REPO_PATH, PROJECT_NAME) + + +def _get_expected_curl_covered_file_list(): + """Returns the expected covered file list for + FUZZ_TARGET_COV_JSON_FILENAME.""" 
+ curl_files_list_path = os.path.join(TEST_DATA_PATH, + 'example_curl_file_list.json') + with open(curl_files_list_path) as file_handle: + return json.loads(file_handle.read()) + + +def _get_example_curl_coverage(): + """Returns the contents of the fuzzer stats JSON file for + FUZZ_TARGET_COV_JSON_FILENAME.""" + with open(os.path.join(TEST_DATA_PATH, + FUZZ_TARGET_COV_JSON_FILENAME)) as file_handle: + return json.loads(file_handle.read()) + + +class OSSFuzzCoverageGetFilesCoveredByTargetTest(unittest.TestCase): + """Tests OSSFuzzCoverage.get_files_covered_by_target.""" + + def setUp(self): + with mock.patch('get_coverage._get_oss_fuzz_latest_cov_report_info', + return_value=PROJECT_COV_INFO): + self.oss_fuzz_coverage = get_coverage.OSSFuzzCoverage( + REPO_PATH, PROJECT_NAME) + + @parameterized.parameterized.expand([({ + 'data': [] + },), ({ + 'data': [[]] + },), ({ + 'data': [{}] + },)]) + def test_malformed_cov_data(self, coverage_data): + """Tests that covered files can be retrieved from a coverage report.""" + with mock.patch('get_coverage.OSSFuzzCoverage.get_target_coverage', + return_value=coverage_data): + self.oss_fuzz_coverage.get_files_covered_by_target(FUZZ_TARGET) + + def test_valid_target(self): + """Tests that covered files can be retrieved from a coverage report.""" + fuzzer_cov_data = _get_example_curl_coverage() + with mock.patch('get_coverage.OSSFuzzCoverage.get_target_coverage', + return_value=fuzzer_cov_data): + file_list = self.oss_fuzz_coverage.get_files_covered_by_target( + FUZZ_TARGET) + + expected_file_list = _get_expected_curl_covered_file_list() + self.assertCountEqual(file_list, expected_file_list) + + def test_invalid_target(self): + """Tests passing invalid fuzz target returns None.""" + self.assertIsNone( + self.oss_fuzz_coverage.get_files_covered_by_target(INVALID_TARGET)) + + +class FilesystemCoverageGetFilesCoveredByTargetTest( + fake_filesystem_unittest.TestCase): + """Tests FilesystemCoverage.get_files_covered_by_target.""" + + 
def setUp(self): + _fuzzer_cov_data = _get_example_curl_coverage() + self._expected_file_list = _get_expected_curl_covered_file_list() + self.coverage_path = '/coverage' + self.filesystem_coverage = get_coverage.FilesystemCoverage( + REPO_PATH, self.coverage_path) + self.setUpPyfakefs() + self.fs.create_file(os.path.join(self.coverage_path, 'fuzzer_stats', + FUZZ_TARGET + '.json'), + contents=json.dumps(_fuzzer_cov_data)) + + def test_valid_target(self): + """Tests that covered files can be retrieved from a coverage report.""" + file_list = self.filesystem_coverage.get_files_covered_by_target( + FUZZ_TARGET) + self.assertCountEqual(file_list, self._expected_file_list) + + def test_invalid_target(self): + """Tests passing invalid fuzz target returns None.""" + self.assertIsNone( + self.filesystem_coverage.get_files_covered_by_target(INVALID_TARGET)) + + +class IsFileCoveredTest(unittest.TestCase): + """Tests for is_file_covered.""" + + def test_is_file_covered_covered(self): + """Tests that is_file_covered returns True for a covered file.""" + file_coverage = { + 'filename': '/src/systemd/src/basic/locale-util.c', + 'summary': { + 'regions': { + 'count': 204, + 'covered': 200, + 'notcovered': 200, + 'percent': 98.03 + } + } + } + self.assertTrue(get_coverage.is_file_covered(file_coverage)) + + def test_is_file_covered_not_covered(self): + """Tests that is_file_covered returns False for a not covered file.""" + file_coverage = { + 'filename': '/src/systemd/src/basic/locale-util.c', + 'summary': { + 'regions': { + 'count': 204, + 'covered': 0, + 'notcovered': 0, + 'percent': 0 + } + } + } + self.assertFalse(get_coverage.is_file_covered(file_coverage)) + + +class GetOssFuzzLatestCovReportInfo(unittest.TestCase): + """Tests that _get_oss_fuzz_latest_cov_report_info works as + intended.""" + + PROJECT = 'project' + LATEST_REPORT_INFO_URL = ('https://storage.googleapis.com/oss-fuzz-coverage/' + 'latest_report_info/project.json') + + @mock.patch('logging.error') + 
@mock.patch('http_utils.get_json_from_url', return_value={'coverage': 1}) + def test_get_oss_fuzz_latest_cov_report_info(self, mock_get_json_from_url, + mock_error): + """Tests that _get_oss_fuzz_latest_cov_report_info works as intended.""" + result = get_coverage._get_oss_fuzz_latest_cov_report_info(self.PROJECT) + self.assertEqual(result, {'coverage': 1}) + mock_error.assert_not_called() + mock_get_json_from_url.assert_called_with(self.LATEST_REPORT_INFO_URL) + + @mock.patch('logging.error') + @mock.patch('http_utils.get_json_from_url', return_value=None) + def test_get_oss_fuzz_latest_cov_report_info_fail(self, _, mock_error): + """Tests that _get_oss_fuzz_latest_cov_report_info works as intended when we + can't get latest report info.""" + result = get_coverage._get_oss_fuzz_latest_cov_report_info('project') + self.assertIsNone(result) + mock_error.assert_called_with( + 'Could not get the coverage report json from url: %s.', + self.LATEST_REPORT_INFO_URL) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/http_utils.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/http_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..dfcef216766cea9ac3c56da6580f512d585af1bd --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/http_utils.py @@ -0,0 +1,118 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Utility module for HTTP."""
import json
import logging
import os
import sys
import tempfile
import zipfile

import requests

# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import retry

_DOWNLOAD_URL_RETRIES = 3
_DOWNLOAD_URL_BACKOFF = 1
_HTTP_REQUEST_TIMEOUT = 10


def download_and_unpack_zip(url, extract_directory, headers=None):
  """Downloads and unpacks a zip file from an HTTP URL.

  Args:
    url: A url to the zip file to be downloaded and unpacked.
    extract_directory: The path where the zip file should be extracted to.
    headers: (Optional) HTTP headers to send with the download request.

  Returns:
    True on success.
  """
  if headers is None:
    headers = {}

  if not os.path.exists(extract_directory):
    logging.error('Extract directory: %s does not exist.', extract_directory)
    return False

  # Gives the temporary zip file a unique identifier in the case that
  # download_and_unpack_zip is done in parallel.
  with tempfile.NamedTemporaryFile(suffix='.zip') as tmp_file:
    if not download_url(url, tmp_file.name, headers=headers):
      return False

    try:
      with zipfile.ZipFile(tmp_file.name, 'r') as zip_file:
        zip_file.extractall(extract_directory)
    except zipfile.BadZipFile:
      logging.error('Error unpacking zip from %s. Bad Zipfile.', url)
      return False

  return True


def download_url(*args, **kwargs):
  """Wrapper around _download_url that returns False if _download_url
  raises (after its retries are exhausted)."""
  try:
    return _download_url(*args, **kwargs)
  except Exception:  # pylint: disable=broad-except
    return False


def get_json_from_url(url):
  """Gets a json object from a specified HTTP URL.

  Args:
    url: The url of the json to be downloaded.

  Returns:
    A dictionary deserialized from JSON or None on failure.
  """
  try:
    return requests.get(url, timeout=_HTTP_REQUEST_TIMEOUT).json()
  except (ValueError, TypeError, json.JSONDecodeError,
          requests.exceptions.ReadTimeout) as err:
    logging.error('Loading json from url %s failed with: %s.', url, str(err))
    return None


@retry.wrap(_DOWNLOAD_URL_RETRIES, _DOWNLOAD_URL_BACKOFF)
def _download_url(url, filename, headers=None):
  """Downloads the file located at |url|, using HTTP to |filename|.

  Args:
    url: A url to a file to download.
    filename: The path the file should be downloaded to.
    headers: (Optional) HTTP headers to send with the download request.

  Returns:
    True on success.
  """
  if headers is None:
    headers = {}

  # Fix: the request previously had no timeout, so a hung connection would
  # stall forever instead of raising and being retried by retry.wrap. Note
  # the timeout is per-socket-read, not a cap on total download time, so
  # large downloads are unaffected.
  response = requests.get(url, headers=headers, timeout=_HTTP_REQUEST_TIMEOUT)

  if response.status_code != 200:
    logging.error('Unable to download from: %s. Code: %d. Content: %s.', url,
                  response.status_code, response.content)
    return False

  with open(filename, 'wb') as file_handle:
    file_handle.write(response.content)

  return True
"""Tests for http_utils.py"""

import unittest
from unittest import mock

from pyfakefs import fake_filesystem_unittest

import http_utils

# Canned successful HTTP response shared by the tests below.
mock_get_response = mock.MagicMock(status_code=200, content=b'')


class DownloadUrlTest(unittest.TestCase):
  """Tests that download_url works."""
  URL = 'https://example.com/file'
  FILE_PATH = '/tmp/file'

  @mock.patch('time.sleep')
  @mock.patch('requests.get', return_value=mock_get_response)
  def test_download_url_no_error(self, mock_get, _):
    """Tests that download_url works when there is no error."""
    self.assertTrue(http_utils.download_url(self.URL, self.FILE_PATH))
    self.assertEqual(1, mock_get.call_count)

  @mock.patch('time.sleep')
  @mock.patch('logging.error')
  @mock.patch('requests.get',
              return_value=mock.MagicMock(status_code=404, content=b''))
  def test_download_url_http_error(self, mock_get, mock_error, _):
    """Tests that download_url doesn't retry when there is an HTTP error."""
    self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH))
    mock_error.assert_called_with(
        'Unable to download from: %s. Code: %d. Content: %s.', self.URL, 404,
        b'')
    self.assertEqual(1, mock_get.call_count)

  @mock.patch('time.sleep')
  @mock.patch('requests.get', side_effect=ConnectionResetError)
  def test_download_url_connection_error(self, mock_get, mock_sleep):
    """Tests that download_url retries (with backoff) when there is a
    connection error."""
    self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH))
    # 1 initial attempt + _DOWNLOAD_URL_RETRIES retries, with a sleep
    # between attempts.
    self.assertEqual(4, mock_get.call_count)
    self.assertEqual(3, mock_sleep.call_count)


class DownloadAndUnpackZipTest(fake_filesystem_unittest.TestCase):
  """Tests download_and_unpack_zip."""

  def setUp(self):
    self.setUpPyfakefs()

  @mock.patch('requests.get', return_value=mock_get_response)
  def test_bad_zip_download(self, _):
    """Tests download_and_unpack_zip returns False when a bad zip is
    passed."""
    self.fs.create_file('/url_tmp.zip', contents='Test file.')
    self.assertFalse(
        http_utils.download_and_unpack_zip('/not/a/real/url',
                                           '/extract-directory'))
"""Module for getting the configuration CIFuzz needs to run."""
import logging
import os

import environment


class BasePlatformConfig:
  """Base class for PlatformConfig subclasses.

  Subclasses override these properties with CI-specific sources (GitHub,
  GitLab, Prow, GCB, ...); the base implementations read generic
  environment variables or return None.
  """

  @property
  def project_src_path(self):
    """Returns the manually checked out path of the project's source if
    specified or None."""
    src_path = os.getenv('PROJECT_SRC_PATH')
    if not src_path:
      logging.debug('No PROJECT_SRC_PATH.')
      return src_path
    logging.debug('PROJECT_SRC_PATH: %s.', src_path)
    return src_path

  @property
  def workspace(self):
    """Returns the workspace."""
    return os.getenv('WORKSPACE')

  # Optional config variables.

  @property
  def git_sha(self):
    """Returns the Git SHA to checkout and fuzz. Used only by GitHub
    projects when commit fuzzing (not PR fuzzing); None in the base config.

    TODO(metzman): Try to eliminate the need for this by 1. Making the clone
    from external github projects usable. 2. Forcing OSS-Fuzz on Github to
    clone before starting CIFuzz."""
    return None

  @property
  def base_commit(self):
    """Returns the base commit to diff against (commit fuzzing)."""
    # TODO(metzman) Rename base_commit to git_base_commit.
    return os.getenv('GIT_BASE_COMMIT')

  @property
  def base_ref(self):
    """Returns the base branch to diff against (pr fuzzing)."""
    # TODO(metzman) Rename base_ref to git_base_ref.
    return os.getenv('GIT_BASE_REF')

  @property
  def pr_ref(self):
    """Returns the pull request ref to checkout and fuzz. Used only by
    GitHub projects when PR fuzzing (not commit fuzzing); None in the base
    config.

    TODO(metzman): Try to eliminate the need for this by 1. Making the clone
    from external github projects usable. 2. Forcing OSS-Fuzz on Github to
    clone before starting CIFuzz."""
    return None

  @property
  def project_repo_owner(self):
    """Returns the project repo owner (githubism)."""
    return None

  @property
  def project_repo_name(self):
    """Returns the project repo name."""
    return os.environ.get('REPOSITORY')

  @property
  def actor(self):
    """Name of the actor for the CI."""
    return None

  @property
  def token(self):
    """Returns the CI API token."""
    return None

  @property
  def docker_in_docker(self):
    """Returns whether or not CFL is running using Docker in Docker."""
    return environment.get_bool('DOCKER_IN_DOCKER', False)

  @property
  def filestore(self):
    """Returns the filestore used to store persistent data."""
    return os.environ.get('FILESTORE')

  @property
  def git_url(self):
    """Returns the repo URL. Only used by GitHub users; None in the base
    config (OSS-Fuzz on GitHub determines the URL via repo detection).

    TODO(metzman): Try to eliminate the need for this by making the clone
    from external github projects usable.
    TODO(metzman): As an easier goal, maybe make OSS-Fuzz GitHub use this too
    for: 1. Consistency 2. Maybe it will allow use on forks."""
    return None
+"""Module for getting the configuration CIFuzz needs to run on Github.""" +import logging +import os + +import platform_config + + +class PlatformConfig(platform_config.BasePlatformConfig): + """CI environment for Google Cloud Build.""" + + @property + def project_src_path(self): + """Returns the manually checked out path of the project's source if + specified or the default.""" + project_src_path = os.getenv('PROJECT_SRC_PATH', '/workspace') + logging.debug('PROJECT_SRC_PATH: %s.', project_src_path) + return project_src_path + + @property + def workspace(self): + """Returns the workspace.""" + return os.getenv('WORKSPACE', '/builder/home') + + @property + def filestore(self): + """Returns the filestore used to store persistent data.""" + return os.environ.get('FILESTORE', 'gsutil') diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/github.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/github.py new file mode 100644 index 0000000000000000000000000000000000000000..dc66e495acd02bb3ede6b92e289a02e2216a219c --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/github.py @@ -0,0 +1,146 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Module for getting the configuration CIFuzz needs to run on Github."""
import json
import logging
import os

import platform_config


def _get_github_event_path():
  return os.getenv('GITHUB_EVENT_PATH')


def _get_event_data():
  """Returns the GitHub event data."""
  event_path = _get_github_event_path()
  with open(event_path, encoding='utf-8') as event_file:
    return json.load(event_file)


class PlatformConfig(platform_config.BasePlatformConfig):
  """CI environment for GitHub."""

  def __init__(self):
    self._event_data = _get_event_data()
    self._event = os.getenv('GITHUB_EVENT_NAME')

  @property
  def workspace(self):
    """Returns the workspace."""
    return os.getenv('GITHUB_WORKSPACE')

  @property
  def git_sha(self):
    """Returns the Git SHA to checkout and fuzz. Used when commit fuzzing,
    not when PR fuzzing.

    TODO(metzman): Try to eliminate the need for this by 1. Making the clone
    from external github projects usable. 2. Forcing OSS-Fuzz on Github to
    clone before starting CIFuzz."""
    return os.getenv('GITHUB_SHA')

  @property
  def actor(self):
    """Name of the actor for the CI."""
    return os.getenv('GITHUB_ACTOR')

  @property
  def token(self):
    """Returns the CI API token."""
    return os.getenv('GITHUB_TOKEN')

  @property
  def project_src_path(self):
    """Returns the manually checked out path of the project's source if
    specified or None. The path returned is relative to |self.workspace| since
    on github the checkout will be relative to there."""
    src_path = super().project_src_path
    if src_path is None:
      # Not set for internal GitHub users.
      return src_path
    # On GitHub (external users), this path is relative to |workspace|.
    return os.path.join(self.workspace, src_path)

  @property
  def _project_repo_owner_and_name(self):
    """Returns an (owner, repo name) tuple parsed from GITHUB_REPOSITORY."""
    # On GitHub this includes owner and repo name.
    repository = os.getenv('GITHUB_REPOSITORY')
    # Use os.path.split to split owner from repo.
    return os.path.split(repository)

  @property
  def project_repo_owner(self):
    """Returns the project repo owner (githubism)."""
    owner, _ = self._project_repo_owner_and_name
    return owner

  @property
  def project_repo_name(self):
    """Returns the project repo name."""
    _, repo_name = self._project_repo_owner_and_name
    return repo_name

  @property
  def git_url(self):
    """Returns the repo URL built from GITHUB_SERVER_URL and
    GITHUB_REPOSITORY.

    TODO(metzman): Try to eliminate the need for this by making the clone
    from external github projects usable.
    TODO(metzman): As an easier goal, maybe make OSS-Fuzz GitHub use this too
    for: 1. Consistency 2. Maybe it will allow use on forks."""
    repository = os.getenv('GITHUB_REPOSITORY')
    server_url = os.getenv('GITHUB_SERVER_URL', 'https://github.com')
    # TODO(metzman): Probably need to change this to github.server_url.
    return os.path.join(server_url, repository)

  @property
  def base_commit(self):
    """Returns the base commit to diff against (commit fuzzing)."""
    base_commit = self._event_data['before'] if self._event == 'push' else None
    logging.debug('base_commit: %s', base_commit)
    return base_commit

  @property
  def pr_ref(self):
    """Returns the pull request merge ref to checkout and fuzz. Used when PR
    fuzzing, not when commit fuzzing.

    TODO(metzman): Try to eliminate the need for this by 1. Making the clone
    from external github projects usable. 2. Forcing OSS-Fuzz on Github to
    clone before starting CIFuzz."""
    if self._event != 'pull_request':
      return None
    pr_ref = f'refs/pull/{self._event_data["pull_request"]["number"]}/merge'
    logging.debug('pr_ref: %s', pr_ref)
    return pr_ref

  @property
  def base_ref(self):
    """Returns the base branch to diff against (pr fuzzing)."""
    return os.getenv('GITHUB_BASE_REF')
"""Tests for platform_config.github."""
import os
import unittest
from unittest import mock

import platform_config.github
import test_helpers

# pylint: disable=arguments-differ


class GetProjectRepoOwnerAndNameTest(unittest.TestCase):
  """Tests for get_project_repo_owner and get_project_repo_name."""

  @mock.patch('platform_config.github._get_event_data', return_value={})
  def setUp(self, _):
    test_helpers.patch_environ(self)
    self.repo_owner = 'repo-owner'
    self.repo_name = 'repo-name'
    os.environ['GITHUB_REPOSITORY'] = f'{self.repo_owner}/{self.repo_name}'
    self.platform_conf = platform_config.github.PlatformConfig()

  def test_github_repository_owner(self):
    """Tests that the correct owner is returned when GITHUB_REPOSITORY
    contains both the owner and the repo name."""
    self.assertEqual(self.platform_conf.project_repo_owner, self.repo_owner)

  def test_github_repository_name(self):
    """Tests that the correct repo name is returned when GITHUB_REPOSITORY
    contains both the owner and the repo name."""
    os.environ['GITHUB_REPOSITORY'] = f'{self.repo_owner}/{self.repo_name}'
    self.assertEqual(self.platform_conf.project_repo_name, self.repo_name)


class ProjectSrcPathTest(unittest.TestCase):
  """Tests for project_src_path."""

  def setUp(self):
    test_helpers.patch_environ(self)
    self.workspace = '/workspace'
    os.environ['GITHUB_WORKSPACE'] = self.workspace
    self.project_src_dir_name = 'project-src'

  @mock.patch('platform_config.github._get_event_data', return_value={})
  def test_github_unset(self, _):
    """Tests that project_src_path returns None when no PROJECT_SRC_PATH is
    set."""
    github_env = platform_config.github.PlatformConfig()
    self.assertIsNone(github_env.project_src_path)

  @mock.patch('platform_config.github._get_event_data', return_value={})
  def test_github(self, _):
    """Tests that project_src_path returns the correct result on GitHub."""
    os.environ['PROJECT_SRC_PATH'] = self.project_src_dir_name
    expected_src_path = os.path.join(self.workspace,
                                     self.project_src_dir_name)
    github_env = platform_config.github.PlatformConfig()
    self.assertEqual(github_env.project_src_path, expected_src_path)


class GetGitUrlTest(unittest.TestCase):
  """Tests for PlatformConfig.git_url."""

  @mock.patch('platform_config.github._get_event_data', return_value={})
  def setUp(self, _):
    test_helpers.patch_environ(self)
    self.platform_conf = platform_config.github.PlatformConfig()

  def test_repository(self):
    """Tests that the correct URL is built when GITHUB_REPOSITORY contains
    the owner and repo name (as it does on GitHub)."""
    os.environ['GITHUB_REPOSITORY'] = 'repo/owner'
    self.assertEqual('https://github.com/repo/owner',
                     self.platform_conf.git_url)
"""Module for getting the configuration CIFuzz needs to run on GitLab."""
import logging
import os

import environment
import platform_config


class PlatformConfig(platform_config.BasePlatformConfig):
  """CI environment for GitLab."""

  @property
  def workspace(self):
    """Returns the workspace."""
    builds_dir = os.getenv('CI_BUILDS_DIR')
    job_id = os.getenv('CI_JOB_ID')
    return os.path.join(builds_dir, job_id)

  @property
  def git_sha(self):
    """Returns the Git SHA to checkout and fuzz."""
    return os.getenv('CI_COMMIT_SHA')

  @property
  def project_src_path(self):
    """Returns the directory with the source of the project."""
    return os.getenv('CI_PROJECT_DIR')

  @property
  def token(self):
    """Returns the job token."""
    return os.getenv('CI_JOB_TOKEN')

  @property
  def project_repo_name(self):
    """Returns the project's name."""
    return os.getenv('CI_PROJECT_NAME')

  @property
  def base_commit(self):
    """Returns the previous commit sha for commit-fuzzing."""
    pipeline_source = os.getenv('CI_PIPELINE_SOURCE')
    base_commit = (os.getenv('CI_COMMIT_BEFORE_SHA')
                   if pipeline_source == 'push' else None)
    logging.debug('base_commit: %s.', base_commit)
    return base_commit

  @property
  def base_ref(self):
    """Returns the base commit sha for a merge request."""
    # Could also be CI_MERGE_REQUEST_TARGET_BRANCH_NAME.
    return os.getenv('CI_MERGE_REQUEST_DIFF_BASE_SHA')

  @property
  def filestore(self):
    """Returns the filestore used to store persistent data."""
    return os.environ.get('FILESTORE', 'gitlab')

  @property
  def artifacts_dir(self):
    """Gitlab: returns the directory to put artifacts."""
    return environment.get('CFL_ARTIFACTS_DIR', 'artifacts')

  @property
  def cache_dir(self):
    """Gitlab: returns the directory to use as cache."""
    return environment.get('CFL_CACHE_DIR', 'cache')
"""Tests for platform_config."""
import os
import unittest

import platform_config
import test_helpers


class GetProjectRepoOwnerAndNameTest(unittest.TestCase):
  """Tests for get_project_repo_owner and get_project_repo_name."""

  def setUp(self):
    test_helpers.patch_environ(self)
    self.repo_owner = 'repo-owner'
    self.repo_name = 'repo-name'
    self.env = platform_config.BasePlatformConfig()

  def test_unset_repository(self):
    """Tests that the correct result is returned when repository is not set."""
    self.assertIsNone(self.env.project_repo_name)

  def test_owner(self):
    """Tests that the correct result is returned for owner."""
    self.assertIsNone(self.env.project_repo_owner)

  def test_empty_repository(self):
    """Tests that the correct result is returned when repository is an empty
    string."""
    os.environ['REPOSITORY'] = ''
    self.assertEqual(self.env.project_repo_name, '')

  def test_repository(self):
    """Tests that the correct result is returned when repository contains
    just the repo name (as it does outside of GitHub)."""
    os.environ['REPOSITORY'] = self.repo_name
    self.assertEqual(self.env.project_repo_name, self.repo_name)


class ProjectSrcPathTest(unittest.TestCase):
  """Tests for project_src_path."""

  def setUp(self):
    test_helpers.patch_environ(self)

  def test_not_github(self):
    """Tests that project_src_path returns the correct result outside
    GitHub."""
    src_path = 'project-src'
    os.environ['PROJECT_SRC_PATH'] = src_path
    generic_ci_env = platform_config.BasePlatformConfig()
    self.assertEqual(generic_ci_env.project_src_path, src_path)


class GetGitUrlTest(unittest.TestCase):
  """Tests for BasePlatformConfig.git_url."""

  def setUp(self):
    test_helpers.patch_environ(self)
    self.env = platform_config.BasePlatformConfig()

  def test_unset_repository(self):
    """Tests that the correct result is returned when repository is not set."""
    self.assertIsNone(self.env.git_url)

  def test_repository(self):
    """Tests that git_url stays None in the base config even when
    GITHUB_REPOSITORY is set."""
    os.environ['GITHUB_REPOSITORY'] = 'repo/owner'
    self.assertIsNone(self.env.git_url)
Prow will run ClusterfuzzLite + at the directory head for the repo.""" + project_src_path = os.getenv('PROJECT_SRC_PATH', os.getcwd()) + logging.debug('PROJECT_SRC_PATH: %s.', project_src_path) + return project_src_path + + @property + def workspace(self): + """Returns the workspace.""" + # Let Prow user override workspace, but default to using artifacts dir + return os.getenv('WORKSPACE', os.getenv('ARTIFACTS')) + + @property + def base_ref(self): + """Returns the base branch to diff against (pr fuzzing).""" + return os.getenv('PULL_BASE_REF') + + @property + def project_repo_name(self): + """Returns the project repo name.""" + return os.getenv('REPO_NAME') + + @property + def base_commit(self): + """Returns the base commit to diff against (commit fuzzing).""" + return os.getenv('PULL_BASE_SHA') + + @property + def docker_in_docker(self): + """Returns True if using Docker in Docker.""" + return True + + @property + def filestore(self): + """Returns the filestore used to store persistent data.""" + return os.environ.get('FILESTORE', 'gsutil') diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/standalone.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/standalone.py new file mode 100644 index 0000000000000000000000000000000000000000..1975dfb1b842a28b9f2ceea809ef2f0d4d7a4a49 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/platform_config/standalone.py @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for getting the configuration CIFuzz needs to run standalone.""" +import os + +import platform_config + +# pylint: disable=too-few-public-methods + + +class PlatformConfig(platform_config.BasePlatformConfig): + """CI environment for Standalone.""" + + @property + def filestore(self): + """Returns the filestore used to store persistent data.""" + return os.environ.get('FILESTORE', 'filesystem') + + @property + def filestore_root_dir(self): + """Returns the filestore used to store persistent data.""" + return os.environ['FILESTORE_ROOT_DIR'] diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers.py new file mode 100644 index 0000000000000000000000000000000000000000..a136f6991083426eb939f2c1cdef8afd28dc96ea --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers.py @@ -0,0 +1,321 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Module for running fuzzers.""" +import enum +import logging +import os +import sys +import time + +import clusterfuzz_deployment +import fuzz_target +import generate_coverage_report +import workspace_utils +import sarif_utils + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import utils + + +class RunFuzzersResult(enum.Enum): + """Enum result from running fuzzers.""" + ERROR = 0 + BUG_FOUND = 1 + NO_BUG_FOUND = 2 + + +class BaseFuzzTargetRunner: + """Base class for fuzzer runners.""" + + def __init__(self, config): + self.config = config + self.workspace = workspace_utils.Workspace(config) + self.clusterfuzz_deployment = ( + clusterfuzz_deployment.get_clusterfuzz_deployment( + self.config, self.workspace)) + + # Set by the initialize method. + self.fuzz_target_paths = None + + def get_fuzz_targets(self): + """Returns fuzz targets in out directory.""" + return utils.get_fuzz_targets(self.workspace.out) + + def initialize(self): + """Initialization method. Must be called before calling run_fuzz_targets. + Returns True on success.""" + # Use a separate initialization function so we can return False on failure + # instead of exceptioning like we need to do if this were done in the + # __init__ method. + + logging.info('Using %s sanitizer.', self.config.sanitizer) + + # TODO(metzman) Add a check to ensure we aren't over time limit. 
+ if not self.config.fuzz_seconds or self.config.fuzz_seconds < 1: + logging.error( + 'Fuzz_seconds argument must be greater than 1, but was: %s.', + self.config.fuzz_seconds) + return False + + if not os.path.exists(self.workspace.out): + logging.error('Out directory: %s does not exist.', self.workspace.out) + return False + + if not os.path.exists(self.workspace.artifacts): + os.makedirs(self.workspace.artifacts) + elif (not os.path.isdir(self.workspace.artifacts) or + os.listdir(self.workspace.artifacts)): + logging.error('Artifacts path: %s exists and is not an empty directory.', + self.workspace.artifacts) + return False + + self.fuzz_target_paths = self.get_fuzz_targets() + logging.info('Fuzz targets: %s', self.fuzz_target_paths) + if not self.fuzz_target_paths: + logging.error('No fuzz targets were found in out directory: %s.', + self.workspace.out) + return False + + return True + + def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use + """Cleans up after running |fuzz_target_obj|.""" + raise NotImplementedError('Child class must implement method.') + + def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use + """Fuzzes with |fuzz_target_obj| and returns the result.""" + raise NotImplementedError('Child class must implement method.') + + @property + def quit_on_bug_found(self): + """Property that is checked to determine if fuzzing should quit after first + bug is found.""" + raise NotImplementedError('Child class must implement method.') + + def create_fuzz_target_obj(self, target_path, run_seconds): + """Returns a fuzz target object.""" + return fuzz_target.FuzzTarget(target_path, run_seconds, self.workspace, + self.clusterfuzz_deployment, self.config) + + def run_fuzz_targets(self): + """Runs fuzz targets. Returns True if a bug was found.""" + fuzzers_left_to_run = len(self.fuzz_target_paths) + + # Make a copy since we will mutate it. 
+ fuzz_seconds = self.config.fuzz_seconds + + min_seconds_per_fuzzer = fuzz_seconds // fuzzers_left_to_run + bug_found = False + for target_path in self.fuzz_target_paths: + # By doing this, we can ensure that every fuzz target runs for at least + # min_seconds_per_fuzzer, but that other fuzzers will have longer to run + # if one ends early. + run_seconds = max(fuzz_seconds // fuzzers_left_to_run, + min_seconds_per_fuzzer) + + target = self.create_fuzz_target_obj(target_path, run_seconds) + start_time = time.time() + result = self.run_fuzz_target(target) + self.cleanup_after_fuzz_target_run(target) + + # It's OK if this goes negative since we take max when determining + # run_seconds. + fuzz_seconds -= time.time() - start_time + + fuzzers_left_to_run -= 1 + if not result.testcase or not result.stacktrace: + logging.info('Fuzzer %s finished running without reportable crashes.', + target.target_name) + continue + + bug_found = True + if self.quit_on_bug_found: + logging.info('Bug found. Stopping fuzzing.') + break + + # pylint: disable=undefined-loop-variable + if not target_path: + logging.error('Ran no fuzz targets.') + elif self.config.output_sarif: + # TODO(metzman): Handle multiple crashes. 
+ write_fuzz_result_to_sarif(result, target_path, self.workspace) + self.clusterfuzz_deployment.upload_crashes() + return bug_found + + +def write_fuzz_result_to_sarif(fuzz_result, target_path, workspace): + """Write results of fuzzing to SARIF.""" + logging.info('Writing sarif results.') + sarif_utils.write_stacktrace_to_sarif(fuzz_result.stacktrace, target_path, + workspace) + + +class PruneTargetRunner(BaseFuzzTargetRunner): + """Runner that prunes corpora.""" + + @property + def quit_on_bug_found(self): + return False + + def run_fuzz_target(self, fuzz_target_obj): + """Prunes with |fuzz_target_obj| and returns the result.""" + result = fuzz_target_obj.prune() + logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path)) + self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name, + result.corpus_path, + replace=True) + return result + + def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use + """Cleans up after pruning with |fuzz_target_obj|.""" + fuzz_target_obj.free_disk_if_needed() + + +NON_FUZZ_TARGETS_FOR_COVERAGE = { + 'llvm-symbolizer', + 'jazzer_agent_deploy.jar', + 'jazzer_driver', + 'jazzer_driver_with_sanitizer', +} + + +def is_coverage_fuzz_target(file_path): + """Returns whether |file_path| is a fuzz target binary for the purposes of a + coverage report. Inspired by infra/base-images/base-runner/coverage.""" + if not os.path.isfile(file_path): + return False + if not utils.is_executable(file_path): + return False + filename = os.path.basename(file_path) + return filename not in NON_FUZZ_TARGETS_FOR_COVERAGE + + +def get_coverage_fuzz_targets(out): + """Returns a list of fuzz targets in |out| for coverage.""" + # We only want fuzz targets from the root because during the coverage build, + # a lot of the image's filesystem is copied into /out for the purpose of + # generating coverage reports. 
+ fuzz_targets = [] + for filename in os.listdir(out): + file_path = os.path.join(out, filename) + if is_coverage_fuzz_target(file_path): + fuzz_targets.append(file_path) + return fuzz_targets + + +class CoverageTargetRunner(BaseFuzzTargetRunner): + """Runner that runs the 'coverage' command.""" + + @property + def quit_on_bug_found(self): + raise NotImplementedError('Not implemented for CoverageTargetRunner.') + + def get_fuzz_targets(self): + """Returns fuzz targets in out directory.""" + return get_coverage_fuzz_targets(self.workspace.out) + + def run_fuzz_targets(self): + """Generates a coverage report. Always returns False since it never finds + any bugs.""" + generate_coverage_report.generate_coverage_report( + self.fuzz_target_paths, self.workspace, self.clusterfuzz_deployment, + self.config) + return False + + def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use + """Fuzzes with |fuzz_target_obj| and returns the result.""" + raise NotImplementedError('Not implemented for CoverageTargetRunner.') + + def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use + """Cleans up after running |fuzz_target_obj|.""" + raise NotImplementedError('Not implemented for CoverageTargetRunner.') + + +class CiFuzzTargetRunner(BaseFuzzTargetRunner): + """Runner for fuzz targets used in CI (patch-fuzzing) context.""" + + @property + def quit_on_bug_found(self): + return True + + def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use + """Cleans up after running |fuzz_target_obj|.""" + fuzz_target_obj.free_disk_if_needed() + + def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use + return fuzz_target_obj.fuzz() + + +class BatchFuzzTargetRunner(BaseFuzzTargetRunner): + """Runner for fuzz targets used in batch fuzzing context.""" + + @property + def quit_on_bug_found(self): + return False + + def run_fuzz_target(self, fuzz_target_obj): + """Fuzzes with |fuzz_target_obj| and 
returns the result.""" + result = fuzz_target_obj.fuzz(batch=True) + logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path)) + self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name, + result.corpus_path) + return result + + def cleanup_after_fuzz_target_run(self, fuzz_target_obj): + """Cleans up after running |fuzz_target_obj|.""" + # This must be done after we upload the corpus, otherwise it will be deleted + # before we get a chance to upload it. We can't delete the fuzz target + # because it is needed when we upload the build. + fuzz_target_obj.free_disk_if_needed(delete_fuzz_target=False) + + +_MODE_RUNNER_MAPPING = { + 'batch': BatchFuzzTargetRunner, + 'coverage': CoverageTargetRunner, + 'prune': PruneTargetRunner, + 'code-change': CiFuzzTargetRunner, +} + + +def get_fuzz_target_runner(config): + """Returns a fuzz target runner object based on the mode of + |config|.""" + runner = _MODE_RUNNER_MAPPING[config.mode](config) + logging.info('run fuzzers MODE is: %s. Runner: %s.', config.mode, runner) + return runner + + +def run_fuzzers(config): # pylint: disable=too-many-locals + """Runs fuzzers for a specific OSS-Fuzz project. + + Args: + config: A RunFuzzTargetsConfig. + + Returns: + A RunFuzzersResult enum value indicating what happened during fuzzing. + """ + fuzz_target_runner = get_fuzz_target_runner(config) + if not fuzz_target_runner.initialize(): + # We didn't fuzz at all because of internal (CIFuzz) errors. And we didn't + # find any bugs. + return RunFuzzersResult.ERROR + + if not fuzz_target_runner.run_fuzz_targets(): + # We fuzzed successfully, but didn't find any bugs (in the fuzz target). + return RunFuzzersResult.NO_BUG_FOUND + + # We fuzzed successfully and found bug(s) in the fuzz targets. 
+ return RunFuzzersResult.BUG_FOUND diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e8870887c071e4694a6550af0a4f432ad3838a8f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py @@ -0,0 +1,128 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for sarif_utils.py""" +import os +import unittest +from unittest import mock + +import sarif_utils + +CRASH_INFO_FILELINE = 403 + +TEST_DATA = os.path.join(os.path.dirname(__file__), 'test_data') + + +class GetSarifDataTest(unittest.TestCase): + """Tests for get_sarif_data.""" + + def setUp(self): + self.maxDiff = None # pylint: disable=invalid-name + + def test_get_sarif_data_none(self): + """Tests get_sarif_data when there was no crash.""" + self.assertEqual(sarif_utils.get_sarif_data(None, '/root/target'), + sarif_utils.SARIF_DATA) + + def test_ordinary_case(self): + stacktrace_filename = os.path.join(TEST_DATA, + 'sarif_utils_systemd_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + expected_result = { + 'level': 'error', + 'message': { + 'text': 'Heap-buffer-overflow\nREAD 4' + }, + 'locations': [{ + 'physicalLocation': { + 'artifactLocation': { + 'uri': 'src/core/fuzz-unit-file.c', + 'index': 0 + }, + 'region': { + 'startLine': 30, + # We don't have this granualarity fuzzing. 
+ 'startColumn': 1, + } + } + }], + 'ruleId': 'heap-buffer-overflow', + 'ruleIndex': 2 + } + actual_result = sarif_utils.get_sarif_data( + stacktrace, '/root/target')['runs'][0]['results'][0] + self.assertEqual(actual_result, expected_result) + + def test_llvmfuzzertestoneinput_case(self): + stacktrace_filename = os.path.join(TEST_DATA, + 'sarif_utils_only_llvmfuzzer_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + actual_result = sarif_utils.get_sarif_data( + stacktrace, '/root/target')['runs'][0]['results'] + self.assertEqual(actual_result, []) + + def test_msan(self): + """Tests that MSAN stacktraces don't exception.""" + stacktrace_filename = os.path.join(TEST_DATA, 'sarif_utils_msan_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + + actual_result = sarif_utils.get_sarif_data(stacktrace, '/root/target') + + +class RedactSrcPathTest(unittest.TestCase): + """Tests for redact_src_path.""" + + def test_redact_src_path(self): + """Tests redact_src_path.""" + path = '/src/src-repo/subdir/file' + self.assertEqual(sarif_utils.redact_src_path(path), 'subdir/file') + + +def _get_mock_crash_info(): + """Returns a mock crash_info to be used in tests.""" + stack_frame = mock.MagicMock() + stack_frame.filename = '/src/repo-dir/sub/vuln.cc' + stack_frame.function_name = 'vuln_func' + stack_frame.fileline = CRASH_INFO_FILELINE + crash1_frames = [stack_frame, stack_frame] + frames = [crash1_frames] + crash_info = mock.MagicMock() + crash_info.frames = frames + crash_info.crash_state = 'vuln_func\nvuln_func0\nvuln_func1' + return crash_info + + +class GetErrorSourceInfoTest(unittest.TestCase): + """Tests for get_error_source_info.""" + + def test_redact_src_path(self): + """Tests that get_error_source_info finds the right source info.""" + crash_info = _get_mock_crash_info() + source_info = sarif_utils.get_error_source_info(crash_info) + expected_source_info = ('sub/vuln.cc', CRASH_INFO_FILELINE) + 
self.assertEqual(source_info, expected_source_info) + + +class GetRuleIndexTest(unittest.TestCase): + """Tests for get_rule_index.""" + CRASH_INFO_CRASH_TYPE = 'Heap-use-after-free READ 8' + + def test_get_rule_index(self): + """Tests that get_rule_index finds the right rule index.""" + index = sarif_utils.get_rule_index(self.CRASH_INFO_CRASH_TYPE) + self.assertEqual(sarif_utils.SARIF_RULES[index]['id'], + 'heap-use-after-free') + self.assertEqual(sarif_utils.get_rule_index('no-crashes'), 0) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/TimeoutFuzzer.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/TimeoutFuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3034d300ed52a6487df3f42a3e83325bae403ad9 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/TimeoutFuzzer.cpp @@ -0,0 +1,36 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Example of a standalone runner for "fuzz targets". +// It reads all files passed as parameters and feeds their contents +// one by one into the fuzz target (LLVMFuzzerTestOneInput). +// This runner does not do any fuzzing, but allows us to run the fuzz target +// on the test corpus (e.g. "do_stuff_test_data") or on a single file, +// e.g. the one that comes from a bug report. + +// This is a fuzz target that times out on every input by infinite looping. +// This is used for testing. 
+// Build instructions: +// 1. clang++ -fsanitize=fuzzer TimeoutFuzzer.cpp -o timeout_fuzzer +// 2. strip timeout_fuzzer +// The binary is stripped to save space in the git repo. + +#include +#include + +extern "C" int LLVMFuzzerTestOneInput(uint8_t* data, size_t size) { + while (true) + ; + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_bug_summary.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_bug_summary.txt new file mode 100644 index 0000000000000000000000000000000000000000..8caebad0cbf1d7778b7d7ea0346410ba012f8acd --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_bug_summary.txt @@ -0,0 +1,68 @@ +AddressSanitizer: heap-buffer-overflow on address 0x62500001b530 at pc 0x00000052138a bp 0x7ffe62db2c10 sp 0x7ffe62db23d8 +READ of size 52 at 0x62500001b530 thread T0 +SCARINESS: 26 (multi-byte-read-heap-buffer-overflow) + #0 0x521389 in __asan_memcpy /src/llvm-project/compiler-rt/lib/asan/asan_interceptors_memintrinsics.cpp:22:3 + #1 0x567590 in yr_object_set_string /src/yara/libyara/object.c:1122:5 + #2 0x5afced in dex_parse /src/yara/libyara/modules/dex/dex.c:781:5 + #3 0x5b4a8b in dex__load /src/yara/libyara/modules/dex/dex.c:1218:7 + #4 0x56537c in yr_modules_load /src/yara/libyara/modules.c:179:16 + #5 0x5d6583 in yr_execute_code /src/yara/libyara/exec.c:1276:18 + #6 0x56f5c0 in yr_scanner_scan_mem_blocks /src/yara/libyara/scanner.c:444:3 + #7 0x56bf23 in yr_rules_scan_mem_blocks /src/yara/libyara/rules.c:235:12 + #8 0x56c182 in yr_rules_scan_mem /src/yara/libyara/rules.c:285:10 + #9 0x5548d2 in LLVMFuzzerTestOneInput /src/yara/tests/oss-fuzz/dex_fuzzer.cc:40:3 + #10 0x45a3b1 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:556:15 + #11 0x459ad5 in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, 
fuzzer::InputInfo*, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:470:3 + #12 0x45be77 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:698:19 + #13 0x45cc05 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:830:5 + #14 0x44ac88 in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:824:6 + #15 0x474ab2 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:19:10 + #16 0x7f4409b7a82f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2082f) + #17 0x41e348 in _start (out/dex_fuzzer+0x41e348) + +0x62500001b530 is located 0 bytes to the right of 9264-byte region [0x625000019100,0x62500001b530) +allocated by thread T0 here: + #0 0x521f4d in malloc /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:145:3 + #1 0x4331b7 in operator new(unsigned long) (out/dex_fuzzer+0x4331b7) + #2 0x459ad5 in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:470:3 + #3 0x45be77 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:698:19 + #4 0x45cc05 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:830:5 + #5 0x44ac88 in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:824:6 + #6 0x474ab2 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:19:10 + #7 0x7f4409b7a82f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2082f) + +SUMMARY: AddressSanitizer: heap-buffer-overflow /src/llvm-project/compiler-rt/lib/asan/asan_interceptors_memintrinsics.cpp:22:3 in __asan_memcpy +Shadow bytes around the buggy address: + 0x0c4a7fffb650: 00 00 00 00 00 00 00 00 00 00 00 00 00 
00 00 00 + 0x0c4a7fffb660: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 + 0x0c4a7fffb670: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 + 0x0c4a7fffb680: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 + 0x0c4a7fffb690: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 +=>0x0c4a7fffb6a0: 00 00 00 00 00 00[fa]fa fa fa fa fa fa fa fa fa + 0x0c4a7fffb6b0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c4a7fffb6c0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c4a7fffb6d0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c4a7fffb6e0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c4a7fffb6f0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + +Shadow byte legend (one shadow byte represents 8 application bytes): + Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb + Shadow gap: cc +==12==ABORTING \ No newline at end of file diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_output.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_output.txt new file mode 100644 index 0000000000000000000000000000000000000000..64cb0af8ceda12d9b7fbbb11a4463cd7b10c476a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/example_crash_fuzzer_output.txt @@ -0,0 +1,202 @@ +/github/workspace/build-out/do_stuff_fuzzer -timeout=25 -rss_limit_mb=2560 -dict=/github/workspace/build-out/do_stuff_fuzzer.dict -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmp9p1w4p8r/ -max_total_time=600 -print_final_stats=1 /github/workspace/cifuzz-corpus/do_stuff_fuzzer >fuzz-0.log 
2>&1 +================== Job 0 exited with exit code 77 ============ +Dictionary: 3 entries +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 1 modules (86 inline 8-bit counters): 86 [0x6150e0, 0x615136), +INFO: Loaded 1 PC tables (86 PCs): 86 [0x5c8b08,0x5c9068), +INFO: 5 files found in /github/workspace/cifuzz-corpus/do_stuff_fuzzer +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 4096 bytes +INFO: seed corpus: files: 5 min: 3b max: 44b total: 64b rss: 31Mb +#6 INITED cov: 64 ft: 70 corp: 5/64b exec/s: 0 rss: 32Mb +#9 NEW cov: 64 ft: 72 corp: 6/74b lim: 4096 exec/s: 0 rss: 32Mb L: 10/44 MS: 3 ChangeBit-InsertByte-ManualDict- DE: "foo"- +#17 NEW cov: 64 ft: 74 corp: 7/86b lim: 4096 exec/s: 0 rss: 32Mb L: 12/44 MS: 3 ChangeBit-ManualDict-PersAutoDict- DE: "ouch"-"foo"- +#18 NEW cov: 64 ft: 76 corp: 8/100b lim: 4096 exec/s: 0 rss: 32Mb L: 14/44 MS: 1 CrossOver- +#22 NEW cov: 64 ft: 78 corp: 9/141b lim: 4096 exec/s: 0 rss: 32Mb L: 41/44 MS: 4 InsertRepeatedBytes-CopyPart-ChangeBinInt-ChangeBit- +#24 NEW cov: 64 ft: 80 corp: 10/151b lim: 4096 exec/s: 0 rss: 32Mb L: 10/44 MS: 2 CrossOver-CopyPart- +#28 NEW cov: 64 ft: 81 corp: 11/171b lim: 4096 exec/s: 0 rss: 32Mb L: 20/44 MS: 4 InsertByte-PersAutoDict-ShuffleBytes-ManualDict- DE: "foo"-"ouch"- +#43 NEW cov: 65 ft: 83 corp: 12/175b lim: 4096 exec/s: 0 rss: 32Mb L: 4/44 MS: 5 InsertByte-ChangeBit-ManualDict-ManualDict-EraseBytes- DE: "bar"-"bar"- +#54 NEW cov: 65 ft: 84 corp: 13/191b lim: 4096 exec/s: 0 rss: 32Mb L: 16/44 MS: 1 CrossOver- +#64 NEW cov: 65 ft: 86 corp: 14/209b lim: 4096 exec/s: 0 rss: 32Mb L: 18/44 MS: 5 InsertByte-ChangeBinInt-CMP-PersAutoDict-CopyPart- DE: "\000\000"-"foo"- +#75 REDUCE cov: 65 ft: 86 corp: 14/203b lim: 4096 exec/s: 0 rss: 32Mb L: 35/44 MS: 1 EraseBytes- +#76 REDUCE cov: 65 ft: 86 corp: 14/195b lim: 4096 exec/s: 0 rss: 32Mb L: 36/36 MS: 1 EraseBytes- +================================================================= 
+==26==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x603000003114 at pc 0x00000056d1db bp 0x7fffd6a72bf0 sp 0x7fffd6a72be8 +READ of size 4 at 0x603000003114 thread T0 +SCARINESS: 17 (4-byte-read-heap-buffer-overflow) + #0 0x56d1da in DoStuff(std::__1::basic_string, std::__1::allocator > const&) /src/cifuzz-example/my_api.cpp:26:10 + #1 0x56c5d6 in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:15:3 + #2 0x43de23 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x43d60a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x43ecd9 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:757:19 + #5 0x43f9a5 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:895:5 + #6 0x42ed0f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x458362 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7fcfaa2aa082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #9 0x41f74d in _start (build-out/do_stuff_fuzzer+0x41f74d) + +DEDUP_TOKEN: DoStuff(std::__1::basic_string, std::__1::allocator > const&)--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +0x603000003114 is located 0 bytes to the right of 20-byte region [0x603000003100,0x603000003114) +allocated by thread T0 here: + #0 0x569dfd in operator new(unsigned long) /src/llvm-project/compiler-rt/lib/asan/asan_new_delete.cpp:95:3 + #1 0x56c9d4 in __libcpp_operator_new /usr/local/bin/../include/c++/v1/new:245:10 + #2 0x56c9d4 in __libcpp_allocate 
/usr/local/bin/../include/c++/v1/new:271:10 + #3 0x56c9d4 in allocate /usr/local/bin/../include/c++/v1/__memory/allocator.h:105:38 + #4 0x56c9d4 in allocate /usr/local/bin/../include/c++/v1/__memory/allocator_traits.h:262:20 + #5 0x56c9d4 in __vallocate /usr/local/bin/../include/c++/v1/vector:931:37 + #6 0x56c9d4 in vector /usr/local/bin/../include/c++/v1/vector:1236:9 + #7 0x56c9d4 in DoStuff(std::__1::basic_string, std::__1::allocator > const&) /src/cifuzz-example/my_api.cpp:14:20 + #8 0x56c5d6 in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:15:3 + #9 0x43de23 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #10 0x43d60a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #11 0x43ecd9 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:757:19 + #12 0x43f9a5 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:895:5 + #13 0x42ed0f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #14 0x458362 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #15 0x7fcfaa2aa082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: operator new(unsigned long)--__libcpp_operator_new--__libcpp_allocate +SUMMARY: AddressSanitizer: heap-buffer-overflow /src/cifuzz-example/my_api.cpp:26:10 in DoStuff(std::__1::basic_string, std::__1::allocator > const&) +Shadow bytes around the buggy address: + 0x0c067fff85d0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff85e0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff85f0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa 
fa fa + 0x0c067fff8600: fa fa fd fd fd fa fa fa fd fd fd fa fa fa fd fd + 0x0c067fff8610: fd fa fa fa fd fd fd fa fa fa 00 00 00 00 fa fa +=>0x0c067fff8620: 00 00[04]fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8630: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8640: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8650: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8660: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8670: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): + Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==26==ABORTING +MS: 4 PersAutoDict-ManualDict-CrossOver-PersAutoDict- DE: "foo"-"ouch"-"bar"-; base unit: 1a25c32303e03f997166aee1c665b5ae9f66ec7d +0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x66,0x6f,0x6f,0xff,0x1,0x62,0x61,0x72,0x63,0x68,0x6f,0x75,0x63,0x68,0x66,0x6f,0x66,0x6e,0x6f,0x6d,0x67,0x6d, +\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377foo\377\001barchouchfofnomgm +artifact_prefix='/tmp/tmp9p1w4p8r/'; Test unit written to /tmp/tmp9p1w4p8r/crash-1c750df432fe458b738987ba69cd20a598708775 +Base64: //////////////////////9mb2//AWJhcmNob3VjaGZvZm5vbWdt +stat::number_of_executed_units: 90 +stat::average_exec_per_sec: 0 +stat::new_units_added: 11 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 33 +/github/workspace/build-out/do_stuff_fuzzer -timeout=25 -rss_limit_mb=2560 -dict=/github/workspace/build-out/do_stuff_fuzzer.dict -len_control=0 -seed=1337 
-artifact_prefix=/tmp/tmp9p1w4p8r/ -max_total_time=600 -print_final_stats=1 /github/workspace/cifuzz-corpus/do_stuff_fuzzer >fuzz-1.log 2>&1 +================== Job 1 exited with exit code 77 ============ +Dictionary: 3 entries +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 1 modules (86 inline 8-bit counters): 86 [0x6150e0, 0x615136), +INFO: Loaded 1 PC tables (86 PCs): 86 [0x5c8b08,0x5c9068), +INFO: 15 files found in /github/workspace/cifuzz-corpus/do_stuff_fuzzer +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 4096 bytes +INFO: seed corpus: files: 15 min: 3b max: 44b total: 239b rss: 31Mb +#16 INITED cov: 65 ft: 86 corp: 13/159b exec/s: 0 rss: 32Mb +#38 NEW cov: 65 ft: 88 corp: 14/188b lim: 4096 exec/s: 0 rss: 32Mb L: 29/35 MS: 2 InsertByte-CopyPart- +#43 REDUCE cov: 65 ft: 88 corp: 14/187b lim: 4096 exec/s: 0 rss: 32Mb L: 2/35 MS: 5 InsertByte-ChangeByte-ChangeBinInt-EraseBytes-EraseBytes- +#64 NEW cov: 66 ft: 89 corp: 15/191b lim: 4096 exec/s: 0 rss: 32Mb L: 4/35 MS: 1 ChangeByte- +#78 REDUCE cov: 66 ft: 89 corp: 15/190b lim: 4096 exec/s: 0 rss: 32Mb L: 15/35 MS: 4 ChangeBinInt-CopyPart-EraseBytes-EraseBytes- +#95 REDUCE cov: 66 ft: 89 corp: 15/188b lim: 4096 exec/s: 0 rss: 32Mb L: 16/35 MS: 2 ChangeBinInt-EraseBytes- +#110 REDUCE cov: 66 ft: 89 corp: 15/184b lim: 4096 exec/s: 0 rss: 32Mb L: 31/31 MS: 5 CrossOver-ChangeByte-ShuffleBytes-InsertByte-EraseBytes- +#146 REDUCE cov: 66 ft: 89 corp: 15/183b lim: 4096 exec/s: 0 rss: 32Mb L: 1/31 MS: 1 EraseBytes- +#157 NEW cov: 66 ft: 90 corp: 16/210b lim: 4096 exec/s: 0 rss: 32Mb L: 27/31 MS: 1 CrossOver- +#230 REDUCE cov: 66 ft: 92 corp: 17/318b lim: 4096 exec/s: 0 rss: 32Mb L: 108/108 MS: 3 CopyPart-InsertRepeatedBytes-CrossOver- +#277 NEW cov: 66 ft: 94 corp: 18/429b lim: 4096 exec/s: 0 rss: 32Mb L: 111/111 MS: 2 ManualDict-CMP- DE: "bar"-"\000\000\000\000"- +#337 NEW cov: 67 ft: 95 corp: 19/439b lim: 4096 exec/s: 0 rss: 32Mb L: 10/111 MS: 5 
ChangeByte-ChangeBit-ManualDict-CopyPart-EraseBytes- DE: "bar"- +================================================================= +==30==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x603000007d94 at pc 0x00000056d1db bp 0x7ffdb9da4b50 sp 0x7ffdb9da4b48 +READ of size 4 at 0x603000007d94 thread T0 +SCARINESS: 17 (4-byte-read-heap-buffer-overflow) + #0 0x56d1da in DoStuff(std::__1::basic_string, std::__1::allocator > const&) /src/cifuzz-example/my_api.cpp:26:10 + #1 0x56c5d6 in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:15:3 + #2 0x43de23 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x43d60a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x43ecd9 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:757:19 + #5 0x43f9a5 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:895:5 + #6 0x42ed0f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x458362 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7fc2c6bd3082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #9 0x41f74d in _start (build-out/do_stuff_fuzzer+0x41f74d) + +DEDUP_TOKEN: DoStuff(std::__1::basic_string, std::__1::allocator > const&)--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +0x603000007d94 is located 0 bytes to the right of 20-byte region [0x603000007d80,0x603000007d94) +allocated by thread T0 here: + #0 0x569dfd in operator new(unsigned long) /src/llvm-project/compiler-rt/lib/asan/asan_new_delete.cpp:95:3 + #1 0x56c9d4 in 
__libcpp_operator_new /usr/local/bin/../include/c++/v1/new:245:10 + #2 0x56c9d4 in __libcpp_allocate /usr/local/bin/../include/c++/v1/new:271:10 + #3 0x56c9d4 in allocate /usr/local/bin/../include/c++/v1/__memory/allocator.h:105:38 + #4 0x56c9d4 in allocate /usr/local/bin/../include/c++/v1/__memory/allocator_traits.h:262:20 + #5 0x56c9d4 in __vallocate /usr/local/bin/../include/c++/v1/vector:931:37 + #6 0x56c9d4 in vector /usr/local/bin/../include/c++/v1/vector:1236:9 + #7 0x56c9d4 in DoStuff(std::__1::basic_string, std::__1::allocator > const&) /src/cifuzz-example/my_api.cpp:14:20 + #8 0x56c5d6 in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:15:3 + #9 0x43de23 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #10 0x43d60a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #11 0x43ecd9 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:757:19 + #12 0x43f9a5 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:895:5 + #13 0x42ed0f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #14 0x458362 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #15 0x7fc2c6bd3082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: operator new(unsigned long)--__libcpp_operator_new--__libcpp_allocate +SUMMARY: AddressSanitizer: heap-buffer-overflow /src/cifuzz-example/my_api.cpp:26:10 in DoStuff(std::__1::basic_string, std::__1::allocator > const&) +Shadow bytes around the buggy address: + 0x0c067fff8f60: fa fa fd fd fd fa fa fa fd fd fd fa fa fa fd fd + 0x0c067fff8f70: fd fa fa 
fa fd fd fd fa fa fa fd fd fd fa fa fa + 0x0c067fff8f80: fd fd fd fa fa fa fd fd fd fa fa fa fd fd fd fa + 0x0c067fff8f90: fa fa fd fd fd fa fa fa fd fd fd fa fa fa fd fd + 0x0c067fff8fa0: fd fa fa fa fd fd fd fa fa fa fd fd fd fa fa fa +=>0x0c067fff8fb0: 00 00[04]fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8fc0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8fd0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8fe0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff8ff0: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c067fff9000: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): + Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==30==ABORTING +MS: 4 ShuffleBytes-ManualDict-CrossOver-PersAutoDict- DE: "foo"-"bar"-; base unit: 8fc7da8d952ed1aa5c7415e21b3d52b42402973d +0x66,0x6f,0x6f,0x63,0x6f,0x75,0x6f,0x75,0x63,0x68,0x63,0x68,0x66,0x62,0x61,0x72,0x6f,0x6f,0x6d,0x67,0x66,0x6f,0x6f,0x2f,0x99,0x6f,0x6f,0x90,0x6f,0x6f,0x6d,0x0,0x0,0x67,0x2f, +foocououchchfbaroomgfoo/\231oo\220oom\000\000g/ +artifact_prefix='/tmp/tmp9p1w4p8r/'; Test unit written to /tmp/tmp9p1w4p8r/crash-f8a2a1e84f58a2d676e5ad8078119a39b78df901 +Base64: Zm9vY291b3VjaGNoZmJhcm9vbWdmb28vmW9vkG9vbQAAZy8= +stat::number_of_executed_units: 366 +stat::average_exec_per_sec: 0 +stat::new_units_added: 11 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 33 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile 
b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..30add4d3a8d3cd9d8efc9d8b97983e161a62205b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile @@ -0,0 +1,22 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +FROM ghcr.io/aixcc-finals/base-builder +RUN apt-get update && apt-get install -y make + +COPY . $SRC/external-project +WORKDIR $SRC/external-project +COPY .clusterfuzzlite/build.sh $SRC/ diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..2c52ef90f4cf6ea50746586d7f5c0ffdc5291435 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/build.sh @@ -0,0 +1,24 @@ +#!/bin/bash -eu +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +make clean # Not strictly necessary, since we are building in a fresh dir. +make -j$(nproc) all # Build the fuzz targets. + +# Copy the fuzzer executables, zip-ed corpora, option and dictionary files to $OUT +find . -name '*_fuzzer' -exec cp -v '{}' $OUT ';' +find . -name '*_fuzzer.dict' -exec cp -v '{}' $OUT ';' # If you have dictionaries. +find . -name '*_fuzzer.options' -exec cp -v '{}' $OUT ';' # If you have custom options. diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/Makefile b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..2c177377627adacda8eda47a1f13830cb1f3998b --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/Makefile @@ -0,0 +1,44 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); + +# Simple example of a build file that nicely integrates a fuzz target +# with the rest of the project. +# +# We use 'make' as the build system, but these ideas are applicable +# to any other build system + +# By default, use our own standalone_fuzz_target_runner. +# This runner does no fuzzing, but simply executes the inputs +# provided via parameters. +# Run e.g. "make all LIB_FUZZING_ENGINE=/path/to/libFuzzer.a" +# to link the fuzzer(s) against a real fuzzing engine. 
+# +# OSS-Fuzz will define its own value for LIB_FUZZING_ENGINE. +LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o + +# Values for CC, CFLAGS, CXX, CXXFLAGS are provided by OSS-Fuzz. +# Outside of OSS-Fuzz use the ones you prefer or rely on the default values. +# Do not use the -fsanitize=* flags by default. +# OSS-Fuzz will use different -fsanitize=* flags for different builds (asan, ubsan, msan, ...) + +# You may add extra compiler flags like this: +CXXFLAGS += -std=c++11 + +all: do_stuff_fuzzer + +clean: + rm -fv *.a *.o *_fuzzer crash-* *.zip + +# Fuzz target, links against $LIB_FUZZING_ENGINE, so that +# you may choose which fuzzing engine to use. +do_stuff_fuzzer: do_stuff_fuzzer.cpp my_api.a standalone_fuzz_target_runner.o + ${CXX} ${CXXFLAGS} $< my_api.a ${LIB_FUZZING_ENGINE} -o $@ + + +# The library itself. +my_api.a: my_api.cpp my_api.h + ${CXX} ${CXXFLAGS} $< -c + ar ruv my_api.a my_api.o + +# The standalone fuzz target runner. +standalone_fuzz_target_runner.o: standalone_fuzz_target_runner.cpp diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..eaf70137c3b8aa58a6c09f89eecdba7f5ebbd010 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.cpp @@ -0,0 +1,24 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +#include "my_api.h" + +#include + +// Simple fuzz target for DoStuff(). +// See https://llvm.org/docs/LibFuzzer.html for details. +extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) { + std::string str(reinterpret_cast(data), size); + DoStuff(str); // Disregard the output. + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.dict b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.dict new file mode 100644 index 0000000000000000000000000000000000000000..6a82ac4b3aa09ea8bbe6fc127e1bad4ed1c28ebb --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/do_stuff_fuzzer.dict @@ -0,0 +1,6 @@ +# A dictionary for more efficient fuzzing of DoStuff(). +# If the inputs contain multi-byte tokens, list them here. +# See https://llvm.org/docs/LibFuzzer.html#dictionaries +"foo" +"bar" +"ouch" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9a2c1bc1c0d8a7d1a2473e0563b1a385b11ed621 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.cpp @@ -0,0 +1,36 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Implementation of "my_api". +#include "my_api.h" + +#include + +// Do some computations with 'str', return the result. +// This function contains a bug. Can you spot it? +size_t DoStuff(const std::string &str) { + std::vector Vec({0, 1, 2, 3, 4}); + size_t Idx = 0; + if (str.size() > 5) + Idx++; + if (str.find("foo") != std::string::npos) + Idx++; + if (str.find("bar") != std::string::npos) + Idx++; + if (str.find("ouch") != std::string::npos) + Idx++; + if (str.find("omg") != std::string::npos) + Idx++; + return Vec[Idx]; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.h b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.h new file mode 100644 index 0000000000000000000000000000000000000000..325aa15ccc7f6fbed196437aeb75c94a88a630a6 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/my_api.h @@ -0,0 +1,19 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// A library that does ... stuff. +// Serves as an example of good fuzz testing and OSS-Fuzz integration. +#include + +size_t DoStuff(const std::string &str); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/standalone_fuzz_target_runner.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/standalone_fuzz_target_runner.cpp new file mode 100644 index 0000000000000000000000000000000000000000..38a0454f0ac98e74052670c5eac9e5a86df9dfc7 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/standalone_fuzz_target_runner.cpp @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Example of a standalone runner for "fuzz targets". +// It reads all files passed as parameters and feeds their contents +// one by one into the fuzz target (LLVMFuzzerTestOneInput). +// This runner does not do any fuzzing, but allows us to run the fuzz target +// on the test corpus (e.g. "do_stuff_test_data") or on a single file, +// e.g. the one that comes from a bug report. + +#include +#include +#include +#include + +// Forward declare the "fuzz target" interface. +// We deliberately keep this inteface simple and header-free. 
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + +int main(int argc, char **argv) { + for (int i = 1; i < argc; i++) { + std::ifstream in(argv[i]); + in.seekg(0, in.end); + size_t length = in.tellg(); + in.seekg (0, in.beg); + std::cout << "Reading " << length << " bytes from " << argv[i] << std::endl; + // Allocate exactly length bytes so that we reliably catch buffer overflows. + std::vector bytes(length); + in.read(bytes.data(), bytes.size()); + assert(in); + LLVMFuzzerTestOneInput(reinterpret_cast(bytes.data()), + bytes.size()); + std::cout << "Execution successful" << std::endl; + } + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_bug_summary.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_bug_summary.txt new file mode 100644 index 0000000000000000000000000000000000000000..b55e9c6b73b5d645ca7fba6a196f2485bd423b24 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_bug_summary.txt @@ -0,0 +1,22 @@ +MemorySanitizer: use-of-uninitialized-value +#0 0x52675f in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:13:7 +#1 0x45a431 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:599:15 +#2 0x45ba46 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:792:3 +#3 0x45bed9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:845:3 +#4 0x44a4bc in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:906:6 +#5 0x474432 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 +#6 0x7eff5562683f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2083f) +#7 0x41eab8 in _start 
(out/do_stuff_fuzzer+0x41eab8) + +DEDUP_TOKEN: LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) +Uninitialized value was created by a heap allocation +#0 0x4d57ad in malloc /src/llvm-project/compiler-rt/lib/msan/msan_interceptors.cpp:901:3 +#1 0x437c07 in operator new(unsigned long) (out/do_stuff_fuzzer+0x437c07) +#2 0x45ba46 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:792:3 +#3 0x45bed9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:845:3 +#4 0x44a4bc in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:906:6 +#5 0x474432 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 +#6 0x7eff5562683f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2083f) +DEDUP_TOKEN: malloc--operator new(unsigned long)--fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) + +SUMMARY: \ No newline at end of file diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_output.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_output.txt new file mode 100644 index 0000000000000000000000000000000000000000..c803bfb1c928e7613baa0f7dfd39238e59ecc9b4 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/msan_crash_fuzzer_output.txt @@ -0,0 +1,39 @@ +Dictionary: 3 entries +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 1 modules (184 inline 8-bit counters): 184 [0x829300, 0x8293b8), +INFO: Loaded 1 PC tables (184 PCs): 184 [0x5dc910,0x5dd490), +INFO: 5 files found in /tmp/do_stuff_fuzzer_corpus +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 4096 bytes +==13==WARNING: MemorySanitizer: use-of-uninitialized-value +#0 0x52675f in LLVMFuzzerTestOneInput /src/cifuzz-example/do_stuff_fuzzer.cpp:13:7 +#1 0x45a431 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:599:15 +#2 0x45ba46 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:792:3 +#3 0x45bed9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:845:3 +#4 0x44a4bc in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:906:6 +#5 0x474432 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 +#6 0x7eff5562683f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2083f) +#7 0x41eab8 in _start (out/do_stuff_fuzzer+0x41eab8) + +DEDUP_TOKEN: LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) +Uninitialized value was created by a heap allocation +#0 0x4d57ad in malloc /src/llvm-project/compiler-rt/lib/msan/msan_interceptors.cpp:901:3 +#1 0x437c07 in operator new(unsigned long) (out/do_stuff_fuzzer+0x437c07) +#2 0x45ba46 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:792:3 +#3 0x45bed9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:845:3 +#4 0x44a4bc in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, 
unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:906:6 +#5 0x474432 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 +#6 0x7eff5562683f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x2083f) +DEDUP_TOKEN: malloc--operator new(unsigned long)--fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) + +SUMMARY: MemorySanitizer: use-of-uninitialized-value /src/cifuzz-example/do_stuff_fuzzer.cpp:13:7 in LLVMFuzzerTestOneInput +Unique heap origins: 65 +Stack depot allocated bytes: 4424 +Unique origin histories: 29 +History depot allocated bytes: 696 +Exiting +MS: 0 ; base unit: 0000000000000000000000000000000000000000 + + +artifact_prefix='./'; Test unit written to ./crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 +Base64: diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_msan_stack.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_msan_stack.txt new file mode 100644 index 0000000000000000000000000000000000000000..9214d8f81f9c77c12b77b6da46db80473941f9fd --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_msan_stack.txt @@ -0,0 +1,62 @@ +Dictionary: 3 entries +2024-05-07 18:49:02,343 - root - INFO - Fuzzer: do_stuff_fuzzer. Detected bug. +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 1 modules (83 inline 8-bit counters): 83 [0x563ff0e03e98, 0x563ff0e03eeb), +INFO: Loaded 1 PC tables (83 PCs): 83 [0x563ff0e03ef0,0x563ff0e04420), +INFO: 0 files found in /github/workspace/cifuzz-corpus/do_stuff_fuzzer +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 4096 bytes +INFO: A corpus is not provided, starting from an empty corpus +#2 INITED cov: 2 ft: 2 corp: 1/1b exec/s: 0 rss: 69Mb +#4 NEW cov: 3 ft: 3 corp: 2/3b lim: 4096 exec/s: 0 rss: 69Mb L: 2/2 MS: 2 ChangeBit-InsertByte- +==23==WARNING: MemorySanitizer: use-of-uninitialized-value + #0 0x563ff0d278f0 in LLVMFuzzerTestOneInput /src/cifuzz-external-example/do_stuff_fuzzer.cpp:24:1 + #1 0x563ff0c24340 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:614:13 + #2 0x563ff0c23b65 in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:516:7 + #3 0x563ff0c25335 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:760:19 + #4 0x563ff0c26125 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector>&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:905:5 + #5 0x563ff0c14436 in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:914:6 + #6 0x563ff0c40962 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7f4c156ed082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 87b331c034a6458c64ce09c03939e947212e18ce) + #8 0x563ff0c055ad in _start (build-out/do_stuff_fuzzer+0x4a5ad) + +DEDUP_TOKEN: LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) + Uninitialized value 
was stored to memory at + #0 0x563ff0d278e9 in LLVMFuzzerTestOneInput /src/cifuzz-external-example/do_stuff_fuzzer.cpp + #1 0x563ff0c24340 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:614:13 + #2 0x563ff0c23b65 in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:516:7 + #3 0x563ff0c25335 in fuzzer::Fuzzer::MutateAndTestOne() /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:760:19 + #4 0x563ff0c26125 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector>&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:905:5 + #5 0x563ff0c14436 in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:914:6 + #6 0x563ff0c40962 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7f4c156ed082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 87b331c034a6458c64ce09c03939e947212e18ce) + +DEDUP_TOKEN: LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) + Uninitialized value was created by a heap deallocation + #0 0x563ff0cc7480 in free /src/llvm-project/compiler-rt/lib/msan/msan_interceptors.cpp:218:3 + #1 0x563ff0d27800 in LLVMFuzzerTestOneInput /src/cifuzz-external-example/do_stuff_fuzzer.cpp:21:5 + #2 0x563ff0c24340 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:614:13 + #3 0x563ff0c23b65 in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:516:7 + #4 0x563ff0c25335 in fuzzer::Fuzzer::MutateAndTestOne() 
/src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:760:19 + #5 0x563ff0c26125 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector>&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:905:5 + #6 0x563ff0c14436 in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:914:6 + #7 0x563ff0c40962 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7f4c156ed082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 87b331c034a6458c64ce09c03939e947212e18ce) + +DEDUP_TOKEN: __interceptor_free--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +SUMMARY: MemorySanitizer: use-of-uninitialized-value /src/cifuzz-external-example/do_stuff_fuzzer.cpp:24:1 in LLVMFuzzerTestOneInput +Unique heap origins: 201 +Stack depot allocated bytes: 9764880 +Unique origin histories: 181 +History depot allocated bytes: 196608 +Exiting +MS: 5 ShuffleBytes-CopyPart-InsertByte-CopyPart-ManualDict- DE: "bar"-; base unit: adc83b19e793491b1c6ea0fd8b46cd9f32e592fc +0xa,0x62,0x61,0x72,0xa, +\012bar\012 +artifact_prefix='/tmp/tmp1tc5b3m2/'; Test unit written to /tmp/tmp1tc5b3m2/crash-c935b6724d7e27401ac9af3773700ca86ac38dea +Base64: CmJhcgo= +stat::number_of_executed_units: 159 +stat::average_exec_per_sec: 0 +stat::new_units_added: 1 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 69 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_only_llvmfuzzer_stack.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_only_llvmfuzzer_stack.txt new file mode 100644 index 0000000000000000000000000000000000000000..71f6e29ad3bf104ca54293b7395f3c0057c205a0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_only_llvmfuzzer_stack.txt @@ -0,0 +1,302 @@ +/github/workspace/build-out/fuzz-bus-label -timeout=25 -rss_limit_mb=2560 
-len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpsngf52cy/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bus-label >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 2 modules (82577 inline 8-bit counters): 82573 [0x7fc55e6bedf0, 0x7fc55e6d307d), 4 [0x5e9c28, 0x5e9c2c), +2023-05-04 01:02:55,065 - root - INFO - Not reporting crash in fuzz-bus-label because process timed out. +2023-05-04 01:02:55,069 - root - INFO - Deleting corpus and seed corpus of fuzz-bus-label to save disk. +INFO: Loaded 2 PC tables (82577 PCs): 82573 [0x7fc55e6d3080,0x7fc55e815950), 4 [0x5a6c78,0x5a6cb8), +INFO: 256 files found in /github/workspace/cifuzz-corpus/fuzz-bus-label +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 1048576 bytes +INFO: seed corpus: files: 256 min: 1b max: 5242880b total: 30968483b rss: 48Mb +#257 INITED cov: 41 ft: 230 corp: 111/4360b exec/s: 0 rss: 73Mb +#131072 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 76Mb +#262144 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 76Mb +#524288 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 77Mb +#609082 DONE cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43505 rss: 77Mb +Done 609082 runs in 14 second(s) +stat::number_of_executed_units: 609082 +stat::average_exec_per_sec: 43505 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 77 +/github/workspace/build-out/fuzz-bus-label -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpsngf52cy/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bus-label >fuzz-1.log 2>&1 +2023-05-04 01:02:55,082 - root - INFO - Deleting fuzz target: fuzz-bus-label. +2023-05-04 01:02:55,083 - root - INFO - Done deleting. 
+2023-05-04 01:02:55,084 - root - INFO - Fuzzer fuzz-bus-label finished running without reportable crashes. +2023-05-04 01:02:55,084 - root - INFO - Running fuzzer: fuzz-dhcp-server. +2023-05-04 01:02:55,085 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-dhcp-server/public.zip +2023-05-04 01:02:55,088 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:02:55,145 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-dhcp-server/public.zip HTTP/1.1" 200 806749 +2023-05-04 01:02:55,350 - root - INFO - Starting fuzzing +Fuzzing logs: +/github/workspace/build-out/fuzz-dhcp-server -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpyxgwwsvr/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-dhcp-server >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (83640 inline 8-bit counters): 82573 [0x7fe86ff70df0, 0x7fe86ff8507d), 1067 [0x60c638, 0x60ca63), +INFO: Loaded 2 PC tables (83640 PCs): 82573 [0x7fe86ff85080,0x7fe8700c7950), 1067 [0x60ca68,0x610d18), +INFO: 1153 files found in /github/workspace/cifuzz-corpus/fuzz-dhcp-server +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 1048576 bytes +INFO: seed corpus: files: 1153 min: 1b max: 5242879b total: 17056071b rss: 50Mb +#1155 INITED cov: 577 ft: 1116 corp: 203/54Kb exec/s: 0 rss: 86Mb +#1167 NEW cov: 577 ft: 1117 corp: 204/54Kb lim: 1048576 exec/s: 0 rss: 86Mb L: 243/1071 MS: 2 ChangeASCIIInt-CrossOver- +#1509 NEW cov: 577 ft: 1118 corp: 205/54Kb lim: 1048576 exec/s: 0 rss: 86Mb L: 243/1071 MS: 2 ChangeBit-CopyPart- +#63018 NEW cov: 577 ft: 1119 corp: 206/55Kb lim: 1048576 exec/s: 21006 rss: 86Mb L: 245/1071 MS: 4 InsertByte-ChangeByte-CMP-InsertByte- DE: "2\000\000\000\000\000\000\000"- +#65536 pulse cov: 577 ft: 1119 corp: 206/55Kb lim: 1048576 exec/s: 21845 rss: 86Mb +Loaded 1024/1155 files from /github/workspace/cifuzz-corpus/fuzz-dhcp-server +#99784 NEW cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 19956 rss: 105Mb L: 243/1071 MS: 2 ChangeASCIIInt-ChangeBinInt- +#131072 pulse cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 21845 rss: 105Mb +#262144 pulse cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 20164 rss: 105Mb +#275639 DONE cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 19688 rss: 105Mb +###### Recommended dictionary. ###### +"2\000\000\000\000\000\000\000" # Uses: 17879 +###### End of recommended dictionary. 
###### +Done 275639 runs in 14 second(s) +stat::number_of_executed_units: 275639 +stat::average_exec_per_sec: 19688 +stat::new_units_added: 4 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 105 +/github/workspace/build-out/fuzz-dhcp-server -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpyxgwwsvr/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-dhcp-server >fuzz-1.log 2>&1 +2023-05-04 01:03:18,374 - root - INFO - Not reporting crash in fuzz-dhcp-server because process timed out. +2023-05-04 01:03:18,377 - root - INFO - Deleting corpus and seed corpus of fuzz-dhcp-server to save disk. +2023-05-04 01:03:18,399 - root - INFO - Deleting fuzz target: fuzz-dhcp-server. +2023-05-04 01:03:18,400 - root - INFO - Done deleting. +2023-05-04 01:03:18,400 - root - INFO - Fuzzer fuzz-dhcp-server finished running without reportable crashes. +2023-05-04 01:03:18,401 - root - INFO - Running fuzzer: fuzz-bcd. +2023-05-04 01:03:18,401 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-bcd/public.zip +2023-05-04 01:03:18,404 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:03:18,582 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-bcd/public.zip HTTP/1.1" 200 2293254 +2023-05-04 01:03:18,804 - root - INFO - Starting fuzzing +Fuzzing logs: +/github/workspace/build-out/fuzz-bcd -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpgvqlyr8q/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bcd >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (83175 inline 8-bit counters): 82573 [0x7f1b5387edf0, 0x7f1b5389307d), 602 [0x5f8718, 0x5f8972), +INFO: Loaded 2 PC tables (83175 PCs): 82573 [0x7f1b53893080,0x7f1b539d5950), 602 [0x5f8978,0x5faf18), +INFO: 1260 files found in /github/workspace/cifuzz-corpus/fuzz-bcd +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 946175 bytes +INFO: seed corpus: files: 1260 min: 1b max: 946175b total: 10373697b rss: 48Mb +#1261 INITED cov: 137 ft: 653 corp: 282/1416Kb exec/s: 0 rss: 91Mb +#65536 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 32768 rss: 98Mb +#131072 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 32768 rss: 98Mb +#262144 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 37449 rss: 98Mb +#475081 DONE cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 33934 rss: 98Mb +Done 475081 runs in 14 second(s) +stat::number_of_executed_units: 475081 +stat::average_exec_per_sec: 33934 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 98 +/github/workspace/build-out/fuzz-bcd -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpgvqlyr8q/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bcd >fuzz-1.log 2>&1 +2023-05-04 01:03:41,821 - root - INFO - Not reporting crash in fuzz-bcd because process timed out. +2023-05-04 01:03:41,822 - root - INFO - Deleting corpus and seed corpus of fuzz-bcd to save disk. +2023-05-04 01:03:41,848 - root - INFO - Deleting fuzz target: fuzz-bcd. +2023-05-04 01:03:41,848 - root - INFO - Done deleting. +2023-05-04 01:03:41,848 - root - INFO - Fuzzer fuzz-bcd finished running without reportable crashes. +2023-05-04 01:03:41,848 - root - INFO - Running fuzzer: fuzz-varlink. 
+2023-05-04 01:03:41,849 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-varlink/public.zip +2023-05-04 01:03:41,852 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:03:41,906 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-varlink/public.zip HTTP/1.1" 200 2586434 +2023-05-04 01:03:42,671 - root - INFO - Starting fuzzing +2023-05-04 01:04:05,709 - root - INFO - Not reporting crash in fuzz-varlink because process timed out. +Fuzzing logs: +/github/workspace/build-out/fuzz-varlink -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpbtf4f932/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-varlink >fuzz-0.log 2>&1 +2023-05-04 01:04:05,709 - root - INFO - Deleting corpus and seed corpus of fuzz-varlink to save disk. +2023-05-04 01:04:05,806 - root - INFO - Deleting fuzz target: fuzz-varlink. +2023-05-04 01:04:05,806 - root - INFO - Done deleting. +2023-05-04 01:04:05,806 - root - INFO - Fuzzer fuzz-varlink finished running without reportable crashes. +2023-05-04 01:04:05,806 - root - INFO - Running fuzzer: fuzz-unit-file. 
+2023-05-04 01:04:05,807 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-unit-file/public.zip +2023-05-04 01:04:05,809 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:04:05,897 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-unit-file/public.zip HTTP/1.1" 200 4999974 +2023-05-04 01:04:07,869 - root - INFO - Starting fuzzing +2023-05-04 01:04:09,516 - root - INFO - Fuzzer: fuzz-unit-file. Detected bug. +Fuzzing logs: +/github/workspace/build-out/fuzz-unit-file -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpjhswtxiw/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-unit-file >fuzz-0.log 2>&1 +2023-05-04 01:04:09,517 - root - INFO - Trying to reproduce crash using: /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03. +================== Job 0 exited with exit code 77 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (82576 inline 8-bit counters): 82573 [0x7f3b0de4cdf0, 0x7f3b0de6107d), 3 [0x5e9b28, 0x5e9b2b), +INFO: Loaded 2 PC tables (82576 PCs): 82573 [0x7f3b0de61080,0x7f3b0dfa3950), 3 [0x5a6af8,0x5a6b28), +INFO: 20907 files found in /github/workspace/cifuzz-corpus/fuzz-unit-file +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 604973 bytes +INFO: seed corpus: files: 20907 min: 1b max: 604973b total: 16005340b rss: 61Mb +================================================================= +==74==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x602000005078 at pc 0x0000004ddffa bp 0x7fffeea64fe0 sp 0x7fffeea64fd8 +READ of size 4 at 0x602000005078 thread T0 +SCARINESS: 27 (4-byte-read-heap-buffer-overflow-far-from-bounds) + #0 0x4ddff9 in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 + #1 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #2 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #3 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #4 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #5 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #6 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #8 0x41f6ed in _start (build-out/fuzz-unit-file+0x41f6ed) + +DEDUP_TOKEN: 
LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) +0x602000005078 is located 38 bytes to the right of 2-byte region [0x602000005050,0x602000005052) +freed by thread T0 here: + #0 0x4a0882 in __interceptor_free /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:52:3 + #1 0x4ddfb7 in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:29:9 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: __interceptor_free--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +previously allocated by thread T0 here: + #0 0x4a0b26 in malloc /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:69:3 + #1 0x4ddfac in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:28:25 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) 
/src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: malloc--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +SUMMARY: AddressSanitizer: heap-buffer-overflow /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 in LLVMFuzzerTestOneInput +Shadow bytes around the buggy address: + 0x0c047fff89b0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89c0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89d0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89e0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89f0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd +=>0x0c047fff8a00: fa fa 00 02 fa fa 00 02 fa fa fd fa fa fa fa[fa] + 0x0c047fff8a10: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a20: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a30: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a40: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a50: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): + 
Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==74==ABORTING +MS: 0 ; base unit: 0000000000000000000000000000000000000000 +0x61,0x75,0x74,0x6f,0x6d,0x6f,0x75,0x6e,0x74,0x48, +automountH +artifact_prefix='/tmp/tmpjhswtxiw/'; Test unit written to /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03 +Base64: YXV0b21vdW50SA== +stat::number_of_executed_units: 314 +stat::average_exec_per_sec: 0 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 67 +/github/workspace/build-out/fuzz-unit-file -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpjhswtxiw/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-unit-file >fuzz-1.log 2>&1 +================== Job 1 exited with exit code 77 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (82576 inline 8-bit counters): 82573 [0x7ff1ba038df0, 0x7ff1ba04d07d), 3 [0x5e9b28, 0x5e9b2b), +INFO: Loaded 2 PC tables (82576 PCs): 82573 [0x7ff1ba04d080,0x7ff1ba18f950), 3 [0x5a6af8,0x5a6b28), +INFO: 20907 files found in /github/workspace/cifuzz-corpus/fuzz-unit-file +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 604973 bytes +INFO: seed corpus: files: 20907 min: 1b max: 604973b total: 16005340b rss: 61Mb +================================================================= +==78==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x602000005078 at pc 0x0000004ddffa bp 0x7ffc2e6825c0 sp 0x7ffc2e6825b8 +READ of size 4 at 0x602000005078 thread T0 +SCARINESS: 27 (4-byte-read-heap-buffer-overflow-far-from-bounds) + #0 0x4ddff9 in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 + #1 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #2 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #3 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #4 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #5 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #6 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #8 0x41f6ed in _start (build-out/fuzz-unit-file+0x41f6ed) + +DEDUP_TOKEN: 
LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) +0x602000005078 is located 38 bytes to the right of 2-byte region [0x602000005050,0x602000005052) +freed by thread T0 here: + #0 0x4a0882 in __interceptor_free /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:52:3 + #1 0x4ddfb7 in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:29:9 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: __interceptor_free--LLVMFuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +previously allocated by thread T0 here: + #0 0x4a0b26 in malloc /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:69:3 + #1 0x4ddfac in LLVMFuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:28:25 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) 
/src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: malloc--samLLVM2FuzzerTestOneInput --fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +SUMMARY: AddressSanitizer: heap-buffer-overflow /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 in samsamLLVM2FuzzerTestOneInput +Shadow bytes around the buggy address: + 0x0c047fff89b0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89c0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89d0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89e0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89f0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd +=>0x0c047fff8a00: fa fa 00 02 fa fa 00 02 fa fa fd fa fa fa fa[fa] + 0x0c047fff8a10: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a20: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a30: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a40: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a50: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): 
+ Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==78==ABORTING +MS: 0 ; base unit: 0000000000000000000000000000000000000000 +0x61,0x75,0x74,0x6f,0x6d,0x6f,0x75,0x6e,0x74,0x48, +automountH +artifact_prefix='/tmp/tmpjhswtxiw/'; Test unit written to /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03 +Base64: YXV0b21vdW50SA== +stat::number_of_executed_units: 314 +stat::average_exec_per_sec: 0 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 68 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_systemd_stack.txt b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_systemd_stack.txt new file mode 100644 index 0000000000000000000000000000000000000000..e632d3e659b53090d776c36a46885c6934d51f23 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/cifuzz/test_data/sarif_utils_systemd_stack.txt @@ -0,0 +1,302 @@ +/github/workspace/build-out/fuzz-bus-label -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpsngf52cy/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bus-label >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 2 modules (82577 inline 8-bit counters): 82573 [0x7fc55e6bedf0, 0x7fc55e6d307d), 4 [0x5e9c28, 0x5e9c2c), +2023-05-04 01:02:55,065 - root - INFO - Not reporting crash in fuzz-bus-label because process timed out. 
+2023-05-04 01:02:55,069 - root - INFO - Deleting corpus and seed corpus of fuzz-bus-label to save disk. +INFO: Loaded 2 PC tables (82577 PCs): 82573 [0x7fc55e6d3080,0x7fc55e815950), 4 [0x5a6c78,0x5a6cb8), +INFO: 256 files found in /github/workspace/cifuzz-corpus/fuzz-bus-label +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 1048576 bytes +INFO: seed corpus: files: 256 min: 1b max: 5242880b total: 30968483b rss: 48Mb +#257 INITED cov: 41 ft: 230 corp: 111/4360b exec/s: 0 rss: 73Mb +#131072 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 76Mb +#262144 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 76Mb +#524288 pulse cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43690 rss: 77Mb +#609082 DONE cov: 41 ft: 230 corp: 111/4360b lim: 1048576 exec/s: 43505 rss: 77Mb +Done 609082 runs in 14 second(s) +stat::number_of_executed_units: 609082 +stat::average_exec_per_sec: 43505 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 77 +/github/workspace/build-out/fuzz-bus-label -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpsngf52cy/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bus-label >fuzz-1.log 2>&1 +2023-05-04 01:02:55,082 - root - INFO - Deleting fuzz target: fuzz-bus-label. +2023-05-04 01:02:55,083 - root - INFO - Done deleting. +2023-05-04 01:02:55,084 - root - INFO - Fuzzer fuzz-bus-label finished running without reportable crashes. +2023-05-04 01:02:55,084 - root - INFO - Running fuzzer: fuzz-dhcp-server. 
+2023-05-04 01:02:55,085 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-dhcp-server/public.zip +2023-05-04 01:02:55,088 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:02:55,145 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-dhcp-server/public.zip HTTP/1.1" 200 806749 +2023-05-04 01:02:55,350 - root - INFO - Starting fuzzing +Fuzzing logs: +/github/workspace/build-out/fuzz-dhcp-server -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpyxgwwsvr/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-dhcp-server >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 2 modules (83640 inline 8-bit counters): 82573 [0x7fe86ff70df0, 0x7fe86ff8507d), 1067 [0x60c638, 0x60ca63), +INFO: Loaded 2 PC tables (83640 PCs): 82573 [0x7fe86ff85080,0x7fe8700c7950), 1067 [0x60ca68,0x610d18), +INFO: 1153 files found in /github/workspace/cifuzz-corpus/fuzz-dhcp-server +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 1048576 bytes +INFO: seed corpus: files: 1153 min: 1b max: 5242879b total: 17056071b rss: 50Mb +#1155 INITED cov: 577 ft: 1116 corp: 203/54Kb exec/s: 0 rss: 86Mb +#1167 NEW cov: 577 ft: 1117 corp: 204/54Kb lim: 1048576 exec/s: 0 rss: 86Mb L: 243/1071 MS: 2 ChangeASCIIInt-CrossOver- +#1509 NEW cov: 577 ft: 1118 corp: 205/54Kb lim: 1048576 exec/s: 0 rss: 86Mb L: 243/1071 MS: 2 ChangeBit-CopyPart- +#63018 NEW cov: 577 ft: 1119 corp: 206/55Kb lim: 1048576 exec/s: 21006 rss: 86Mb L: 245/1071 MS: 4 InsertByte-ChangeByte-CMP-InsertByte- DE: "2\000\000\000\000\000\000\000"- +#65536 pulse cov: 577 ft: 1119 corp: 
206/55Kb lim: 1048576 exec/s: 21845 rss: 86Mb +Loaded 1024/1155 files from /github/workspace/cifuzz-corpus/fuzz-dhcp-server +#99784 NEW cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 19956 rss: 105Mb L: 243/1071 MS: 2 ChangeASCIIInt-ChangeBinInt- +#131072 pulse cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 21845 rss: 105Mb +#262144 pulse cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 20164 rss: 105Mb +#275639 DONE cov: 577 ft: 1120 corp: 207/55Kb lim: 1048576 exec/s: 19688 rss: 105Mb +###### Recommended dictionary. ###### +"2\000\000\000\000\000\000\000" # Uses: 17879 +###### End of recommended dictionary. ###### +Done 275639 runs in 14 second(s) +stat::number_of_executed_units: 275639 +stat::average_exec_per_sec: 19688 +stat::new_units_added: 4 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 105 +/github/workspace/build-out/fuzz-dhcp-server -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpyxgwwsvr/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-dhcp-server >fuzz-1.log 2>&1 +2023-05-04 01:03:18,374 - root - INFO - Not reporting crash in fuzz-dhcp-server because process timed out. +2023-05-04 01:03:18,377 - root - INFO - Deleting corpus and seed corpus of fuzz-dhcp-server to save disk. +2023-05-04 01:03:18,399 - root - INFO - Deleting fuzz target: fuzz-dhcp-server. +2023-05-04 01:03:18,400 - root - INFO - Done deleting. +2023-05-04 01:03:18,400 - root - INFO - Fuzzer fuzz-dhcp-server finished running without reportable crashes. +2023-05-04 01:03:18,401 - root - INFO - Running fuzzer: fuzz-bcd. 
+2023-05-04 01:03:18,401 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-bcd/public.zip +2023-05-04 01:03:18,404 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:03:18,582 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-bcd/public.zip HTTP/1.1" 200 2293254 +2023-05-04 01:03:18,804 - root - INFO - Starting fuzzing +Fuzzing logs: +/github/workspace/build-out/fuzz-bcd -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpgvqlyr8q/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bcd >fuzz-0.log 2>&1 +================== Job 0 exited with exit code 0 ============ +INFO: Running with entropic power schedule (0xFF, 100). +INFO: Seed: 1337 +INFO: Loaded 2 modules (83175 inline 8-bit counters): 82573 [0x7f1b5387edf0, 0x7f1b5389307d), 602 [0x5f8718, 0x5f8972), +INFO: Loaded 2 PC tables (83175 PCs): 82573 [0x7f1b53893080,0x7f1b539d5950), 602 [0x5f8978,0x5faf18), +INFO: 1260 files found in /github/workspace/cifuzz-corpus/fuzz-bcd +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 946175 bytes +INFO: seed corpus: files: 1260 min: 1b max: 946175b total: 10373697b rss: 48Mb +#1261 INITED cov: 137 ft: 653 corp: 282/1416Kb exec/s: 0 rss: 91Mb +#65536 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 32768 rss: 98Mb +#131072 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 32768 rss: 98Mb +#262144 pulse cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 37449 rss: 98Mb +#475081 DONE cov: 137 ft: 653 corp: 282/1416Kb lim: 946175 exec/s: 33934 rss: 98Mb +Done 475081 runs in 14 second(s) +stat::number_of_executed_units: 475081 +stat::average_exec_per_sec: 33934 +stat::new_units_added: 0 
+stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 98 +/github/workspace/build-out/fuzz-bcd -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpgvqlyr8q/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-bcd >fuzz-1.log 2>&1 +2023-05-04 01:03:41,821 - root - INFO - Not reporting crash in fuzz-bcd because process timed out. +2023-05-04 01:03:41,822 - root - INFO - Deleting corpus and seed corpus of fuzz-bcd to save disk. +2023-05-04 01:03:41,848 - root - INFO - Deleting fuzz target: fuzz-bcd. +2023-05-04 01:03:41,848 - root - INFO - Done deleting. +2023-05-04 01:03:41,848 - root - INFO - Fuzzer fuzz-bcd finished running without reportable crashes. +2023-05-04 01:03:41,848 - root - INFO - Running fuzzer: fuzz-varlink. +2023-05-04 01:03:41,849 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-varlink/public.zip +2023-05-04 01:03:41,852 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:03:41,906 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-varlink/public.zip HTTP/1.1" 200 2586434 +2023-05-04 01:03:42,671 - root - INFO - Starting fuzzing +2023-05-04 01:04:05,709 - root - INFO - Not reporting crash in fuzz-varlink because process timed out. +Fuzzing logs: +/github/workspace/build-out/fuzz-varlink -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpbtf4f932/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-varlink >fuzz-0.log 2>&1 +2023-05-04 01:04:05,709 - root - INFO - Deleting corpus and seed corpus of fuzz-varlink to save disk. +2023-05-04 01:04:05,806 - root - INFO - Deleting fuzz target: fuzz-varlink. +2023-05-04 01:04:05,806 - root - INFO - Done deleting. 
+2023-05-04 01:04:05,806 - root - INFO - Fuzzer fuzz-varlink finished running without reportable crashes. +2023-05-04 01:04:05,806 - root - INFO - Running fuzzer: fuzz-unit-file. +2023-05-04 01:04:05,807 - root - INFO - Downloading corpus from OSS-Fuzz: https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-unit-file/public.zip +2023-05-04 01:04:05,809 - urllib3.connectionpool - DEBUG - Starting new HTTPS connection (1): storage.googleapis.com:443 +2023-05-04 01:04:05,897 - urllib3.connectionpool - DEBUG - https://storage.googleapis.com:443 "GET /systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_fuzz-unit-file/public.zip HTTP/1.1" 200 4999974 +2023-05-04 01:04:07,869 - root - INFO - Starting fuzzing +2023-05-04 01:04:09,516 - root - INFO - Fuzzer: fuzz-unit-file. Detected bug. +Fuzzing logs: +/github/workspace/build-out/fuzz-unit-file -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpjhswtxiw/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-unit-file >fuzz-0.log 2>&1 +2023-05-04 01:04:09,517 - root - INFO - Trying to reproduce crash using: /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03. +================== Job 0 exited with exit code 77 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (82576 inline 8-bit counters): 82573 [0x7f3b0de4cdf0, 0x7f3b0de6107d), 3 [0x5e9b28, 0x5e9b2b), +INFO: Loaded 2 PC tables (82576 PCs): 82573 [0x7f3b0de61080,0x7f3b0dfa3950), 3 [0x5a6af8,0x5a6b28), +INFO: 20907 files found in /github/workspace/cifuzz-corpus/fuzz-unit-file +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 604973 bytes +INFO: seed corpus: files: 20907 min: 1b max: 604973b total: 16005340b rss: 61Mb +================================================================= +==74==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x602000005078 at pc 0x0000004ddffa bp 0x7fffeea64fe0 sp 0x7fffeea64fd8 +READ of size 4 at 0x602000005078 thread T0 +SCARINESS: 27 (4-byte-read-heap-buffer-overflow-far-from-bounds) + #0 0x4ddff9 in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 + #1 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #2 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #3 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #4 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #5 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #6 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #8 0x41f6ed in _start (build-out/fuzz-unit-file+0x41f6ed) + +DEDUP_TOKEN: 
samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) +0x602000005078 is located 38 bytes to the right of 2-byte region [0x602000005050,0x602000005052) +freed by thread T0 here: + #0 0x4a0882 in __interceptor_free /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:52:3 + #1 0x4ddfb7 in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:29:9 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: __interceptor_free--samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +previously allocated by thread T0 here: + #0 0x4a0b26 in malloc /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:69:3 + #1 0x4ddfac in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:28:25 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) 
/src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7f3b0ce20082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: malloc--samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +SUMMARY: AddressSanitizer: heap-buffer-overflow /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 in samLLVM2FuzzerTestOneInput +Shadow bytes around the buggy address: + 0x0c047fff89b0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89c0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89d0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89e0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89f0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd +=>0x0c047fff8a00: fa fa 00 02 fa fa 00 02 fa fa fd fa fa fa fa[fa] + 0x0c047fff8a10: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a20: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a30: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a40: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a50: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): + 
Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==74==ABORTING +MS: 0 ; base unit: 0000000000000000000000000000000000000000 +0x61,0x75,0x74,0x6f,0x6d,0x6f,0x75,0x6e,0x74,0x48, +automountH +artifact_prefix='/tmp/tmpjhswtxiw/'; Test unit written to /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03 +Base64: YXV0b21vdW50SA== +stat::number_of_executed_units: 314 +stat::average_exec_per_sec: 0 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 67 +/github/workspace/build-out/fuzz-unit-file -timeout=25 -rss_limit_mb=2560 -len_control=0 -seed=1337 -artifact_prefix=/tmp/tmpjhswtxiw/ -max_total_time=13 -print_final_stats=1 /github/workspace/cifuzz-corpus/fuzz-unit-file >fuzz-1.log 2>&1 +================== Job 1 exited with exit code 77 ============ +INFO: Running with entropic power schedule (0xFF, 100). 
+INFO: Seed: 1337 +INFO: Loaded 2 modules (82576 inline 8-bit counters): 82573 [0x7ff1ba038df0, 0x7ff1ba04d07d), 3 [0x5e9b28, 0x5e9b2b), +INFO: Loaded 2 PC tables (82576 PCs): 82573 [0x7ff1ba04d080,0x7ff1ba18f950), 3 [0x5a6af8,0x5a6b28), +INFO: 20907 files found in /github/workspace/cifuzz-corpus/fuzz-unit-file +INFO: -max_len is not provided; libFuzzer will not generate inputs larger than 604973 bytes +INFO: seed corpus: files: 20907 min: 1b max: 604973b total: 16005340b rss: 61Mb +================================================================= +==78==ERROR: AddressSanitizer: heap-buffer-overflow on address 0x602000005078 at pc 0x0000004ddffa bp 0x7ffc2e6825c0 sp 0x7ffc2e6825b8 +READ of size 4 at 0x602000005078 thread T0 +SCARINESS: 27 (4-byte-read-heap-buffer-overflow-far-from-bounds) + #0 0x4ddff9 in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 + #1 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #2 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #3 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #4 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #5 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #6 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #7 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + #8 0x41f6ed in _start (build-out/fuzz-unit-file+0x41f6ed) + +DEDUP_TOKEN: 
samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long)--fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) +0x602000005078 is located 38 bytes to the right of 2-byte region [0x602000005050,0x602000005052) +freed by thread T0 here: + #0 0x4a0882 in __interceptor_free /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:52:3 + #1 0x4ddfb7 in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:29:9 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: __interceptor_free--samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +previously allocated by thread T0 here: + #0 0x4a0b26 in malloc /src/llvm-project/compiler-rt/lib/asan/asan_malloc_linux.cpp:69:3 + #1 0x4ddfac in samLLVM2FuzzerTestOneInput /work/build/../../src/systemd/src/core/fuzz-unit-file.c:28:25 + #2 0x4fc653 in fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) 
/src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:611:15 + #3 0x4fbe3a in fuzzer::Fuzzer::RunOne(unsigned char const*, unsigned long, bool, fuzzer::InputInfo*, bool, bool*) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:514:3 + #4 0x4fdca4 in fuzzer::Fuzzer::ReadAndExecuteSeedCorpora(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:826:7 + #5 0x4fded9 in fuzzer::Fuzzer::Loop(std::__Fuzzer::vector >&) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerLoop.cpp:857:3 + #6 0x4ed53f in fuzzer::FuzzerDriver(int*, char***, int (*)(unsigned char const*, unsigned long)) /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerDriver.cpp:912:6 + #7 0x516b92 in main /src/llvm-project/compiler-rt/lib/fuzzer/FuzzerMain.cpp:20:10 + #8 0x7ff1b900c082 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x24082) (BuildId: 1878e6b475720c7c51969e69ab2d276fae6d1dee) + +DEDUP_TOKEN: malloc--samLLVM2FuzzerTestOneInput--fuzzer::Fuzzer::ExecuteCallback(unsigned char const*, unsigned long) +SUMMARY: AddressSanitizer: heap-buffer-overflow /work/build/../../src/systemd/src/core/fuzz-unit-file.c:30:16 in samsamLLVM2FuzzerTestOneInput +Shadow bytes around the buggy address: + 0x0c047fff89b0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89c0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89d0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89e0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd + 0x0c047fff89f0: fa fa fd fd fa fa fd fd fa fa fd fd fa fa fd fd +=>0x0c047fff8a00: fa fa 00 02 fa fa 00 02 fa fa fd fa fa fa fa[fa] + 0x0c047fff8a10: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a20: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a30: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a40: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa + 0x0c047fff8a50: fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa fa +Shadow byte legend (one shadow byte represents 8 application bytes): 
+ Addressable: 00 + Partially addressable: 01 02 03 04 05 06 07 + Heap left redzone: fa + Freed heap region: fd + Stack left redzone: f1 + Stack mid redzone: f2 + Stack right redzone: f3 + Stack after return: f5 + Stack use after scope: f8 + Global redzone: f9 + Global init order: f6 + Poisoned by user: f7 + Container overflow: fc + Array cookie: ac + Intra object redzone: bb + ASan internal: fe + Left alloca redzone: ca + Right alloca redzone: cb +==78==ABORTING +MS: 0 ; base unit: 0000000000000000000000000000000000000000 +0x61,0x75,0x74,0x6f,0x6d,0x6f,0x75,0x6e,0x74,0x48, +automountH +artifact_prefix='/tmp/tmpjhswtxiw/'; Test unit written to /tmp/tmpjhswtxiw/crash-7be9e0f3a71d95f8d5e67c5a9d84273d6f4e3c03 +Base64: YXV0b21vdW50SA== +stat::number_of_executed_units: 314 +stat::average_exec_per_sec: 0 +stat::new_units_added: 0 +stat::slowest_unit_time_sec: 0 +stat::peak_rss_mb: 68 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/Makefile b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..04db9976eac198bd430650da656998169912dc2f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/Makefile @@ -0,0 +1,41 @@ +.POSIX: +CXX = clang++ +CFLAGS = -std=c++17 -Wall -Wextra -O3 -g3 -Werror + +all: SystemSan target target_file target_dns + +SystemSan: SystemSan.cpp inspect_dns.cpp inspect_utils.cpp + $(CXX) $(CFLAGS) -lpthread -o $@ $^ + +# Needs atheris. 
+python-test: all + ./SystemSan python shell_injection_poc_fuzzer.py + +target: target.cpp + $(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^ + +target_file: target_file.cpp + $(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^ + +target_dns: target_dns.cpp + $(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^ + +test: all vuln.dict + ./SystemSan ./target -dict=vuln.dict + ./SystemSan ./target_file -dict=vuln.dict + ./SystemSan ./target_dns -dict=vuln.dict + +pytorch-lightning-1.5.10: + cp SystemSan.cpp PoEs/pytorch-lightning-1.5.10/; \ + cd PoEs/pytorch-lightning-1.5.10/; \ + docker build . --tag syssan_pytorch-lightning; \ + docker run -t systemsan_pytorch-lightning:latest; + +node-shell-quote-v1.7.3: + cp SystemSan.cpp PoEs/node-shell-quote-v1.7.3/; \ + cd PoEs/node-shell-quote-v1.7.3/; \ + docker build . --tag systemsan_node-shell-quote; \ + docker run -t systemsan_node-shell-quote:latest; + +clean: + rm -f SystemSan /tmp/tripwire target target_file target_dns diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/build.sh new file mode 100644 index 0000000000000000000000000000000000000000..1c9b1adbf5a1b1241e50b8e88a03529cfb69e0b0 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/build.sh @@ -0,0 +1,25 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Build and install project (using current CFLAGS, CXXFLAGS). +cd pytorch-lightning +pip3 install . + +# Build fuzzers in $OUT. +for fuzzer in $(find $SRC -name '*_fuzzer.py'); do + compile_python_fuzzer $fuzzer +done diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/vuln.dict b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/vuln.dict new file mode 100644 index 0000000000000000000000000000000000000000..0d19aa407357659d0beb8487443c65eead7dab6a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/vuln.dict @@ -0,0 +1 @@ +"os.system('/tmp/tripwire')" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/README.md new file mode 100644 index 0000000000000000000000000000000000000000..48e111d1abc6d96661659f9a19dd7fc6f969baad --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/README.md @@ -0,0 +1,66 @@ +# System Sanitizers + +We use `ptrace` to instrument system calls made by the target program to detect +various vulnerabilities. + +## Command injection + +This detector currently works by + +- Checking if `execve` is called with `/tmp/tripwire` (which comes from our dictionary). +- Checking if `execve` is invoking a shell with invalid syntax. This is likely + caused by our input. + +## Arbitrary file open + +TODO: documentation. + +## Proof of concept + +### Cleanup +Note this will delete /tmp/tripwire if it exists. +```shell +make clean +``` + +### Run test +Note this will overwrite /tmp/tripwire if it exists. 
+```shell +make test +``` + +Look for one of the following lines: + +> ===BUG DETECTED: Shell injection=== + +which indicates the detection of executing the planted `/tmp/tripwire`. + + +> ===BUG DETECTED: Shell corruption=== + +which indicates the detection of executing a syntactic erroneous command. + + +### Command injection PoC in Python with `pytorch-lightning` +With `SystemSan`, [`Artheris`](https://github.com/google/atheris) can detect a shell injection bug in [version v1.5.10 of `pytorch-lightning`](https://github.com/PyTorchLightning/pytorch-lightning/tree/1.5.0). +```shell +make pytorch-lightning-1.5.10 +``` + +### Command injection PoC in JavaScript with `shell-quote` +With `SystemSan`, [`Jsfuzz`](https://gitlab.com/gitlab-org/security-products/analyzers/fuzzers/jsfuzz) can detect a shell corrpution bug in [the latest version (v1.7.3) of `shell-quote`](https://github.com/substack/node-shell-quote) without any seed. +```shell +make node-shell-quote-v1.7.3 +``` +This is based on [a shell injection exploit report](https://wh0.github.io/2021/10/28/shell-quote-rce-exploiting.html) of [version v1.7.2 of `shell-quote`](https://github.com/substack/node-shell-quote/tree/v1.7.2). +`SystemSan` can also discover the same shell injection bug with a corpus file containing: +``` +`:`/tmp/tripwire``:` +``` + +## Trophies + +- +- + + diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/SystemSan.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/SystemSan.cpp new file mode 100644 index 0000000000000000000000000000000000000000..27e8795c55df12d3c17399eda52c738c1b914240 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/SystemSan.cpp @@ -0,0 +1,493 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* A detector that uses ptrace to identify shell injection vulnerabilities. */ + +/* C standard library */ +#include +#include +#include +#include + +/* POSIX */ +#include +#include +#include +#include + +/* Linux */ +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "inspect_utils.h" +#include "inspect_dns.h" + +#define DEBUG_LOGS 0 + +#if DEBUG_LOGS +#define debug_log(...) \ + do { \ + fprintf(stderr, __VA_ARGS__); \ + fflush(stdout); \ + fputc('\n', stderr); \ + } while (0) +#else +#define debug_log(...) +#endif + +#define fatal_log(...) \ + do { \ + fprintf(stderr, __VA_ARGS__); \ + fputc('\n', stderr); \ + exit(EXIT_FAILURE); \ + } while (0) + +// The magic string that we'll use to detect full control over the command +// executed. +const std::string kTripWire = "/tmp/tripwire"; +// Shell injection bug confirmed with /tmp/tripwire. +const std::string kInjectionError = "Shell injection"; +// Shell corruption bug detected based on syntax error. +const std::string kCorruptionError = "Shell corruption"; +// The magic string that we'll use to detect arbitrary file open +const std::string kFzAbsoluteDirectory = "/fz/"; +// Arbitrary file open in /fz/ +const std::string kArbitraryFileOpenError = "Arbitrary file open"; +// Assuming only shorter (than this constant) top dir are legitly used. +constexpr int kRootDirMaxLength = 16; + +// The PID of the root process we're fuzzing. +pid_t g_root_pid; + +// Map of a PID/TID its PID/TID creator and wether it ran exec. 
+std::map root_pids; + +// Assuming the longest pathname is "/bin/bash". +constexpr int kShellPathnameLength = 20; + +// Syntax error messages of each shell. +const std::map> kShellSyntaxErrors = { + {"bash", + { + ": command not found", // General + ": syntax error", // Unfinished " or ' or ` or if, leading | or ; + ": missing `]'", // Unfinished [ + ": event not found", // ! leads large numbers + ": No such file or directory", // Leading < or / + }}, + {"csh", + { + ": Command not found.", // General + ": Missing }.", // Unfinished { + "Too many ('s.", // Unfinished ( + "Invalid null command.", // Leading | or < or > + "Missing name for redirect.", // Single < or > + ": No match.", // Leading ? or [ or * + "Modifier failed.", // Leading ^ + "No previous left hand side.", // A ^ + ": No such job.", // Leading % + ": No current job.", // A % + ": Undefined variable.", // Containing $ + ": Event not found.", // ! leads large numbers + // TODO: Make this more specific. + "Unmatched", // Unfinished " or ' or `, leading ; + }}, + {"dash", + { + "not found", // General + "Syntax error", // Unfinished " or ' or ` or if, leading | or ; or & + "missing ]", // Unfinished [ + "No such file", // Leading < + }}, + {"zsh", + { + ": command not found", // General + ": syntax error", // Unfinished " or ' or ` + ": ']' expected", // Unfinished [ + ": no such file or directory", // Leading < or / + ": parse error near", // Leading |, or & + ": no such user or named directory", // Leading ~ + }}, +}; + +// Shells used by Processes. 
+std::map g_shell_pids; + +struct Tracee { + pid_t pid; + bool syscall_enter = true; + + Tracee(pid_t pid) : pid(pid) {} +}; + +pid_t run_child(char **argv) { + // Run the program under test with its args as a child process + pid_t pid = fork(); + switch (pid) { + case -1: + fatal_log("Fork failed: %s", strerror(errno)); + case 0: + raise(SIGSTOP); + execvp(argv[0], argv); + fatal_log("execvp: %s", strerror(errno)); + } + return pid; +} + +// Construct a string with the memory specified in a register. +std::string read_string(pid_t pid, unsigned long reg, unsigned long length) { + auto memory = read_memory(pid, reg, length); + if (!memory.size()) { + return ""; + } + + std::string content(reinterpret_cast(memory.data()), + std::min(memory.size(), length)); + return content; +} + +void inspect_for_injection(pid_t pid, const user_regs_struct ®s) { + // Inspect a PID's registers for the sign of shell injection. + std::string path = read_string(pid, regs.rdi, kTripWire.length()); + if (!path.length()) { + return; + } + debug_log("inspecting"); + if (path == kTripWire) { + report_bug(kInjectionError, pid); + } +} + +std::string get_pathname(pid_t pid, const user_regs_struct ®s) { + // Parse the pathname from the memory specified in the RDI register. 
+ std::string pathname = read_string(pid, regs.rdi, kShellPathnameLength); + debug_log("Pathname is %s (len %lu)\n", pathname.c_str(), pathname.length()); + return pathname; +} + +std::string match_shell(std::string binary_pathname); + +// Identify the exact shell behind sh +std::string identify_sh(std::string path) { + char shell_pathname[kShellPathnameLength]; + auto written = readlink(path.c_str(), shell_pathname, kShellPathnameLength - 1); + if (written == -1) { + std::cerr << "Cannot query which shell is behind sh: readlink failed on " + << path << ": " + << strerror(errno) << "\n"; + std::cerr << "Assuming the shell is dash\n"; + return "dash"; + } + shell_pathname[written] = '\0'; + + debug_log("sh links to %s\n", shell_pathname); + std::string shell_pathname_str(shell_pathname); + + return match_shell(shell_pathname_str); +} + +std::string match_shell(std::string binary_pathname) { + // Identify the name of the shell used in the pathname. + if (!binary_pathname.length()) { + return ""; + } + + // We use c_str() to accept only the null terminated string. + std::string binary_name = binary_pathname.substr( + binary_pathname.find_last_of("/") + 1).c_str(); + + debug_log("Binary is %s (%lu)\n", binary_name.c_str(), + binary_name.length()); + + for (const auto &item : kShellSyntaxErrors) { + std::string known_shell = item.first; + if (binary_name == "sh") { + debug_log("Matched sh: Needs to identify which specific shell it is.\n"); + return identify_sh(binary_pathname); + } + if (binary_name == known_shell) { + debug_log("Matched %s\n", binary_name.c_str()); + return known_shell; + } + } + return ""; +} + +std::string get_shell(pid_t pid, const user_regs_struct ®s) { + // Get shell name used in a process. 
+ std::string binary_pathname = get_pathname(pid, regs); + return match_shell(binary_pathname); +} + +void match_error_pattern(std::string buffer, std::string shell, pid_t pid) { + auto error_patterns = kShellSyntaxErrors.at(shell); + for (const auto &pattern : error_patterns) { + if (buffer.find(pattern) != std::string::npos) { + std::cerr << "--- Found a sign of shell corruption ---\n" + << buffer.c_str() + << "\n----------------------------------------\n"; + // If a shell corruption error happens, kill its parent. + auto parent = root_pids[pid]; + while (!parent.ran_exec) { + if (parent.parent_tid == g_root_pid) { + break; + } + parent = root_pids[parent.parent_tid]; + } + report_bug(kCorruptionError, parent.parent_tid); + } + } +} + +void inspect_for_corruption(pid_t pid, const user_regs_struct ®s) { + // Inspect a PID's registers for shell corruption. + std::string buffer = read_string(pid, regs.rsi, regs.rdx); + debug_log("Write buffer: %s\n", buffer.c_str()); + match_error_pattern(buffer, g_shell_pids[pid], pid); +} + +void log_file_open(std::string path, int flags, pid_t pid) { + report_bug(kArbitraryFileOpenError, pid); + std::cerr << "===File opened: " << path.c_str() << ", flags = " << flags << ","; + switch (flags & 3) { + case O_RDONLY: + std::cerr << "O_RDONLY"; + break; + case O_WRONLY: + std::cerr << "O_WRONLY"; + break; + case O_RDWR: + std::cerr << "O_RDWR"; + break; + default: + std::cerr << "unknown"; + } + std::cerr << "===\n"; +} + +bool has_unprintable(const std::string &value) { + for (size_t i = 0; i < value.length(); i++) { + if (value[i] & 0x80) { + return true; + } + } + return false; +} + +void inspect_for_arbitrary_file_open(pid_t pid, const user_regs_struct ®s) { + // Inspect a PID's register for the sign of arbitrary file open. 
+ std::string path = read_string(pid, regs.rsi, kRootDirMaxLength); + if (!path.length()) { + return; + } + if (path.substr(0, kFzAbsoluteDirectory.length()) == kFzAbsoluteDirectory) { + log_file_open(path, regs.rdx, pid); + return; + } + if (path[0] == '/' && path.length() > 1) { + std::string path_absolute_topdir = path; + size_t root_dir_end = path.find('/', 1); + if (root_dir_end != std::string::npos) { + path_absolute_topdir = path.substr(0, root_dir_end); + } + if (has_unprintable(path_absolute_topdir)) { + struct stat dirstat; + if (stat(path_absolute_topdir.c_str(), &dirstat) != 0) { + log_file_open(path, regs.rdx, pid); + } + } + } +} + +int trace(std::map pids) { + unsigned long exit_status = 0; + while (!pids.empty()) { + std::vector new_pids; + + auto it = pids.begin(); + + while (it != pids.end()) { + auto pid = it->first; + auto &tracee = it->second; + int status = 0; + + int result = waitpid(pid, &status, __WALL | WNOHANG); + if (result == -1) { + it = pids.erase(it); + continue; + } + + if (result == 0) { + // Nothing to report yet. + ++it; + continue; + } + + if (WIFEXITED(status) || WIFSIGNALED(status)) { + debug_log("%d exited", pid); + it = pids.erase(it); + // Remove pid from the watchlist when it exits + g_shell_pids.erase(pid); + root_pids.erase(pid); + continue; + } + + // ptrace sets 0x80 for syscalls (with PTRACE_O_TRACESYSGOOD set). + bool is_syscall = + WIFSTOPPED(status) && WSTOPSIG(status) == (SIGTRAP | 0x80); + int sig = 0; + if (!is_syscall) { + // Handle generic signal. 
+ siginfo_t siginfo; + if (ptrace(PTRACE_GETSIGINFO, pid, nullptr, &siginfo) == -1) { + debug_log("ptrace(PTRACE_GETSIGINFO, %d): %s", pid, strerror(errno)); + continue; + } + sig = siginfo.si_signo; + debug_log("forwarding signal %d to %d", sig, pid); + } + + if ((status >> 8 == (SIGTRAP | (PTRACE_EVENT_EXIT << 8)))) { + debug_log("%d exiting", pid); + if (pid == g_root_pid) { + if (ptrace(PTRACE_GETEVENTMSG, pid, 0, &exit_status) == -1) { + debug_log("ptrace(PTRACE_GETEVENTMSG, %d): %s", pid, strerror(errno)); + } + debug_log("got exit status from root process: %lu", exit_status); + } + + if (ptrace(PTRACE_DETACH, pid, 0, 0) == -1) { + debug_log("ptrace(PTRACE_DETACH, %d): %s", pid, strerror(errno)); + } + continue; + } + + if (WIFSTOPPED(status) && + (status >> 8 == (SIGTRAP | (PTRACE_EVENT_CLONE << 8)) || + status >> 8 == (SIGTRAP | (PTRACE_EVENT_FORK << 8)) || + status >> 8 == (SIGTRAP | (PTRACE_EVENT_VFORK << 8)))) { + long new_pid; + if (ptrace(PTRACE_GETEVENTMSG, pid, 0, &new_pid) == -1) { + debug_log("ptrace(PTRACE_GETEVENTMSG, %d): %s", pid, strerror(errno)); + continue; + } + debug_log("forked %ld", new_pid); + new_pids.push_back(new_pid); + root_pids.emplace(new_pid, ThreadParent(pid)); + } + + if (is_syscall) { + user_regs_struct regs; + if (ptrace(PTRACE_GETREGS, pid, 0, ®s) == -1) { + debug_log("ptrace(PTRACE_GETREGS, %d): %s", pid, strerror(errno)); + continue; + } + + if (tracee.syscall_enter) { + if (regs.orig_rax == __NR_execve) { + // This is a new process. + auto parent = root_pids[pid]; + parent.ran_exec = true; + root_pids[pid] = parent; + inspect_for_injection(pid, regs); + std::string shell = get_shell(pid, regs); + if (shell != "") { + debug_log("Shell parsed: %s", shell.c_str()); + g_shell_pids.insert(std::make_pair(pid, shell)); + } + } + + inspect_dns_syscalls(pid, regs); + + if (regs.orig_rax == __NR_openat) { + // TODO(metzman): Re-enable this once we have config/flag support. 
+ // inspect_for_arbitrary_file_open(pid, regs); + } + + if (regs.orig_rax == __NR_write && + g_shell_pids.find(pid) != g_shell_pids.end()) { + debug_log("Inspecting the `write` buffer of shell process %d.", + pid); + inspect_for_corruption(pid, regs); + } + } + + // TODO: Check for commands with invalid syntax passed to /bin/sh and + // other shells. + // TODO: It's possible the process we're fuzzing can communicate with + // another process to execute code. Our check wouldn't catch this + // currently. + tracee.syscall_enter = !tracee.syscall_enter; + } + + if (ptrace(PTRACE_SYSCALL, pid, nullptr, sig) == -1) { + debug_log("ptrace(PTRACE_SYSCALL, %d): %s", pid, strerror(errno)); + continue; + } + + ++it; + } + + for (const auto &pid : new_pids) { + pids.emplace(pid, Tracee(pid)); + } + } + return static_cast(exit_status >> 8); +} + +int main(int argc, char **argv) { + if (argc <= 1) { + fatal_log("Expecting at least one arguments, received %d", argc - 1); + } + + // Create an executable tripwire file, as programs may check for existence + // before actually calling exec. 
+ std::ofstream tripwire(kTripWire); + tripwire.close(); + chmod(kTripWire.c_str(), 0755); + + pid_t pid = run_child(argv + 1); + + long options = PTRACE_O_TRACESYSGOOD | PTRACE_O_TRACEFORK | + PTRACE_O_TRACEVFORK | PTRACE_O_TRACECLONE | + PTRACE_O_TRACEEXIT; + + if (ptrace(PTRACE_SEIZE, pid, nullptr, options) == -1) { + fatal_log("ptrace(PTRACE_SEIZE): %s", strerror(errno)); + } + + if (waitpid(pid, nullptr, __WALL) == -1) { + fatal_log("waitpid: %s", strerror(errno)); + } + + if (ptrace(PTRACE_SYSCALL, pid, 0, 0) == -1) { + fatal_log("ptrace(PTRACE_SYSCALL): %s", strerror(errno)); + } + + g_root_pid = pid; + std::map pids; + pids.emplace(pid, Tracee(pid)); + root_pids.emplace(pid, ThreadParent(pid)); + return trace(pids); +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.cpp new file mode 100644 index 0000000000000000000000000000000000000000..030446345096fedc53fe7ea5826a36982a460c42 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.cpp @@ -0,0 +1,236 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* A detector that uses ptrace to identify shell injection vulnerabilities. 
*/ + +/* POSIX */ +#include +#include + +/* Linux */ +#include +#include +#include + +#include + +#include "inspect_utils.h" + + +// Arbitrary domain name resolution. +const std::string kArbitraryDomainNameResolution = "Arbitrary domain name resolution"; + +// Global constant for one file descriptor about of a DNS socket +int kFdDns = 0; +const size_t kDnsHeaderLen = 12; + + +void inspect_for_arbitrary_dns_connect(pid_t pid, const user_regs_struct ®s) { + auto memory = read_memory(pid, regs.rsi, sizeof(struct sockaddr_in)); + if (memory.size()) { + struct sockaddr_in * sa = reinterpret_cast(memory.data()); + if (sa->sin_family == AF_INET && htons(sa->sin_port) == 53) { + // save file descriptor for later sendmmsg + kFdDns = regs.rdi; + } + } +} + +struct DnsHeader { + uint16_t tx_id; + uint16_t flags; + uint16_t questions; + uint16_t answers; + uint16_t nameservers; + uint16_t additional; +}; + +struct DnsHeader parse_dns_header(std::vector data) { + struct DnsHeader h; + h.tx_id = (((uint16_t) data[0]) << 8) | ((uint16_t) data[1]); + h.flags = (((uint16_t) data[2]) << 8) | ((uint16_t) data[3]); + h.questions = (((uint16_t) data[4]) << 8) | ((uint16_t) data[5]); + h.answers = (((uint16_t) data[6]) << 8) | ((uint16_t) data[7]); + h.nameservers = (((uint16_t) data[8]) << 8) | ((uint16_t) data[9]); + h.additional = (((uint16_t) data[10]) << 8) | ((uint16_t) data[11]); + return h; +} + +bool dns_flags_standard_query(uint16_t flags) { + if ((flags & 0x8000) == 0) { + // Query, not response. + if (((flags & 0x7800) >> 11) == 0) { + // Opcode 0 is standard query. + if ((flags & 0x0200) == 0) { + // Message is not truncated. + if ((flags & 0x0040) == 0) { + // Z-bit reserved flag is unset. + return true; + } + } + } + } + return false; +} + +struct DnsRequest { + // Start of name in the byte vector. + size_t offset; + // End of name in the byte vector. + size_t end; + // Length of top level domain. + uint8_t tld_size; + // Number of levels/dots in domain name. 
+ size_t nb_levels; + // DNS type like A is 1. + uint16_t dns_type; + // DNS class like IN is 1. + uint16_t dns_class; +}; + +struct DnsRequest parse_dns_request(std::vector data, size_t offset) { + struct DnsRequest r; + r.offset = offset; + r.tld_size = 0; + r.nb_levels = 0; + while(offset < data.size()) { + uint8_t rlen = uint8_t(data[offset]); + if (rlen == 0) { + offset++; + break; + } + r.nb_levels++; + offset += rlen+1; + r.tld_size = rlen; + } + if (offset <= 4 + data.size()) { + r.end = offset; + r.dns_type = (((uint16_t) data[offset]) << 8) | ((uint16_t) data[offset+1]); + r.dns_class = (((uint16_t) data[offset+2]) << 8) | ((uint16_t) data[offset+3]); + } else { + r.end = data.size(); + } + return r; +} + +void log_dns_request(struct DnsRequest r, std::vector data) { + size_t offset = r.offset; + std::cerr << "===Domain resolved: "; + while(offset < r.end) { + uint8_t rlen = uint8_t(data[offset]); + if (rlen == 0) { + break; + } + std::cerr << '.'; + for (uint8_t i = 1; i < rlen+1; i++) { + std::cerr << (char) data[offset + i]; + } + offset += rlen+1; + } + std::cerr << "===\n"; + std::cerr << "===DNS request type: " << r.dns_type << ", class: " << r.dns_class << "===\n"; +} + +void inspect_for_arbitrary_dns_pkt(std::vector data, pid_t pid) { + if (data.size() < kDnsHeaderLen + 1) { + return; + } + struct DnsHeader h = parse_dns_header(data); + if (h.questions != 1) { + return; + } + if (h.answers != 0 || h.nameservers != 0) { + return; + } + if (!dns_flags_standard_query(h.flags)) { + return; + } + + struct DnsRequest req = parse_dns_request(data, kDnsHeaderLen); + // Alert if the top level domain is only one character and + // if there is more than just the TLD. 
+ if (req.tld_size == 1 && req.nb_levels > 1 && req.end < data.size()) { + report_bug(kArbitraryDomainNameResolution, pid); + log_dns_request(req, data); + } +} + +void inspect_for_arbitrary_dns_fdbuffer(pid_t pid, const user_regs_struct ®s) { + if (kFdDns > 0 && kFdDns == (int) regs.rdi) { + auto memory = read_memory(pid, regs.rsi, regs.rdx); + if (memory.size()) { + inspect_for_arbitrary_dns_pkt(memory, pid); + } + } +} + +void inspect_for_arbitrary_dns_iov(pid_t pid, unsigned long iov) { + auto memory = read_memory(pid, iov, sizeof(struct iovec)); + if (memory.size()) { + struct iovec * iovec = reinterpret_cast(memory.data()); + memory = read_memory(pid, (unsigned long) iovec->iov_base, iovec->iov_len); + if (memory.size()) { + inspect_for_arbitrary_dns_pkt(memory, pid); + } + } +} + +void inspect_for_arbitrary_dns_sendmsg(pid_t pid, const user_regs_struct ®s) { + if (kFdDns > 0 && kFdDns == (int) regs.rdi) { + auto memory = read_memory(pid, regs.rsi, sizeof(struct msghdr)); + if (memory.size()) { + struct msghdr * msg = reinterpret_cast(memory.data()); + if (msg->msg_iovlen == 1) { + inspect_for_arbitrary_dns_iov(pid, (unsigned long) msg->msg_iov); + } + } + } +} + +void inspect_for_arbitrary_dns_sendmmsg(pid_t pid, const user_regs_struct ®s) { + if (kFdDns > 0 && kFdDns == (int) regs.rdi) { + auto memory = read_memory(pid, regs.rsi, sizeof(struct mmsghdr)); + if (memory.size()) { + struct mmsghdr * msg = reinterpret_cast(memory.data()); + if (msg->msg_hdr.msg_iovlen == 1) { + inspect_for_arbitrary_dns_iov(pid, (unsigned long) msg->msg_hdr.msg_iov); + } + } + } +} + +void inspect_dns_syscalls(pid_t pid, const user_regs_struct ®s) { + switch (regs.orig_rax) { + case __NR_connect: + inspect_for_arbitrary_dns_connect(pid, regs); + break; + case __NR_close: + if (kFdDns > 0 && kFdDns == (int) regs.rdi) { + // reset DNS file descriptor on close + kFdDns = 0; + } + break; + case __NR_sendmmsg: + inspect_for_arbitrary_dns_sendmmsg(pid, regs); + break; + case 
__NR_sendmsg: + inspect_for_arbitrary_dns_sendmsg(pid, regs); + break; + case __NR_sendto: + // fallthrough + case __NR_write: + inspect_for_arbitrary_dns_fdbuffer(pid, regs); + } +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.h b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.h new file mode 100644 index 0000000000000000000000000000000000000000..849af4e980679c3b8cbbd875216a06ba7b909f3a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.h @@ -0,0 +1,26 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* A detector that uses ptrace to identify DNS arbitrary resolutions. */ + + +/* POSIX */ +#include + +/* Linux */ +#include + + +void inspect_dns_syscalls(pid_t pid, const user_regs_struct ®s); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..713d61d757c0c135271591bf17d9a1c108f37c27 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.cpp @@ -0,0 +1,73 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* A detector that uses ptrace to identify DNS arbitrary resolutions. */ + +/* C standard library */ +#include + +/* POSIX */ +#include + +/* Linux */ +#include + +#include +#include +#include +#include + +#include "inspect_utils.h" + +extern pid_t g_root_pid; +extern std::map root_pids; + +std::vector read_memory(pid_t pid, unsigned long long address, + size_t size) { + std::vector memory; + + for (size_t i = 0; i < size; i += sizeof(long)) { + long word = ptrace(PTRACE_PEEKTEXT, pid, address + i, 0); + if (word == -1) { + return memory; + } + + std::byte *word_bytes = reinterpret_cast(&word); + memory.insert(memory.end(), word_bytes, word_bytes + sizeof(long)); + } + + return memory; +} + +void report_bug(std::string bug_type, pid_t tid) { + // Report the bug found based on the bug code. + std::cerr << "===BUG DETECTED: " << bug_type.c_str() << "===\n"; + // Rely on sanitizers/libFuzzer to produce a stacktrace by sending SIGABRT + // to the root process. + // Note: this may not be reliable or consistent if shell injection happens + // in an async way. + // Find the thread group id, that is the pid. + pid_t pid = tid; + auto parent = root_pids[tid]; + while (!parent.ran_exec) { + // Find the first parent which ran exec syscall. 
+ if (parent.parent_tid == g_root_pid) { + break; + } + pid = parent.parent_tid; + parent = root_pids[parent.parent_tid]; + } + tgkill(pid, tid, SIGABRT); +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.h b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..a0737f28b1ae8de07b002f03d56fdab471674723 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.h @@ -0,0 +1,39 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/* A detector that uses ptrace to identify DNS arbitrary resolutions. */ + + +/* POSIX */ +#include + +#include +#include + +// Structure to know which thread id triggered the bug. +struct ThreadParent { + // Parent thread ID, ie creator. + pid_t parent_tid; + // Current thread ID ran exec to become another process. 
+ bool ran_exec = false; + + ThreadParent() : parent_tid(0) {} + ThreadParent(pid_t tid) : parent_tid(tid) {} +}; + +std::vector read_memory(pid_t pid, unsigned long long address, + size_t size); + +void report_bug(std::string bug_type, pid_t tid); diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target.cpp new file mode 100644 index 0000000000000000000000000000000000000000..eb1b1dd0c2488c39f2d6de94f861dd1a96775b3a --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target.cpp @@ -0,0 +1,28 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +/* A sample target program under test, + * /tmp/tripwire or other commands will be injected into its shell command */ + +#include +#include +#include + +extern "C" int LLVMFuzzerTestOneInput(char* data, size_t size) { + std::string str(data, size); + std::cout << "INPUT" << str << std::endl; + system(str.c_str()); + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_dns.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_dns.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7d7ef1ceef23bcb5ab7bca9a1933f6742fb054b7 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_dns.cpp @@ -0,0 +1,39 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +#include +#include +#include +#include + +#include +#include +#include +#include + +extern "C" int LLVMFuzzerTestOneInput(char* data, size_t size) { + std::string str(data, size); + std::cout << "INPUT" << str << std::endl; + + struct addrinfo *result = NULL; + + getaddrinfo(str.c_str(), NULL, NULL, &result); + if (result) { + freeaddrinfo(result); + } + + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_file.cpp b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_file.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dc55eb7f4c3e9778b378c16355a5f0fafeefbaf6 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/SystemSan/target_file.cpp @@ -0,0 +1,32 @@ +/* + * Copyright 2022 Google LLC + + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + + * http://www.apache.org/licenses/LICENSE-2.0 + + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +/* A sample target program under test, + * /tmp/tripwire or other commands will be injected into its shell command */ + +#include +#include +#include +#include + +extern "C" int LLVMFuzzerTestOneInput(char* data, size_t size) { + std::string str(data, size); + std::cout << "INPUT" << str << std::endl; + FILE *fp = fopen(str.c_str(), "r"); + if (fp) { + fclose(fp); + } + return 0; +} diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/README.md b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..67c59f71bd6108f0d12d1125e39f5a2a1eda9128 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/README.md @@ -0,0 +1,49 @@ +# Chronos: rebuilding OSS-Fuzz harnesses using cached builds + +## Pre-built images. + +Daily pre-built images are available at: + +- `us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/-ofg-cached-address` +- `us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/-ofg-cached-coverage` + +They can be used as drop-in replacements for the usual `gcr.io/oss-fuzz/` images. + +These images are generated in 2 ways: +- (Preferred) [Generate](https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/bash_parser.py) + a replay build script that can be re-run alongside existing build artifacts, + leveraging existing build system mechanisms to avoid rebuilding (e.g. running + `make` twice should not actually rebuild everything). This is error-prone, so + we validate the script works by running it. +- (Fallback, if the replay build script didn't work). We leverage + [ccache](https://ccache.dev/), to provide a compiler cache. This is often not + as fast as the replay build script, because some project builds spend + significant time doing non-compiler tasks (e.g. checking out submodules, + running configure scripts). 
+ +Note: this mechanism does not work for every single OSS-Fuzz project today. The +resulting image may either: +- Not provide much performance improvement compared with a normal image, or +- Not exist at all (if neither approach worked). + +Stats from a recent run: +(Feb 3 2025). + +## Usage locally + +**Example 1: htslib** + +From the OSS-Fuzz root + +```sh +$ RUN_ALL=1 ./infra/experimental/chronos/build_cache_local.sh htslib c address +... +... +Vanilla compile time: +17 +Replay worked +Replay compile time: +2 +Ccache compile time: +9 +``` diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_cache_local.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_cache_local.sh new file mode 100644 index 0000000000000000000000000000000000000000..8f3f20804f4ef9e9dbedd3275286bb4353ef422d --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_cache_local.sh @@ -0,0 +1,163 @@ +#!/bin/bash -eu +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +_PROJECT=$1 +_FUZZING_LANGUAGE=$2 +_SANITIZER=${3:-address} + +BASE=$PWD + +# Final image is either ccache or replay script, depending on which worked. +FINAL_IMAGE_NAME=us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-${_SANITIZER} + +# Always build an image with ccache. 
+CCACHE_IMAGE_NAME=us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-${_SANITIZER} + +# Step 1: build the base image +cd projects/${_PROJECT} +docker build -t gcr.io/oss-fuzz/${_PROJECT} . + + +# Step 2: create a container where `compile` has run which enables ccaching +# and also generates a replay build script. +cd ${BASE} +mkdir -p ccaches/${_PROJECT} +mkdir -p build/out/${_PROJECT} +B_START=$SECONDS + +docker container rm -f ${_PROJECT}-origin-${_SANITIZER} + +docker run \ + --env=SANITIZER=${_SANITIZER} \ + --env=CCACHE_DIR=/workspace/ccache \ + --env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \ + --env=CAPTURE_REPLAY_SCRIPT=1 \ + --name=${_PROJECT}-origin-${_SANITIZER} \ + -v=$PWD/ccaches/${_PROJECT}/ccache:/workspace/ccache \ + -v=$PWD/build/out/${_PROJECT}/:/out/ \ + gcr.io/oss-fuzz/${_PROJECT} \ + /bin/bash -c \ + "export PATH=/ccache/bin:\$PATH && compile" +B_TIME=$(($SECONDS - $B_START)) + +# Step 3: save (commit, locally) the cached container as an image +docker container commit -c "ENV REPLAY_ENABLED=1" -c "ENV CAPTURE_REPLAY_SCRIPT=" ${_PROJECT}-origin-${_SANITIZER} $FINAL_IMAGE_NAME + +# Step 4: save the list of executables created from a vanilla build. This is +# needed for validating if replay and ccaching works. +# notes: run a shell the container with e.g. +# `docker run --entrypoint /bin/bash -it local/ossfuzz/htslib-origin-address` +executables_vanilla="$(find ./build/out/${_PROJECT} -executable -type f | sort)" + + +# Step 5: Build with replay enabled, and validate the executables are the same +# in terms of naming. +# Note that an important step is removing everything in $OUT/ which is done +# in the docker command. 
+R_START=$SECONDS +docker run \ + --rm \ + --env=SANITIZER=${_SANITIZER} \ + --env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \ + -v=$PWD/build/out/${_PROJECT}/:/out/ \ + --name=${_PROJECT}-origin-${_SANITIZER}-replay-recached \ + $FINAL_IMAGE_NAME \ + /bin/bash -c \ + "export PATH=/ccache/bin:\$PATH && rm -rf /out/* && compile" +R_TIME=$(($SECONDS - $R_START)) + +# Step 6: Extract the newly build executables +executables_replay="$(find ./build/out/${_PROJECT}/ -executable -type f | sort)" + +echo "Executables vanilla: " +echo ${executables_vanilla} + +echo "------------------------------------------------------" +echo "Executables replay: " +echo ${executables_replay} + +REPLAY_WORKED= + +# Step 7: match executables from vanilla builds and replay builds. +# If this step is successful, then the process can exit as it's ready. +if [[ "$executables_replay" == "$executables_vanilla" ]] +then + REPLAY_WORKED=1 + + if [ -z "${RUN_ALL+1}" ]; then + echo "${_PROJECT}: Replay worked." + echo "${_PROJECT}: Compile times: Vanilla=${B_TIME}; Replay=${R_TIME};" + exit 0 + fi +else + echo "${_PROJECT}: Replay did not work" + R_TIME="N/A" +fi + +# Step 8: prepare Dockerfile for ccache +cp -rf ccaches/${_PROJECT}/ccache ./projects/${_PROJECT}/ccache-cache + +infra/experimental/chronos/prepare-ccache ${_PROJECT} + +cd projects/${_PROJECT} + +# Step 9: Build an image with CCache's new items (modifications are done on the +# dockerfile) +docker build -t $CCACHE_IMAGE_NAME . + +cd ${BASE} + +# Step 10: Run a `compile` with ccache's image. 
+# Run the ccache build +A_START=$SECONDS +docker run \ + --rm \ + --env=SANITIZER=${_SANITIZER} \ + --env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \ + --name=${_PROJECT}-origin-${_SANITIZER}-recached \ + -v=$PWD/build/out/${_PROJECT}/:/out/ \ + $CCACHE_IMAGE_NAME \ + /bin/bash -c \ + "export PATH=/ccache/bin:\$PATH && rm -rf /out/* && compile" +A_TIME=$(($SECONDS - $A_START)) + +# Step 11: extract the executables from the ccache build +executables_ccache="$(find ./build/out/${_PROJECT}/ -executable -type f | sort)" + + +# Step 12: validate the ccache builds are successful +if [[ "$executables_ccache" == "$executables_vanilla" ]] +then + echo "${_PROJECT}: Compile times: Vanilla=${B_TIME}; Replay=${R_TIME}; CCache=${A_TIME};" + + if [[ -z "${REPLAY_WORKED}" || ${R_TIME} -gt ${A_TIME} ]]; then + if [ ${R_TIME} -gt ${A_TIME} ]; then + echo "Replay was slower than ccache." + fi + + # Replay didn't work or was slower, so make the default "cached" image use the ccache one. + docker image tag \ + $CCACHE_IMAGE_NAME \ + $FINAL_IMAGE_NAME + fi + + exit 0 +else + echo "${_PROJECT}: Replay and ccaching did not work." + exit 1 +fi + diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_on_cloudbuild.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_on_cloudbuild.sh new file mode 100644 index 0000000000000000000000000000000000000000..ddcb567a53961994b68f0f112cf5ccdf1f6061d5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/build_on_cloudbuild.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +PROJECT=$1 +FUZZING_LANGUAGE=$2 + +gcloud builds submit "https://github.com/google/oss-fuzz" \ + --async \ + --git-source-revision=master \ + --config=cloudbuild.yaml \ + --substitutions=_PROJECT=$PROJECT,_FUZZING_LANGUAGE=$FUZZING_LANGUAGE \ + --project=oss-fuzz \ + --region=us-central1 diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/chronos.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/chronos.sh new file mode 100644 index 0000000000000000000000000000000000000000..bd83b49095eafd5264d16195d5f80c4d204e8dbd --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/chronos.sh @@ -0,0 +1,81 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# This script records the ENV and commands needed for fuzz target recompilation. 
+# It intercepts bash commands to save: 1) the ENV variable values before +# building the fuzz target (`recompile_env.sh`) and 2) all subsequent bash +# commands from that point (`recompile`). Combined with Docker, this setup +# allows for recompiling the fuzz target without rebuilding the entire project. +# Usage: +# 1. Set FUZZ_TARGET (e.g., in project's Dockerfile) +# 2. Source this file before compiling the fuzz target (e.g., source chronos.sh +# at the beginning of project's build.sh). + +export START_RECORDING="false" +RECOMPILE_ENV="/usr/local/bin/recompile_env.sh" + + +# Initialize the recompile script as compile in case Chronos did not trap any +# command containing the fuzz target. +initialize_recompile_script() { + export RECOMPILE_SCRIPT="/usr/local/bin/recompile" + cp "/usr/local/bin/compile" "$RECOMPILE_SCRIPT" +} + +reset_recompile_script() { + rm "$RECOMPILE_SCRIPT" + echo "#!/bin/bash" > "$RECOMPILE_SCRIPT" + echo "source $RECOMPILE_ENV" >> "$RECOMPILE_SCRIPT" + chmod +x "$RECOMPILE_SCRIPT" +} + + +# Execute or record command for recompilation. +execute_or_record_command() { + record_command() { + echo "cd \"$(pwd)\"" >> "$RECOMPILE_SCRIPT" + echo "$@" >> "$RECOMPILE_SCRIPT" + } + + # Check if any element in the command array contains the FUZZ_TARGET. + if [[ "$BASH_COMMAND" == *"$FUZZ_TARGET"* ]]; then + export START_RECORDING="true" + # Save all environment variables, excluding read-only ones + reset_recompile_script + declare -p | grep -Ev 'declare -[^ ]*r[^ ]*' > "$RECOMPILE_ENV" + fi + + if [[ "$START_RECORDING" == "true" ]]; then + record_command "$BASH_COMMAND" + echo "Recorded execution of: $BASH_COMMAND" + fi +} + + +main() { + # Initialize. + initialize_recompile_script + + # Set up trap for DEBUG to intercept commands. + trap 'execute_or_record_command' DEBUG + + # Enable extended debugging mode + shopt -s extdebug + # Ensure trap works in subshells and functions. 
+ set -T +} + +main diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild.yaml b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7f584fc77bf03a7a1a71c784be1fce9c83aa5cd5 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild.yaml @@ -0,0 +1,58 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +# CloudBuild for generating Chronos-cached images. +# Supports building by way of ccache now. 
+# High-level steps: +# 1) Build image for project +# 2) Run an ASAN build and store ccache +# 3) Copy ccache cache to host and copy into project's OSS-Fuzz folder +# 4) Build image for project and copy ccache in, storing image as *-ofg-cache-address +# 5) Run an coverage build and store ccache +# 6) Copy ccache cache to host and copy into project's OSS-Fuzz folder +# 7) Build image for project and copy ccache in, storing image as *-ofg-cache-coverage +# TODO (David): add support for use of dedicated replay_build.sh +steps: +- name: 'gcr.io/cloud-builders/docker' + entrypoint: /bin/bash + args: + - /workspace/infra/experimental/chronos/build_cache_local.sh + - ${_PROJECT} + - ${_FUZZING_LANGUAGE} + - address + env: + - RUN_ALL=1 +- name: 'gcr.io/cloud-builders/docker' + entrypoint: /bin/bash + args: + - /workspace/infra/experimental/chronos/build_cache_local.sh + - ${_PROJECT} + - ${_FUZZING_LANGUAGE} + - coverage + env: + - RUN_ALL=1 +images: +- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-address +- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-coverage +- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-address +- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-coverage +timeout: 72000s # 20 hours, same as build_lib.py +logsBucket: oss-fuzz-gcb-logs +tags: +- ${_PROJECT} +- chronos +options: + pool: + name: projects/oss-fuzz/locations/us-central1/workerPools/buildpool-chronos diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild_all.yaml b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild_all.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e336525bedafd2843b82f45f5cc92721375362c3 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/cloudbuild_all.yaml @@ -0,0 +1,34 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +# CloudBuild for generating Chronos-cached images. +steps: +- name: 'gcr.io/cloud-builders/docker' + args: + - build + - -t + - gcloud + - . + dir: infra/experimental/chronos +- name: 'gcloud' + args: + - infra/experimental/chronos/build_all.sh + entrypoint: /bin/bash +timeout: 1800s +serviceAccount: 'projects/oss-fuzz/serviceAccounts/llm-eval@oss-fuzz.iam.gserviceaccount.com' +options: + logging: CLOUD_LOGGING_ONLY +tags: +- chronos-all \ No newline at end of file diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/e2e-replay-build.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/e2e-replay-build.sh new file mode 100644 index 0000000000000000000000000000000000000000..f48b335c552a0dd5347d72dcd5d0b566e6782ab6 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/e2e-replay-build.sh @@ -0,0 +1,63 @@ +#!/usr/bin/bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Sample projects: simd, wt, libheif, htslib
+PROJECT=liblouis
+LOG=replay-${PROJECT}.txt
+OUT1=replay-out-${PROJECT}-1
+OUT2=replay-out-${PROJECT}-2
+python infra/helper.py build_image --no-pull "$PROJECT"
+
+# AddressSanitizer.
+mkdir -p build/out/${PROJECT}
+echo "start" >> ${LOG}
+echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
+# Remove container name we are about to use.
+docker container rm "${PROJECT}-origin-asan"
+
+# Build once, clean container if needed
+docker run -v $PWD/build/out/${PROJECT}:/out \
+  -ti --entrypoint="/bin/sh" \
+  --env FUZZING_LANGUAGE=c --env SANITIZER="address" \
+  --name "${PROJECT}-origin-asan" \
+  "gcr.io/oss-fuzz/${PROJECT}" -c "compile"
+
+# Copy outs and log data
+cp -rf $PWD/build/out/${PROJECT} ${OUT1}
+rm -rf $PWD/build/out/${PROJECT}
+ls -la $PWD/build/out/ >> ${LOG}
+echo "next" >> ${LOG}
+echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
+docker commit "${PROJECT}-origin-asan" "gcr.io/oss-fuzz/${PROJECT}-ofg-cached-asan"
+
+# Run the replay command
+docker run -v $PWD/build/out/${PROJECT}:/out \
+  -e REPLAY_ENABLED=1 -ti --entrypoint="/bin/sh" \
+  --env FUZZING_LANGUAGE=c --env SANITIZER="address" \
+  "gcr.io/oss-fuzz/${PROJECT}-ofg-cached-asan" -c "compile"
+echo "finish" >> ${LOG}
+echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
+cp -rf $PWD/build/out/${PROJECT} ${OUT2}
+
+# Match the artifacts. FIX: the matcher is silent and signals success via its
+infra/experimental/chronos/match_artifacts.sh $OUT1 $OUT2
+SUCCESS=$?
+if [[ $SUCCESS -eq 0 ]]; then
+  echo "SUCCESS REPLAY" >> ${LOG}
+else
+  echo "FAIL REPLAY" >> ${LOG}
+fi
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/match_artifacts.sh b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/match_artifacts.sh
new file mode 100644
index 
0000000000000000000000000000000000000000..39822c98ea26518efdc8cc770f983574827bddbf
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/match_artifacts.sh
@@ -0,0 +1,41 @@
+#!/usr/bin/bash -eux
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Compare the executables of two build output directories ($1 and $2).
+# Exits 0 iff every executable found under $1 has a byte-identical,
+# same-named file at the top level of $2; exits 1 otherwise.
+CMP1=$1
+CMP2=$2
+
+for exec1 in $(find "$CMP1"/ -type f -executable); do
+  base=$(basename "$exec1")
+
+  # NOTE(review): lookup is by basename at the top level of $CMP2 only;
+  # executables nested in subdirectories of $CMP2 are missed -- confirm intended.
+  exec2="$CMP2/${base}"
+  if [ ! -f "${exec2}" ]; then
+    exit 1
+  fi
+
+  # cmp --silent prints nothing; equality is reported via its exit status.
+  comparison=$(cmp --silent "$exec1" "$exec2"; echo $?)
+  if [[ $comparison -ne 0 ]]; then
+    exit 1
+  fi
+done
+
+exit 0
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-ccache b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-ccache
new file mode 100644
index 0000000000000000000000000000000000000000..3b0f56719680dc7cbda30a7ab2b18b61365de5bb
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-ccache
@@ -0,0 +1,22 @@
+#!/usr/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +PROJECT=$1 +{ + echo "COPY ccache-cache/ /ccache/cache"; + echo "ENV PATH=\"/ccache/bin:\$PATH\"" +} >> "projects/$PROJECT/Dockerfile" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-replay-rebuild b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-replay-rebuild new file mode 100644 index 0000000000000000000000000000000000000000..ebb4451f18c6ffda192e2dc6d5cfd5701a3b02a1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/experimental/chronos/prepare-replay-rebuild @@ -0,0 +1,21 @@ +#!/usr/bin/bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +PROJECT=$1 +{ + echo "ENV REPLAY_ENABLED=1"; +} >> "projects/$PROJECT/Dockerfile" diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/.gitignore b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..30d9eff972346c4978622c6f567e7808ad21764d --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/.gitignore @@ -0,0 +1 @@ +wycheproof.zip \ No newline at end of file diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/generate_job.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/generate_job.py new file mode 100644 index 0000000000000000000000000000000000000000..508162e3110522d09ec4f671c7ac2c2e188e5ee7 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/generate_job.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+################################################################################
+"""Script for generating an OSS-Fuzz job for a wycheproof project."""
+import sys


+def main():
+  """Usage: generate_job.py <project> -- prints a ClusterFuzz job definition."""
+  project = sys.argv[1]  # project name; an IndexError here means the arg is missing
+  print(f'Name: wycheproof_nosanitizer_{project}')
+  job_definition = f"""CUSTOM_BINARY = False
+BAD_BUILD_CHECK = False
+APP_NAME = WycheproofTarget.bash
+THREAD_ALIVE_CHECK_INTERVAL = 10
+TEST_TIMEOUT = 3600
+CRASH_RETRIES = 1
+AGGREGATE_COVERAGE = False
+TESTCASE_COVERAGE = False
+FILE_GITHUB_ISSUE = False
+MANAGED = False
+MAX_FUZZ_THREADS = 1
+RELEASE_BUILD_BUCKET_PATH = gs://clusterfuzz-builds-wycheproof/{project}/{project}-none-([0-9]+).zip
+PROJECT_NAME = {project}
+SUMMARY_PREFIX = {project}
+REVISION_VARS_URL = https://commondatastorage.googleapis.com/clusterfuzz-builds-wycheproof/{project}/{project}-none-%s.srcmap.json
+FUZZ_LOGS_BUCKET = {project}-logs.clusterfuzz-external.appspot.com
+CORPUS_BUCKET = {project}-corpus.clusterfuzz-external.appspot.com
+QUARANTINE_BUCKET = {project}-quarantine.clusterfuzz-external.appspot.com
+BACKUP_BUCKET = {project}-backup.clusterfuzz-external.appspot.com
+AUTOMATIC_LABELS = Proj-{project},Engine-wycheproof
+"""
+  print(job_definition)  # job body is printed after the Name: header on stdout


+if __name__ == '__main__':
+  main()
diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/launcher.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/launcher.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ca8435d12c20fd7f5f932cda0588ca10ff1070b
--- /dev/null
+++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/launcher.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Script for running wycheproof.""" + +import logging +import sys +import subprocess + + +def main(): + """Runs wycheproof.""" + if len(sys.argv) < 3: + logging.error('Usage: %s .', sys.argv[0]) + return 1 + + return subprocess.run(sys.argv[1:], check=False).returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/package.bash b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/package.bash new file mode 100644 index 0000000000000000000000000000000000000000..842222587d1c414fba9451a9ac573f9cf35c93e1 --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/package.bash @@ -0,0 +1,18 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +# Script for zipping ClusterFuzz's blackbox fuzzer for wycheproof. 
+zip wycheproof.zip run.py launcher.py diff --git a/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/run.py b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/run.py new file mode 100644 index 0000000000000000000000000000000000000000..8ca11ef3e1a6c49744094776babbd0e029809c6f --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/infra/tools/wycheproof/run.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +"""Script for creating "testcases" to run wycheproof on.""" + +import argparse +import os +import sys + + +def get_args(): + """Returns parsed program arguments.""" + parser = argparse.ArgumentParser() + parser.add_argument( + '--input_dir', + help='Ignored.', + ) + parser.add_argument('--output_dir', + help='Directory for writing testcases.', + required=True) + parser.add_argument('--no_of_files', type=int, help='Ignored.') + return parser.parse_args() + + +def main(): + """Generates a dummy testcase for use by a ClusterFuzz blackbox fuzzer.""" + args = get_args() + if not os.path.exists(args.output_dir): + os.mkdir(args.output_dir) + testcase = os.path.join(args.output_dir, 'fuzz-0') + with open(testcase, 'w') as file_handle: + file_handle.write(' ') + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-libxml2-delta-02/fuzz-tooling/tools/vscode-extension/.prettierrc.js b/local-test-libxml2-delta-02/fuzz-tooling/tools/vscode-extension/.prettierrc.js new file mode 100644 index 0000000000000000000000000000000000000000..e69f029e5137fd57624fa84a881284fc5ed7b74d --- /dev/null +++ b/local-test-libxml2-delta-02/fuzz-tooling/tools/vscode-extension/.prettierrc.js @@ -0,0 +1,18 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +//////////////////////////////////////////////////////////////////////////////// +module.exports = { + ...require('gts/.prettierrc.json') +}