diff --git a/check_and_update_conf.py b/check_and_update_conf.py index c20ea56..fb3d707 100755 --- a/check_and_update_conf.py +++ b/check_and_update_conf.py @@ -34,7 +34,7 @@ import subprocess import argparse -LANGS=[ "ADA", "D", "FORTRAN", "CXX", "GO", "C", "OBJC", "OBJCXX" ] +LANGS = ["ADA", "D", "FORTRAN", "CXX", "GO", "C", "OBJC", "OBJCXX"] CT_LANGS = { "ADA": "ADA", @@ -42,35 +42,47 @@ "FORTRAN": "FORTRAN", "CXX": "CXX", "GO": "GOLANG", - "C" : "C", - "OBJC" : "OBJC", - "OBJCXX" : "OBJCXX", + "C": "C", + "OBJC": "OBJC", + "OBJCXX": "OBJCXX", } -parser = argparse.ArgumentParser(description='Update and check config files.') -parser.add_argument ('-a', '--arch', required=True, metavar="ARCH") -parser.add_argument ('-l', '--lang', help="Only handle LANG instead of trying them all", required=False, metavar="LANG") -parser.add_argument ('--inplace', default=False, action='store_true', help='write change inplace') -parser.add_argument ('--output', required=False, metavar="OUTPUT") -parser.add_argument ('--error-if-missing-previous', action='store_true') -parser.add_argument ('--version', required=True, metavar="VERSION") -parser.add_argument ('--guess-previous', required=False, action='store_true') -parser.add_argument ('--previous-version', required=False, action='append', metavar="PREVVERSION") -parser.add_argument ('--config', required=False, metavar="CONFIG") -parser.add_argument ('--config-dir', required=False, metavar="CONFIGDIR") -parser.add_argument ('--config-todo', required=False, metavar="TODO_PATH") -parser.add_argument ('--summary', required=False, metavar="SUMMARY_PATH") - -parser.add_argument ('--create-api-tests', required=False, metavar="TESTS_PATH") -parser.add_argument ('--api-test-host', default="http://localhost:10240", metavar="TEST_HOST") +parser = argparse.ArgumentParser(description="Update and check config files.") +parser.add_argument("-a", "--arch", required=True, metavar="ARCH") +parser.add_argument( + "-l", + "--lang", + help="Only handle LANG instead of trying them all", + required=False, + metavar="LANG", +) +parser.add_argument( + "--inplace", default=False, action="store_true", help="write change inplace" +) +parser.add_argument("--output", required=False, metavar="OUTPUT") +parser.add_argument("--error-if-missing-previous", action="store_true") +parser.add_argument("--version", required=True, metavar="VERSION") +parser.add_argument("--guess-previous", required=False, action="store_true") +parser.add_argument( + "--previous-version", required=False, action="append", metavar="PREVVERSION" +) +parser.add_argument("--config", required=False, metavar="CONFIG") +parser.add_argument("--config-dir", required=False, metavar="CONFIGDIR") +parser.add_argument("--config-todo", required=False, metavar="TODO_PATH") +parser.add_argument("--summary", required=False, metavar="SUMMARY_PATH") + +parser.add_argument("--create-api-tests", required=False, metavar="TESTS_PATH") +parser.add_argument( + "--api-test-host", default="http://localhost:10240", metavar="TEST_HOST" +) PREVIOUS_VERSIONS = defaultdict(None) PREVIOUS_VERSIONS.default_factory = lambda: None -PROP_RE = re.compile(r'[^#]*=.*') -COMPILERS_LIST_RE = re.compile(r'compilers=(.*)') -COMPILER_EXE_RE = re.compile(r'compiler\.(.*?)\.exe=(.*)') -COMPILER_ANYPROP_RE = re.compile(r'compiler\.(?P<name>.*?)\.(?P<prop>.*)=(?P<value>.*)') +PROP_RE = re.compile(r"[^#]*=.*") +COMPILERS_LIST_RE = re.compile(r"compilers=(.*)") +COMPILER_EXE_RE = re.compile(r"compiler\.(.*?)\.exe=(.*)") +COMPILER_ANYPROP_RE = 
re.compile(r"compiler\.(?P.*?)\.(?P.*)=(?P.*)") API_TESTS_OUTPUT = None @@ -106,16 +118,14 @@ "OBJCXX": "int f(void){return 0;} int main (){return f();}", } -def create_test (arch, lang, compilerId): + +def create_test(arch, lang, compilerId): print(f"lang is {lang}") json_content = { - "source" : TEST_FOR_LANG[lang], + "source": TEST_FOR_LANG[lang], "options": { "userArguments": "-O0", - "compilerOptions": { - "skipAsm": False, - "executorRequest": False - }, + "compilerOptions": {"skipAsm": False, "executorRequest": False}, "filters": { "commentOnly": True, "demangle": True, @@ -124,16 +134,16 @@ def create_test (arch, lang, compilerId): "intel": True, "labels": True, "libraryCode": False, - "trim": False - } - } + "trim": False, + }, + }, } - curl_cmd = f'''if curl -s "$CEHOST/api/compiler/{compilerId}/compile" --header "Accept: application/json"\\ + curl_cmd = f"""if curl -s "$CEHOST/api/compiler/{compilerId}/compile" --header "Accept: application/json"\\ -X POST -H"Content-Type: application/json"\\ -d\'{json.dumps(json_content)}\' |\\ - ''' - curl_cmd_end=''' + """ + curl_cmd_end = """ jq ".code" |\\ grep -q 0 then @@ -141,34 +151,35 @@ def create_test (arch, lang, compilerId): else printf "%s [FAIL]\\n" "${line:${#NAME}}" >> test.result; fi - ''' + """ API_TESTS_OUTPUT.write(f"NAME='{arch} {lang} {compilerId} ASM'\n") - API_TESTS_OUTPUT.write(f'## Test for {compilerId}, {lang}/{arch} ASM only\n') + API_TESTS_OUTPUT.write(f"## Test for {compilerId}, {lang}/{arch} ASM only\n") API_TESTS_OUTPUT.write(f'echo -n "$NAME" >> test.result\n') API_TESTS_OUTPUT.write(curl_cmd) API_TESTS_OUTPUT.write(curl_cmd_end) - API_TESTS_OUTPUT.write(f'## Test for {compilerId}, {lang}/{arch} ASM + BINARY\n') + API_TESTS_OUTPUT.write(f"## Test for {compilerId}, {lang}/{arch} ASM + BINARY\n") API_TESTS_OUTPUT.write(f"NAME='{arch} {lang} {compilerId} ASM+BINARY'\n") API_TESTS_OUTPUT.write(f'echo -n "$NAME" >> test.result\n') - curl_test_bin = f'''if curl -s "$CEHOST/api/compilers?fields=id,supportsBinary" --header "Accept: application/json" |\\ + curl_test_bin = f"""if curl -s "$CEHOST/api/compilers?fields=id,supportsBinary" --header "Accept: application/json" |\\ jq '.[] | select(.id=="{compilerId}") | .supportsBinary'|\\ - ''' - curl_test_bin_end=''' + """ + curl_test_bin_end = """ grep -q false then printf "%s [SKIPPED (not supported)]\n" "${line:${#NAME}}" >> test.result; else - ''' + """ API_TESTS_OUTPUT.write(curl_test_bin) API_TESTS_OUTPUT.write(curl_test_bin_end) - json_content["options"]["filters"]['binary'] = True + json_content["options"]["filters"]["binary"] = True API_TESTS_OUTPUT.write(curl_cmd) API_TESTS_OUTPUT.write(curl_cmd_end) - API_TESTS_OUTPUT.write('\nfi\n') + API_TESTS_OUTPUT.write("\nfi\n") + class Line: def __init__(self, line_number, text): @@ -176,20 +187,23 @@ def __init__(self, line_number, text): self.text = text.strip() def __str__(self): - return f'<{self.number}> {self.text}' + return f"<{self.number}> {self.text}" + def match_and_add(line: Line, expr, s): match = expr.match(line.text) if match: - s[match.group(1)]=line + s[match.group(1)] = line return match -def match_and_update(line: Line, expr, s, split=':'): + +def match_and_update(line: Line, expr, s, split=":"): match = expr.match(line.text) if match: s.update(match.group(1).split(split)) return match + def parse_file(file: str): listed_compilers = {} compilers_exe = {} @@ -209,77 +223,78 @@ def parse_file(file: str): match_compilers = COMPILERS_LIST_RE.search(line.text) if match_compilers: - ids = 
match_compilers.group(1).split(':') + ids = match_compilers.group(1).split(":") for elem_id in ids: - if elem_id.startswith('&'): + if elem_id.startswith("&"): pass - elif '@' not in elem_id: + elif "@" not in elem_id: listed_compilers[elem_id] = line m = match_and_add(line, COMPILER_EXE_RE, compilers_exe) m = COMPILER_ANYPROP_RE.match(line.text) if m: - last_compilers_prop[m.group('name')]=line - if m.group('prop') == 'name': - compilers_name_prop[m.group('name')]=m.group('value') + last_compilers_prop[m.group("name")] = line + if m.group("prop") == "name": + compilers_name_prop[m.group("name")] = m.group("value") m = match_and_add(line, COMPILER_ANYPROP_RE, last_compilers_prop) return { - 'listed_compilers': listed_compilers, - 'compilers_exe': compilers_exe, - 'last_compilers_prop': last_compilers_prop, - 'compilers_name_prop': compilers_name_prop, + "listed_compilers": listed_compilers, + "compilers_exe": compilers_exe, + "last_compilers_prop": last_compilers_prop, + "compilers_name_prop": compilers_name_prop, } + # because some arch (ie. riscv) are using a nicer naming {arch}-bla. But we # don't want to change all the others... So special casing here. COMPILER_ID_PATTERN = defaultdict(None) COMPILER_ID_PATTERN.default_factory = lambda: { - 'D': 'gdc{arch}{version}', - 'ADA': 'gnat{arch}{version}', - 'C': 'c{arch}g{version}', - 'CXX': '{arch}g{version}', - 'FORTRAN': 'f{arch}g{version}', - 'GO': 'gccgo{arch}{version}', - 'OBJC': 'objc{arch}g{version}', - 'OBJCXX': 'objcpp{arch}g{version}', + "D": "gdc{arch}{version}", + "ADA": "gnat{arch}{version}", + "C": "c{arch}g{version}", + "CXX": "{arch}g{version}", + "FORTRAN": "f{arch}g{version}", + "GO": "gccgo{arch}{version}", + "OBJC": "objc{arch}g{version}", + "OBJCXX": "objcpp{arch}g{version}", } -COMPILER_ID_PATTERN['riscv64'] = { - 'D': 'gdc{arch}{version}', - 'ADA': 'gnat{arch}{version}', - 'C': 'rv64-cgcc{version}', - 'CXX': 'rv64-gcc{version}', - 'FORTRAN': 'f{arch}g{version}', - 'GO': 'gccgo{arch}{version}', - 'OBJC': 'objcrv32g{version}', - 'OBJCXX': 'objcppgccrv64{version}', +COMPILER_ID_PATTERN["riscv64"] = { + "D": "gdc{arch}{version}", + "ADA": "gnat{arch}{version}", + "C": "rv64-cgcc{version}", + "CXX": "rv64-gcc{version}", + "FORTRAN": "f{arch}g{version}", + "GO": "gccgo{arch}{version}", + "OBJC": "objcrv32g{version}", + "OBJCXX": "objcppgccrv64{version}", } -COMPILER_ID_PATTERN['riscv32'] = { - 'D': 'gdc{arch}{version}', - 'ADA': 'gnat{arch}{version}', - 'C': 'rv32-cgcc{version}', - 'CXX': 'rv32-gcc{version}', - 'FORTRAN': 'f{arch}g{version}', - 'GO': 'gccgo{arch}{version}', - 'OBJC': 'objcrv32g{version}', - 'OBJCXX': 'objcppgccrv32{version}', +COMPILER_ID_PATTERN["riscv32"] = { + "D": "gdc{arch}{version}", + "ADA": "gnat{arch}{version}", + "C": "rv32-cgcc{version}", + "CXX": "rv32-gcc{version}", + "FORTRAN": "f{arch}g{version}", + "GO": "gccgo{arch}{version}", + "OBJC": "objcrv32g{version}", + "OBJCXX": "objcppgccrv32{version}", } -COMPILER_ID_PATTERN['arm-unknown'] = { - 'D': 'gdc{arch}u{version}', - 'ADA': 'gnat{arch}u{version}', - 'C': 'c{arch}ug{version}', - 'CXX': '{arch}ug{version}', - 'FORTRAN': 'f{arch}ug{version}', - 'GO': 'gccgo{arch}u{version}', - 'OBJC': 'objc{arch}ug{version}', - 'OBJCXX': 'objcpp{arch}ug{version}', +COMPILER_ID_PATTERN["arm-unknown"] = { + "D": "gdc{arch}u{version}", + "ADA": "gnat{arch}u{version}", + "C": "c{arch}ug{version}", + "CXX": "{arch}ug{version}", + "FORTRAN": "f{arch}ug{version}", + "GO": "gccgo{arch}u{version}", + "OBJC": "objc{arch}ug{version}", + "OBJCXX": 
"objcpp{arch}ug{version}", } -ARCH_RENAMING_IN_CONFIG={ +ARCH_RENAMING_IN_CONFIG = { "powerpc": "ppc", "powerpc64": "ppc64", "powerpc64le": "ppc64le", @@ -289,62 +304,70 @@ def parse_file(file: str): } FILEPREFIX = { - 'D': 'd', - 'ADA': 'ada', - 'C': 'c', - 'CXX': 'c++', - 'FORTRAN': 'fortran', - 'GO': 'go', - 'OBJC': 'objc', - 'OBJCXX': 'objc++', + "D": "d", + "ADA": "ada", + "C": "c", + "CXX": "c++", + "FORTRAN": "fortran", + "GO": "go", + "OBJC": "objc", + "OBJCXX": "objc++", } COMPILER_SUFFIX = { - 'D': 'gdc', - 'ADA': 'gnatmake', - 'C': 'gcc', - 'CXX': 'g++', - 'FORTRAN': 'gfortran', - 'GO': 'gccgo', - 'OBJC': 'gcc', - 'OBJCXX': 'g++', + "D": "gdc", + "ADA": "gnatmake", + "C": "gcc", + "CXX": "g++", + "FORTRAN": "gfortran", + "GO": "gccgo", + "OBJC": "gcc", + "OBJCXX": "g++", } + class Woops(Exception): pass + class AlreadyDefined(Exception): pass + class ManualFixupNeeded(Exception): pass + class FixupAction(Enum): REPLACE = 1 ADD_NEW_LINE_AFTER = 2 APPEND_TO_LINE = 3 ADD_SORTED = 4 + ## takes the first number it finds in the key and use it. if no number is found, ## put the item at the end (probably some trunk compiler) -def find_int_key (key): +def find_int_key(key): if key.isdigit(): t = key - m = re.search(r'(\d+)', key) + m = re.search(r"(\d+)", key) if m: t = int(m.group(1)) else: t = 99999 return t + def compiler_version_sort(l): return sorted(l, key=find_int_key) + def add_sorted(original_line: str, new_value: str): - k,v = original_line.split('=') - values = v.split(':') + k, v = original_line.split("=") + values = v.split(":") values.append(new_value) - return '='.join ([k, ':'.join(compiler_version_sort(values))]) + return "=".join([k, ":".join(compiler_version_sort(values))]) + class Fixup: def __init__(self, line_number, text, action: FixupAction): @@ -353,20 +376,21 @@ def __init__(self, line_number, text, action: FixupAction): self.action = action def __repr__(self): - action=f"NONE{self.action}" + action = f"NONE{self.action}" match self.action: case FixupAction.REPLACE: - action="replacing it with" + action = "replacing it with" case FixupAction.ADD_NEW_LINE_AFTER: - action="adding after newline" + action = "adding after newline" case FixupAction.APPEND_TO_LINE: - action="appending after" + action = "appending after" case FixupAction.ADD_SORTED: - action="Add and natural sort the values" + action = "Add and natural sort the values" - return f'Fixup line {self.line} by {action} text: {self.text}' + return f"Fixup line {self.line} by {action} text: {self.text}" -def generateConfig (arch: str, lang: str, version: str, directory: str, name): + +def generateConfig(arch: str, lang: str, version: str, directory: str, name): objdump_path = findFile(arch, lang, version, directory, "objdump") cppfilt_path = findFile(arch, lang, version, directory, "c++filt") compiler_path = findCompiler(arch, lang, version, directory) @@ -382,33 +406,45 @@ def generateConfig (arch: str, lang: str, version: str, directory: str, name): ret += f"compiler.{compiler_id}.name={name}\n" return ret -def findFile (arch: str, lang: str, version: str, directory: str, suffix: str): - target_dir='{directory}/{arch}/gcc-{version}/**/*-{suffix}'.format(directory=directory, arch=arch, version=version, suffix=suffix) + +def findFile(arch: str, lang: str, version: str, directory: str, suffix: str): + target_dir = "{directory}/{arch}/gcc-{version}/**/*-{suffix}".format( + directory=directory, arch=arch, version=version, suffix=suffix + ) print("search in {}".format(target_dir)) for f in glob.glob(target_dir, 
recursive=True): return abspath(f) raise Woops("Can't find '{target_dir}, something's wrong") -def findCompiler (arch: str, lang: str, version: str, directory: str): + +def findCompiler(arch: str, lang: str, version: str, directory: str): candidate = findFile(arch, lang, version, directory, COMPILER_SUFFIX[lang]) version_found = False - lines = subprocess.check_output([candidate, "--version"]).decode("utf-8").splitlines() + lines = ( + subprocess.check_output([candidate, "--version"]).decode("utf-8").splitlines() + ) for l in lines: if re.search(version, l): version_found = True if not version_found: - text = '\n'.join(lines) - raise Woops(f"Compiler found ({candidate}) doesn't expose the correct version: {text}") + text = "\n".join(lines) + raise Woops( + f"Compiler found ({candidate}) doesn't expose the correct version: {text}" + ) return candidate -def CompilerId (arch: str, version: str, lang: str): + +def CompilerId(arch: str, version: str, lang: str): renamed_arch = arch if arch in ARCH_RENAMING_IN_CONFIG: renamed_arch = ARCH_RENAMING_IN_CONFIG[arch] - return COMPILER_ID_PATTERN[arch][lang].format(arch=renamed_arch, version=version.replace('.', '')) + return COMPILER_ID_PATTERN[arch][lang].format( + arch=renamed_arch, version=version.replace(".", "") + ) + def Do(args, lang): new_compiler_id = CompilerId(args.arch, args.version, lang) @@ -427,16 +463,20 @@ def Do(args, lang): create_test(args.arch, lang, new_compiler_id) raise e + ## Used for creating fake tests at the end. NEW_COMPILERS = {} + def Wrapped_Do(args, lang: str): fixups = defaultdict(list) if args.config: conf = args.config else: - conf = join(args.config_dir, "{lang}.amazon.properties".format(lang=FILEPREFIX[lang])) + conf = join( + args.config_dir, "{lang}.amazon.properties".format(lang=FILEPREFIX[lang]) + ) p = parse_file(conf) parse_previous_version(args.version, args.arch, lang, args.guess_previous, p) @@ -444,74 +484,93 @@ def Wrapped_Do(args, lang: str): new_compiler_id = CompilerId(args.arch, args.version, lang) NEW_COMPILERS[new_compiler_id] = { - 'arch' : args.arch, - 'version': args.version, - 'lang': lang, + "arch": args.arch, + "version": args.version, + "lang": lang, } previous_compiler_id = None if get_previous_version(args.arch, lang): - previous_compiler_id = CompilerId(args.arch, get_previous_version(args.arch, lang), lang) + previous_compiler_id = CompilerId( + args.arch, get_previous_version(args.arch, lang), lang + ) print(new_compiler_id) - if new_compiler_id in p['listed_compilers']: + if new_compiler_id in p["listed_compilers"]: msg = "{compiler} is already in {conf} at line {line}".format( compiler=new_compiler_id, conf=conf, - line=p['listed_compilers'][new_compiler_id].number) + line=p["listed_compilers"][new_compiler_id].number, + ) raise AlreadyDefined(msg) else: - print ("{compiler} not in {conf}".format(compiler=new_compiler_id, conf=conf)) - #findCompiler (args.arch, lang, args.version, '/opt/compiler-explorer') + print("{compiler} not in {conf}".format(compiler=new_compiler_id, conf=conf)) + # findCompiler (args.arch, lang, args.version, '/opt/compiler-explorer') - if previous_compiler_id not in p['listed_compilers']: - msg = "Could not find previous compiler version {version}".format(version=get_previous_version(args.arch, lang)) + if previous_compiler_id not in p["listed_compilers"]: + msg = "Could not find previous compiler version {version}".format( + version=get_previous_version(args.arch, lang) + ) if args.error_if_missing_previous: raise Woops(msg) else: - print (msg) + print(msg) 
- config_fixup = generateConfig(args.arch, lang, args.version, '/opt/compiler-explorer', None) - todo_msg = f'\nPlease add the following in {conf}:\n8<---8<--- BEGIN ---8<---8<---\n{config_fixup}\n8<---8<--- END ---8<---8<---\n' + config_fixup = generateConfig( + args.arch, lang, args.version, "/opt/compiler-explorer", None + ) + todo_msg = f"\nPlease add the following in {conf}:\n8<---8<--- BEGIN ---8<---8<---\n{config_fixup}\n8<---8<--- END ---8<---8<---\n" if args.config_todo: - with open(args.config_todo, 'a') as todo_f: - todo_f.write (todo_msg) + with open(args.config_todo, "a") as todo_f: + todo_f.write(todo_msg) else: print(todo_msg) raise ManualFixupNeeded() else: - previous_listed_line = p['listed_compilers'][previous_compiler_id] + previous_listed_line = p["listed_compilers"][previous_compiler_id] if not previous_listed_line: msg = f"Error, can't find where previous compiler {previous_compiler_id} is listed" raise Woops(msg) - fixups[previous_listed_line.number].append(Fixup( - previous_listed_line.number, - f'{new_compiler_id}', - FixupAction.ADD_SORTED)) - - last_line = p['last_compilers_prop'][previous_compiler_id].number - print ("Last prop set for previous {version} at line {line}".format( - version=get_previous_version(args.arch, lang), - line=last_line)) - - if previous_compiler_id in p['compilers_name_prop']: + fixups[previous_listed_line.number].append( + Fixup( + previous_listed_line.number, + f"{new_compiler_id}", + FixupAction.ADD_SORTED, + ) + ) + + last_line = p["last_compilers_prop"][previous_compiler_id].number + print( + "Last prop set for previous {version} at line {line}".format( + version=get_previous_version(args.arch, lang), line=last_line + ) + ) + + if previous_compiler_id in p["compilers_name_prop"]: ## replace previous version by new version, and try to handle case of version M.m.p with name omiting .p - name = p['compilers_name_prop'][previous_compiler_id] + name = p["compilers_name_prop"][previous_compiler_id] name = name.replace(get_previous_version(args.arch, lang), args.version) - name = name.replace(get_previous_version(args.arch, lang)[0:-2], args.version[0:-2]) + name = name.replace( + get_previous_version(args.arch, lang)[0:-2], args.version[0:-2] + ) else: name = None - fixups[last_line].append(Fixup( - last_line, - generateConfig(args.arch, lang, args.version, '/opt/compiler-explorer', name), - FixupAction.ADD_NEW_LINE_AFTER)) + fixups[last_line].append( + Fixup( + last_line, + generateConfig( + args.arch, lang, args.version, "/opt/compiler-explorer", name + ), + FixupAction.ADD_NEW_LINE_AFTER, + ) + ) output = io.StringIO() with open(conf) as f: @@ -520,17 +579,17 @@ def Wrapped_Do(args, lang: str): for fixup in fixups[line_number]: match fixup.action: case FixupAction.REPLACE: - print (fixup.text, file=output, end="") + print(fixup.text, file=output, end="") case FixupAction.ADD_NEW_LINE_AFTER: - print (text, file=output, end="") - print (fixup.text, file=output, end="") + print(text, file=output, end="") + print(fixup.text, file=output, end="") case FixupAction.APPEND_TO_LINE: - print (text.strip() + fixup.text, file=output) + print(text.strip() + fixup.text, file=output) case FixupAction.ADD_SORTED: - new_line = add_sorted (text.strip(), fixup.text) - print (new_line, file=output) + new_line = add_sorted(text.strip(), fixup.text) + print(new_line, file=output) else: - print (text, file=output, end="") + print(text, file=output, end="") # output.close() if args.inplace: @@ -541,37 +600,44 @@ def Wrapped_Do(args, lang: str): with 
open(output_path, "w") as f: f.write(output.getvalue()) -def get_previous_version (lang, arch): - l_a = f'{lang};{arch}'.format(lang, arch).upper() + +def get_previous_version(lang, arch): + l_a = f"{lang};{arch}".format(lang, arch).upper() if l_a in PREVIOUS_VERSIONS: return PREVIOUS_VERSIONS[l_a] return PREVIOUS_VERSIONS[lang] + def test_prev(version, arch, lang, parsed_conf): - version = version.split('.') + version = version.split(".") - for major in reversed(range(5, int(version[0])+1)): - for minor in reversed(range(1,10)): + for major in reversed(range(5, int(version[0]) + 1)): + for minor in reversed(range(1, 10)): - test_cid = CompilerId (arch, f"{major}.{minor}", lang) + test_cid = CompilerId(arch, f"{major}.{minor}", lang) ## print(f"Testing {major}.{minor}: {test_cid}") - if test_cid in parsed_conf['listed_compilers']: - conf_found = parsed_conf['listed_compilers'][test_cid] - print(f"FOUND {test_cid} in line '{conf_found.text}' at line {conf_found.number}") + if test_cid in parsed_conf["listed_compilers"]: + conf_found = parsed_conf["listed_compilers"][test_cid] + print( + f"FOUND {test_cid} in line '{conf_found.text}' at line {conf_found.number}" + ) return (f"{arch};{lang}".upper(), f"{major}.{minor}") - for patch in reversed(range(0,10)): - test_cid = CompilerId (arch, f"{major}.{minor}.{patch}", lang) + for patch in reversed(range(0, 10)): + test_cid = CompilerId(arch, f"{major}.{minor}.{patch}", lang) ## print(f"Testing {major}.{minor}.{patch}: {test_cid}") - if test_cid in parsed_conf['listed_compilers']: - conf_found = parsed_conf['listed_compilers'][test_cid] - print(f"FOUND {test_cid} in line '{conf_found.text}' at line {conf_found.number}") + if test_cid in parsed_conf["listed_compilers"]: + conf_found = parsed_conf["listed_compilers"][test_cid] + print( + f"FOUND {test_cid} in line '{conf_found.text}' at line {conf_found.number}" + ) return (f"{arch};{lang}".upper(), f"{major}.{minor}.{patch}") print(f"Did not find any previous compiler for {arch} {lang}") return None + def parse_previous_version(version, arch, lang, guess_previous, parsed_conf): if guess_previous: @@ -581,13 +647,14 @@ def parse_previous_version(version, arch, lang, guess_previous, parsed_conf): if args.previous_version: for pv in args.previous_version: - pv_s = pv.split(':') + pv_s = pv.split(":") if len(pv_s) == 1: PREVIOUS_VERSIONS.default_factory = lambda: pv_s[0] elif len(pv_s) == 2: PREVIOUS_VERSIONS[pv_s[0].upper()] = pv_s[1] -def check_lang_enabled_in_ctng (args, lang): + +def check_lang_enabled_in_ctng(args, lang): ## You can't disable C \_o< if lang == "C": @@ -595,7 +662,6 @@ def check_lang_enabled_in_ctng (args, lang): ct_ng_config = join("build", "latest", f"{args.arch}-{args.version}.config") - ct_lang = CT_LANGS[lang] print(f"check for CT_CC_LANG_{ct_lang} in {ct_ng_config}") @@ -605,7 +671,8 @@ def check_lang_enabled_in_ctng (args, lang): return True return False -if __name__ == '__main__': + +if __name__ == "__main__": args = parser.parse_args() if args.create_api_tests: @@ -614,8 +681,8 @@ def check_lang_enabled_in_ctng (args, lang): API_TESTS_OUTPUT = open(args.create_api_tests, "a") if not results_exists: - API_TESTS_OUTPUT.write('#!/bin/bash\n') - API_TESTS_OUTPUT.write('set -euo pipefail\n') + API_TESTS_OUTPUT.write("#!/bin/bash\n") + API_TESTS_OUTPUT.write("set -euo pipefail\n") API_TESTS_OUTPUT.write("line='----------------------------------------'\n") API_TESTS_OUTPUT.write(f"CEHOST='{args.api_test_host}'\n") @@ -623,7 +690,7 @@ def check_lang_enabled_in_ctng (args, lang): 
Do(args, args.lang) else: for lang in LANGS: - if check_lang_enabled_in_ctng (args, lang): + if check_lang_enabled_in_ctng(args, lang): try: Do(args, lang) if args.summary: @@ -649,7 +716,9 @@ def check_lang_enabled_in_ctng (args, lang): except Woops as err: if args.summary: with open(args.summary, "a") as f: - f.write(f"NOT OK (ERROR): {args.arch} {args.version} {lang}\n") + f.write( + f"NOT OK (ERROR): {args.arch} {args.version} {lang}\n" + ) f.write(str(err)) f.write("\n") @@ -660,17 +729,23 @@ def check_lang_enabled_in_ctng (args, lang): ## tests created here are supposed to FAIL. if args.create_api_tests: API_TESTS_OUTPUT.write("## Fake tests, they should all FAIL.\n") - API_TESTS_OUTPUT.write("echo -e '\\n\\n#### Fake tests, they should FAIL or be SKIPPED, but never PASS\\n' >> test.result\n") + API_TESTS_OUTPUT.write( + "echo -e '\\n\\n#### Fake tests, they should FAIL or be SKIPPED, but never PASS\\n' >> test.result\n" + ) first_cid = list(NEW_COMPILERS.keys())[0] compiler = NEW_COMPILERS[first_cid] lang = None for l in LANGS: - if l != compiler['lang'] and not (l in ["C", "CXX"] and compiler['lang'] in ["C", "Cxx"]): + if l != compiler["lang"] and not ( + l in ["C", "CXX"] and compiler["lang"] in ["C", "Cxx"] + ): lang = l break ## Mismatching lang input - create_test(compiler['arch'], lang, first_cid) + create_test(compiler["arch"], lang, first_cid) API_TESTS_OUTPUT.write("## End of fake tests.\n") - API_TESTS_OUTPUT.write("echo -e '#### End of fake tests\\n' >> test.result\n") + API_TESTS_OUTPUT.write( + "echo -e '#### End of fake tests\\n' >> test.result\n" + )