#!/usr/bin/env python3
"""Check ti-toolkit-8x-tokens.xml CE token coverage in the CE<->Evo token
conversion mappings scraped from src/EvoFormat.cpp."""
import argparse
import re
import xml.etree.ElementTree as ET
from pathlib import Path


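# Inputs resolved relative to this script: the ti-toolkit token sheet and the
# C++ converter whose mapping tables are scraped below.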
ROOT = Path(__file__).resolve().parent
TOKENS_XML = ROOT / "ti-toolkit-8x-tokens.xml"
EVO_FORMAT = ROOT / "src" / "EvoFormat.cpp"


def hex_word(value):
    return f"0x{value:04X}"


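# Extract the body of a C++ helper from EvoFormat.cpp by locating its header
# and brace-matching to the closing brace. This assumes the helpers are
# written as `static bool name(...) { ... }`; a different declaration style
# would make the search fail with a RuntimeError.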
def extract_function(source, name):
    header = re.search(rf"static\s+bool\s+{name}\s*\([^)]*\)\s*\{{", source)
    if not header:
        raise RuntimeError(f"Could not find {name}")

    brace = header.end() - 1
    depth = 0
    for index in range(brace, len(source)):
        if source[index] == "{":
            depth += 1
        elif source[index] == "}":
            depth -= 1
            if depth == 0:
                return source[brace + 1:index]

    raise RuntimeError(f"Could not find end of {name}")


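# Pick a human-readable label for a token element: the English <lang> entry's
# display attribute if present, else its <accessible> text, else a
# placeholder.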
def token_label(token_element):
    for lang in token_element.findall(".//lang"):
        if lang.get("code") == "en":
            display = lang.get("display")
            accessible = lang.findtext("accessible")
            if display:
                return display
            if accessible:
                return accessible
    return "(unnamed)"


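# Collect (value, label) pairs for every token in the XML sheet. One-byte
# tokens sit at the top level; two-byte tokens nest under a <two-byte>
# element whose value supplies the high byte. Values are assumed to carry a
# one-character prefix such as "$" (e.g. "$BB"), hence the [1:] slice before
# the base-16 conversion.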
def parse_ce_tokens(path):
    root = ET.fromstring(path.read_text())
    tokens = []
    for child in root:
        if child.tag == "token":
            tokens.append((int(child.get("value")[1:], 16), token_label(child)))
        elif child.tag == "two-byte":
            high = int(child.get("value")[1:], 16)
            for token in child.findall("token"):
                low = int(token.get("value")[1:], 16)
                tokens.append(((high << 8) | low, token_label(token)))
    return sorted(tokens)


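# Scrape the initializer of the `static const std::unordered_map<...> direct`
# lookup table out of a helper body; returns an empty list when the helper
# has no direct table.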
def parse_direct_map(function_body):
    map_match = re.search(
        r"static\s+const\s+std::unordered_map<[^>]+>\s+direct\s*=\s*\{(?P<body>.*?)\n\s*\};",
        function_body,
        re.S,
    )
    if not map_match:
        return []

    pair_pattern = re.compile(r"\{\s*(0x[0-9A-Fa-f]+)\s*,\s*(0x[0-9A-Fa-f]+)\s*\}")
    return [
        (int(left, 16), int(right, 16))
        for left, right in pair_pattern.findall(map_match.group("body"))
    ]


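# Expand the helper's range-translation branches into explicit (source, dest)
# pairs. The regexes expect blocks shaped roughly like the sketch below (the
# hex values are illustrative, not quoted from EvoFormat.cpp):
#
#     if (evoToken >= 0x0100 && evoToken <= 0x01FF) {
#         legacyToken = static_cast<uint16_t>(0xBB00 + (evoToken - 0x0100));
#         return true;
#     }
#
# Each matched block is unrolled one token at a time.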
def parse_range_maps(function_body, source_var, dest_var):
    range_pattern = re.compile(
        rf"if\s*\(\s*{source_var}\s*>=\s*(0x[0-9A-Fa-f]+)\s*&&\s*"
        rf"{source_var}\s*<=\s*(0x[0-9A-Fa-f]+)\s*\)\s*\{{(?P<body>.*?)return\s+true\s*;",
        re.S,
    )
    assignment_pattern = re.compile(
        rf"{dest_var}\s*=\s*static_cast<uint16_t>\(\s*"
        rf"(0x[0-9A-Fa-f]+)\s*\+\s*\(\s*{source_var}\s*-\s*"
        rf"(0x[0-9A-Fa-f]+)\s*\)\s*\)"
    )

    pairs = []
    for match in range_pattern.finditer(function_body):
        assign = assignment_pattern.search(match.group("body"))
        if not assign:
            continue

        source_first = int(match.group(1), 16)
        source_last = int(match.group(2), 16)
        dest_base = int(assign.group(1), 16)
        source_base = int(assign.group(2), 16)

        for source in range(source_first, source_last + 1):
            pairs.append((source, dest_base + (source - source_base)))
    return pairs


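# Build both direction maps from EvoFormat.cpp. Range expansions are applied
# first and direct-table entries second, so an explicit pair overrides
# whatever a range block produced for the same source token.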
def build_mappings(format_path):
    source = format_path.read_text()

    evo_to_legacy_body = extract_function(source, "direct_legacy_token_for_evo")
    legacy_to_evo_body = extract_function(source, "direct_evo_token_for_legacy")

    evo_to_legacy = dict(parse_range_maps(evo_to_legacy_body, "evoToken", "legacyToken"))
    evo_to_legacy.update(parse_direct_map(evo_to_legacy_body))

    legacy_to_evo = dict(parse_range_maps(legacy_to_evo_body, "legacyToken", "evoToken"))
    legacy_to_evo.update(parse_direct_map(legacy_to_evo_body))

    return evo_to_legacy, legacy_to_evo


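# Verbose report: one missing token per line.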
def print_section(title, rows):
    print(f"{title}: {len(rows)}")
    for token, name in rows:
        print(f"  {hex_word(token)} {name}")


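# Collapse sorted (token, name) rows into (first_token, first_name,
# last_token, last_name) groups wherever consecutive token values are
# contiguous.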
def compact_ranges(rows):
    if not rows:
        return []

    grouped = []
    start_token, start_name = rows[0]
    prev_token, prev_name = rows[0]
    for token, name in rows[1:]:
        if token == prev_token + 1:
            prev_token, prev_name = token, name
            continue

        grouped.append((start_token, start_name, prev_token, prev_name))
        start_token, start_name = token, name
        prev_token, prev_name = token, name

    grouped.append((start_token, start_name, prev_token, prev_name))
    return grouped


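# Compact report: contiguous runs of missing tokens printed as first..last.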
def print_range_section(title, rows):
    ranges = compact_ranges(rows)
    print(f"{title}: {len(rows)} tokens in {len(ranges)} ranges")
    for first_token, first_name, last_token, last_name in ranges:
        if first_token == last_token:
            print(f"  {hex_word(first_token)} {first_name}")
        else:
            print(f"  {hex_word(first_token)}..{hex_word(last_token)} {first_name}..{last_name}")


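# Exit status is nonzero when any XML token is missing from either direction,
# so the script can gate CI. Typical invocation (the filename here is a
# placeholder for however this script is saved):
#
#     python3 check_token_coverage.py --ranges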
def main():
    parser = argparse.ArgumentParser(
        description="Check ti-toolkit-8x-tokens.xml CE token coverage in CE<->Evo token conversion mappings."
    )
    parser.add_argument(
        "--ranges",
        action="store_true",
        help="Print missing tokens as compact contiguous ranges instead of one token per line.",
    )
    args = parser.parse_args()

    tokens = parse_ce_tokens(TOKENS_XML)
    evo_to_legacy, legacy_to_evo = build_mappings(EVO_FORMAT)
    evo_to_legacy_values = set(evo_to_legacy.values())

    missing_legacy_to_evo = [
        (token, name) for token, name in tokens if token not in legacy_to_evo
    ]
    missing_evo_to_legacy = [
        (token, name) for token, name in tokens if token not in evo_to_legacy_values
    ]

    print(f"XML CE tokens checked: {len(tokens)}")
    print(f"CE->Evo mapped CE tokens: {len(legacy_to_evo)}")
    print(f"Evo->CE produced CE tokens: {len(evo_to_legacy_values)}")
    print()
    section_printer = print_range_section if args.ranges else print_section
    section_printer("Missing from CE->Evo mapping", missing_legacy_to_evo)
    print()
    section_printer("Not produced by Evo->CE mapping", missing_evo_to_legacy)

    return 1 if missing_legacy_to_evo or missing_evo_to_legacy else 0


if __name__ == "__main__":
    raise SystemExit(main())