#!/usr/bin/env python3
"""Fix XAML files after a UiPath UIAutomation dependency downgrade.

Removes attributes and caps Version values that the older activity packs
don't recognise. All edits are regex-based (no XML parsing) so the
original formatting, attribute order and encoding are preserved exactly.

The script dynamically computes the delta from the catalog tables in
``uia_catalog.py``: given a target dependency version it collects all
attrs introduced above the ceiling for each element, removes them, and
caps the Version.
"""

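# Illustrative example of the rewrite (element and attribute names are made up,
# not taken from the real catalog): with a ceiling of V3 for "Anchor" and
# "NewProp" listed as introduced in V4, a line such as
#     <ui:Anchor Version="V5" NewProp="x" Timeout="30" />
# would be rewritten to
#     <ui:Anchor Version="V3" Timeout="30" />
# while lines already at or below the ceiling are left untouched.
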
from __future__ import annotations

import fnmatch
import os
import re
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path

import typer

from uia_catalog import (
    DEFAULT_TARGET,
    DEPENDENCY_CEILINGS,
    DEPENDENCY_UNKNOWN_ATTRS,
    ELEMENT_ATTRS_INTRODUCED,
)

# ── helpers ──────────────────────────────────────────────────────────────

DEFAULT_EXCLUDE_DIRS = {
    ".git", ".local", ".objects", ".screenshots",
    "bin", "obj", ".idea", ".vs", "__pycache__",
}

_ATTR_RE_CACHE: dict[str, re.Pattern[str]] = {}


def _attr_re(name: str) -> re.Pattern[str]:
    """Compile (and cache) a regex that matches ` name="..."`."""
    if name not in _ATTR_RE_CACHE:
        _ATTR_RE_CACHE[name] = re.compile(rf'\s+{re.escape(name)}="[^"]*"')
    return _ATTR_RE_CACHE[name]


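# For illustration (the attribute name here is hypothetical):
#     _attr_re("NewProp").sub("", '<x NewProp="1" Keep="2"/>')
# returns '<x Keep="2"/>' -- the leading whitespace is consumed together with
# the attribute, so no double spaces are left behind.

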
def _version_int(v: str) -> int:
    """'V4' -> 4. Returns -1 for unparseable values."""
    if v.startswith("V") and v[1:].isdigit():
        return int(v[1:])
    return -1


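# e.g. _version_int("V21") == 21 and _version_int("beta") == -1; the -1 fallback
# means a catalog key that is not of the form "Vn" never counts as introduced
# above the ceiling in _attrs_to_remove() below.

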
def _attrs_to_remove(element: str, ceiling: str) -> list[str]:
    """Collect all attrs introduced above *ceiling* for *element*."""
    ceiling_n = _version_int(ceiling)
    result: list[str] = []
    for ver, attrs in ELEMENT_ATTRS_INTRODUCED.get(element, {}).items():
        if _version_int(ver) > ceiling_n:
            result.extend(attrs)
    return result


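# Sketch of the lookup with a hypothetical catalog entry: if
# ELEMENT_ATTRS_INTRODUCED["Anchor"] were {"V3": ["A"], "V5": ["B", "C"]},
# then _attrs_to_remove("Anchor", "V4") would return ["B", "C"].
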
# Matches an opening tag: <prefix:LocalName or <LocalName
_OPEN_TAG_RE = re.compile(r"<(?:\w+:)?(\w+)\s")

# Matches Version="Vnn" and captures the parts
_VERSION_VALUE_RE = re.compile(r'(Version=")V(\d+)(")')

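# On a (hypothetical) line like '<ui:Anchor Version="V4" ...>' the first pattern
# captures the local name "Anchor" regardless of namespace prefix, and the second
# captures ('Version="', '4', '"') so the value can be swapped without touching
# the surrounding text.
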
# ── file discovery ───────────────────────────────────────────────────────


def iter_files(
    root: Path,
    globs: list[str],
    exclude_dirs: set[str],
) -> list[Path]:
    result: list[Path] = []
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = sorted(d for d in dirnames if d not in exclude_dirs)
        pdir = Path(dirpath)
        for name in sorted(filenames):
            full = pdir / name
            rel_posix = str(full.relative_to(root)).replace("\\", "/")
            if any(fnmatch.fnmatch(rel_posix, g) for g in globs):
                result.append(full)
    return result


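# Matching note: fnmatch's "*" also matches path separators, so a glob like
# "*.xaml" matches nested paths such as "Framework/Init.xaml" as well as files
# directly under the root -- unlike pathlib/glob semantics, "**" has no special
# meaning here beyond two consecutive wildcards.

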
# ── core logic ───────────────────────────────────────────────────────────


@dataclass
class FileChanges:
    """Accumulates change counts for a single file."""

    removed_global: dict[str, int] = field(default_factory=dict)
    removed_element: dict[str, dict[str, int]] = field(default_factory=dict)
    version_caps: dict[str, int] = field(default_factory=dict)

    @property
    def total(self) -> int:
        return (
            sum(self.removed_global.values())
            + sum(n for d in self.removed_element.values() for n in d.values())
            + sum(self.version_caps.values())
        )


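# Counting example (field contents are hypothetical): a FileChanges with
# removed_global={"A": 2}, removed_element={"Click": {"B": 1}} and
# version_caps={"Click": 1} reports total == 4.

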
def _line_has_version_above(line: str, max_ver: str) -> bool:
    """Return True if the line contains Version="Vn" with n > max."""
    max_n = _version_int(max_ver)
    for m in _VERSION_VALUE_RE.finditer(line):
        if int(m.group(2)) > max_n:
            return True
    return False


def _cap_version_in_line(line: str, max_ver: str) -> tuple[str, int]:
    """Cap Version="Vn" on *line* to *max_ver*.

    Returns (new_line, count_of_replacements).
    """
    max_n = _version_int(max_ver)
    count = 0

    def _replacer(m: re.Match[str]) -> str:
        nonlocal count
        if int(m.group(2)) > max_n:
            count += 1
            return f"{m.group(1)}{max_ver}{m.group(3)}"
        return m.group(0)

    new_line = _VERSION_VALUE_RE.sub(_replacer, line)
    return new_line, count


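# For illustration (hypothetical element): capping to "V3" turns
#     '<ui:Click Version="V5" />'  into  ('<ui:Click Version="V3" />', 1)
# while a line whose Version is already at or below the ceiling comes back
# unchanged with a count of 0.

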
def process_content(text: str, target_key: str) -> tuple[str, FileChanges]:
    """Apply all rules for *target_key* to *text* and return (new_text, changes)."""
    ceilings = DEPENDENCY_CEILINGS[target_key]
    global_attrs = DEPENDENCY_UNKNOWN_ATTRS.get(target_key, [])

    changes = FileChanges()
    lines = text.split("\n")
    new_lines: list[str] = []

    for line in lines:
        # 1) Global attribute removal (unconditional)
        for attr in global_attrs:
            pat = _attr_re(attr)
            line, n = pat.subn("", line)
            if n:
                changes.removed_global[attr] = (
                    changes.removed_global.get(attr, 0) + n
                )

        # 2) Version-gated element rules (dynamic delta)
        m = _OPEN_TAG_RE.search(line)
        if m:
            local_name = m.group(1)
            ceiling = ceilings.get(local_name)
            if ceiling and _line_has_version_above(line, ceiling):
                # version needs capping -> also strip attrs above ceiling
                for attr in _attrs_to_remove(local_name, ceiling):
                    pat = _attr_re(attr)
                    line, n = pat.subn("", line)
                    if n:
                        elem_d = changes.removed_element.setdefault(
                            local_name, {},
                        )
                        elem_d[attr] = elem_d.get(attr, 0) + n

                line, n = _cap_version_in_line(line, ceiling)
                if n:
                    changes.version_caps[local_name] = (
                        changes.version_caps.get(local_name, 0) + n
                    )

        new_lines.append(line)

    return "\n".join(new_lines), changes


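# Quick smoke test (file name and target are examples only; process_file() below
# is the encoding-aware way to do this for real):
#     new_text, changes = process_content(Path("Main.xaml").read_text(), "24.10")
#     print(changes.total)

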
def process_file(
    path: Path,
    target_key: str,
    *,
    apply: bool,
    backup: bool,
) -> FileChanges:
    raw = path.read_bytes()

    # Detect the encoding from the BOM so the file round-trips unchanged:
    # UTF-8 with BOM, UTF-16 LE/BE (the BOM survives decoding as U+FEFF and is
    # re-encoded verbatim), or plain UTF-8.
    if raw[:3] == b"\xef\xbb\xbf":
        encoding = "utf-8-sig"
    elif raw[:2] == b"\xff\xfe":
        encoding = "utf-16-le"
    elif raw[:2] == b"\xfe\xff":
        encoding = "utf-16-be"
    else:
        encoding = "utf-8"

    text = raw.decode(encoding)
    new_text, changes = process_content(text, target_key)

    if changes.total == 0:
        return changes

    if apply:
        if backup:
            bak = path.with_suffix(path.suffix + ".bak")
            if not bak.exists():
                path.rename(bak)
        path.write_bytes(new_text.encode(encoding))

    return changes


# ── reporting ────────────────────────────────────────────────────────────


def format_file_changes(changes: FileChanges, target_key: str) -> str:
    ceilings = DEPENDENCY_CEILINGS[target_key]
    parts: list[str] = []
    for attr, n in sorted(changes.removed_global.items()):
        parts.append(f"-{attr} x{n}")
    for elem, attrs in sorted(changes.removed_element.items()):
        for attr, n in sorted(attrs.items()):
            parts.append(f"-{elem}.{attr} x{n}")
    for elem, n in sorted(changes.version_caps.items()):
        parts.append(f"{elem}.Version->{ceilings[elem]} x{n}")
    return ", ".join(parts)


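# With the hypothetical FileChanges from the counting example above this yields
# a summary line like "-A x2, -Click.B x1, Click.Version->V3 x1" (the capped
# value comes from the ceiling table for the chosen target).

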
# ── CLI ──────────────────────────────────────────────────────────────────

app = typer.Typer(
    name="fix-uia",
    help="Fix UiPath files after UIAutomation dependency downgrade",
    no_args_is_help=True,
)

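# Typical invocations, assuming the script is saved as fix_uia.py (the file name
# is an assumption; the commands and options are defined below):
#     python fix_uia.py workflows --target 24.10                   # dry-run report
#     python fix_uia.py all --target 24.10 --apply                 # write changes
#     python fix_uia.py objects --target 25.10 --apply --no-backup

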
class TargetVersion(str, Enum):
    """Target dependency version."""

    v24_10 = "24.10"
    v25_10 = "25.10"


def _process_files(
    root: Path,
    globs: list[str],
    exclude_dirs: set[str],
    target_key: str,
    apply: bool,
    backup: bool,
) -> None:
    """Core processing logic."""
    typer.echo(f"target: {target_key}")

    files = iter_files(root, globs, exclude_dirs)
    files_with_hits = 0
    total_changes = 0

    for f in files:
        changes = process_file(f, target_key, apply=apply, backup=backup)
        if changes.total == 0:
            continue
        files_with_hits += 1
        total_changes += changes.total
        rel = f.relative_to(root)
        typer.echo(f"  {rel}: {format_file_changes(changes, target_key)}")

    mode = "APPLIED" if apply else "DRY-RUN"
    typer.echo(f"\n{mode}: {files_with_hits} file(s), {total_changes} edit(s)")
    if not apply and total_changes:
        typer.echo("  (re-run with --apply to write changes)")


@app.command()
def workflows(
    root: Path = typer.Option(".", "--root", help="Project root directory"),
    target: TargetVersion = typer.Option(
        TargetVersion.v24_10, "--target", help="Target dependency version"
    ),
    apply: bool = typer.Option(False, "--apply", help="Write changes (default is dry-run)"),
    backup: bool = typer.Option(True, "--backup/--no-backup", help="Create .bak files"),
    exclude_dir: list[str] = typer.Option(
        None, "--exclude-dir", help="Extra directory names to skip"
    ),
) -> None:
    """Fix workflow XAML files (*.xaml)."""
    _process_files(
        root=root.resolve(),
        globs=["**/*.xaml", "*.xaml"],
        exclude_dirs=set(DEFAULT_EXCLUDE_DIRS) | set(exclude_dir or []),
        target_key=target.value,
        apply=apply,
        backup=backup,
    )


@app.command()
def objects(
    root: Path = typer.Option(".", "--root", help="Project root directory"),
    target: TargetVersion = typer.Option(
        TargetVersion.v24_10, "--target", help="Target dependency version"
    ),
    apply: bool = typer.Option(False, "--apply", help="Write changes (default is dry-run)"),
    backup: bool = typer.Option(True, "--backup/--no-backup", help="Create .bak files"),
) -> None:
    """Fix object repository .content files."""
    exclude_dirs = set(DEFAULT_EXCLUDE_DIRS) - {".objects"}  # Remove .objects exclusion
    _process_files(
        root=root.resolve(),
        globs=[".objects/**/.content"],
        exclude_dirs=exclude_dirs,
        target_key=target.value,
        apply=apply,
        backup=backup,
    )


@app.command()
def all(
    root: Path = typer.Option(".", "--root", help="Project root directory"),
    target: TargetVersion = typer.Option(
        TargetVersion.v24_10, "--target", help="Target dependency version"
    ),
    apply: bool = typer.Option(False, "--apply", help="Write changes (default is dry-run)"),
    backup: bool = typer.Option(True, "--backup/--no-backup", help="Create .bak files"),
) -> None:
    """Fix both workflow XAML and object repository files."""
    exclude_dirs = set(DEFAULT_EXCLUDE_DIRS) - {".objects"}
    _process_files(
        root=root.resolve(),
        globs=["**/*.xaml", "*.xaml", ".objects/**/.content"],
        exclude_dirs=exclude_dirs,
        target_key=target.value,
        apply=apply,
        backup=backup,
    )


if __name__ == "__main__":
    app()