From c157e14fa9a6288a3f267a02d2ae10b859f2446e Mon Sep 17 00:00:00 2001 From: scott Date: Wed, 4 Mar 2026 21:50:00 -0500 Subject: [PATCH] Restructure into package: truenas_migrate/ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Split single-file script into focused modules: colors.py – ANSI helpers and shared logger summary.py – Summary dataclass and report renderer archive.py – Debug archive parser (SCALE + CORE layouts) client.py – WebSocket engine, TrueNASClient, dataset utilities migrate.py – Payload builders, migrate_smb_shares, migrate_nfs_shares cli.py – Interactive wizard, argparse, run(), main() __main__.py – python -m truenas_migrate entry point truenas_migrate.py retained as a one-line compatibility shim. Both 'python truenas_migrate.py' and 'python -m truenas_migrate' work. Co-Authored-By: Claude Sonnet 4.6 --- truenas_migrate.py | 1394 +---------------- truenas_migrate/__init__.py | 1 + truenas_migrate/__main__.py | 3 + .../__pycache__/__init__.cpython-314.pyc | Bin 0 -> 174 bytes .../__pycache__/__main__.cpython-314.pyc | Bin 0 -> 228 bytes .../__pycache__/archive.cpython-314.pyc | Bin 0 -> 18650 bytes .../__pycache__/cli.cpython-314.pyc | Bin 0 -> 19830 bytes .../__pycache__/client.cpython-314.pyc | Bin 0 -> 19146 bytes .../__pycache__/colors.cpython-314.pyc | Bin 0 -> 5640 bytes .../__pycache__/migrate.cpython-314.pyc | Bin 0 -> 9324 bytes .../__pycache__/summary.cpython-314.pyc | Bin 0 -> 5616 bytes truenas_migrate/archive.py | 352 +++++ truenas_migrate/cli.py | 425 +++++ truenas_migrate/client.py | 308 ++++ truenas_migrate/colors.py | 55 + truenas_migrate/migrate.py | 154 ++ truenas_migrate/summary.py | 93 ++ 17 files changed, 1393 insertions(+), 1392 deletions(-) create mode 100644 truenas_migrate/__init__.py create mode 100644 truenas_migrate/__main__.py create mode 100644 truenas_migrate/__pycache__/__init__.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/__main__.cpython-314.pyc create 
mode 100644 truenas_migrate/__pycache__/archive.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/cli.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/client.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/colors.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/migrate.cpython-314.pyc create mode 100644 truenas_migrate/__pycache__/summary.cpython-314.pyc create mode 100644 truenas_migrate/archive.py create mode 100644 truenas_migrate/cli.py create mode 100644 truenas_migrate/client.py create mode 100644 truenas_migrate/colors.py create mode 100644 truenas_migrate/migrate.py create mode 100644 truenas_migrate/summary.py diff --git a/truenas_migrate.py b/truenas_migrate.py index b678f48..eeb6402 100644 --- a/truenas_migrate.py +++ b/truenas_migrate.py @@ -1,1396 +1,6 @@ #!/usr/bin/env python3 -""" -truenas_migrate.py – TrueNAS Share Migration Tool -===================================================== -Reads SMB shares and NFS shares from a TrueNAS debug archive (.tar / .tgz) -produced by the built-in "Save Debug" feature, then re-creates them on a -destination TrueNAS system via the JSON-RPC 2.0 WebSocket API (TrueNAS 25.04+). - -SAFE BY DEFAULT - • Existing shares are never overwritten or deleted. - • Always run with --dry-run first to preview what will happen. - -REQUIREMENTS - Python 3.9+ (stdlib only – no external packages needed) - -QUICK START - # 1. Inspect your debug archive to confirm it contains the data you need: - python truenas_migrate.py --debug-tar debug.tgz --list-archive - - # 2. Dry-run – connect to destination but make zero changes: - python truenas_migrate.py \\ - --debug-tar debug.tgz \\ - --dest 192.168.1.50 \\ - --api-key "1-xxxxxxxxxxxx" \\ - --dry-run - - # 3. Live migration: - python truenas_migrate.py \\ - --debug-tar debug.tgz \\ - --dest 192.168.1.50 \\ - --api-key "1-xxxxxxxxxxxx" - - # 4. 
Migrate only SMB shares (skip NFS): - python truenas_migrate.py \\ - --debug-tar debug.tgz \\ - --dest 192.168.1.50 \\ - --api-key "1-xxxxxxxxxxxx" \\ - --migrate smb - -CONFLICT POLICY - Shares that already exist on the destination are silently skipped: - SMB – matched by share name (case-insensitive) - NFS – matched by export path (exact match) -""" - -from __future__ import annotations - -import argparse -import asyncio -import base64 -import contextlib -import getpass -import hashlib -import json -import logging -import os -import re as _re -import ssl -import struct -import sys -import tarfile -from dataclasses import dataclass, field -from pathlib import Path -from typing import Any, Optional - -# ───────────────────────────────────────────────────────────────────────────── -# Color helpers (ANSI; auto-disabled when stderr is not a TTY) -# ───────────────────────────────────────────────────────────────────────────── - -_USE_COLOR = sys.stderr.isatty() - -def _c(code: str, text: str) -> str: - return f"\033[{code}m{text}\033[0m" if _USE_COLOR else text - -def _dim(t: str) -> str: return _c("2", t) -def _bold(t: str) -> str: return _c("1", t) -def _red(t: str) -> str: return _c("31", t) -def _green(t: str) -> str: return _c("32", t) -def _yellow(t: str) -> str: return _c("33", t) -def _cyan(t: str) -> str: return _c("36", t) -def _bold_red(t: str) -> str: return _c("1;31", t) -def _bold_green(t: str) -> str: return _c("1;32", t) -def _bold_yellow(t: str) -> str: return _c("1;33", t) -def _bold_cyan(t: str) -> str: return _c("1;36", t) - -def _vis_len(s: str) -> int: - """Visible character width of a string, ignoring ANSI escape sequences.""" - return len(_re.sub(r'\033\[[0-9;]*m', '', s)) - - -# ───────────────────────────────────────────────────────────────────────────── -# Logging -# ───────────────────────────────────────────────────────────────────────────── - -class _ColorFormatter(logging.Formatter): - _STYLES = { - logging.DEBUG: "2", # dim - 
logging.INFO: "36", # cyan - logging.WARNING: "1;33", # bold yellow - logging.ERROR: "1;31", # bold red - logging.CRITICAL: "1;31", - } - - def format(self, record: logging.LogRecord) -> str: - ts = self.formatTime(record, self.datefmt) - msg = record.getMessage() - if _USE_COLOR: - code = self._STYLES.get(record.levelno, "0") - level = f"\033[{code}m{record.levelname:<8}\033[0m" - ts = f"\033[2m{ts}\033[0m" - else: - level = f"{record.levelname:<8}" - return f"{ts} {level} {msg}" - - -_handler = logging.StreamHandler() -_handler.setFormatter(_ColorFormatter(datefmt="%H:%M:%S")) -logging.basicConfig(level=logging.INFO, handlers=[_handler]) -log = logging.getLogger("truenas_migrate") - - -# ───────────────────────────────────────────────────────────────────────────── -# Summary -# ───────────────────────────────────────────────────────────────────────────── - -@dataclass -class Summary: - smb_found: int = 0 - smb_created: int = 0 - smb_skipped: int = 0 - smb_failed: int = 0 - - nfs_found: int = 0 - nfs_created: int = 0 - nfs_skipped: int = 0 - nfs_failed: int = 0 - - errors: list[str] = field(default_factory=list) - - # Populated during dry-run dataset safety checks - paths_to_create: list[str] = field(default_factory=list) - missing_datasets: list[str] = field(default_factory=list) - - def report(self) -> str: - w = 60 - - def _stat(label: str, n: int, color_fn) -> str: - s = f"{label}={n}" - return color_fn(s) if n > 0 else _dim(s) - - smb_val = ( - f"{_dim('found=' + str(self.smb_found))} " - f"{_stat('created', self.smb_created, _bold_green)} " - f"{_stat('skipped', self.smb_skipped, _yellow)} " - f"{_stat('failed', self.smb_failed, _bold_red)}" - ) - nfs_val = ( - f"{_dim('found=' + str(self.nfs_found))} " - f"{_stat('created', self.nfs_created, _bold_green)} " - f"{_stat('skipped', self.nfs_skipped, _yellow)} " - f"{_stat('failed', self.nfs_failed, _bold_red)}" - ) - - hr = _cyan("─" * w) - tl = _cyan("┌"); tr = _cyan("┐") - ml = _cyan("├"); mr = _cyan("┤") - 
bl = _cyan("└"); br = _cyan("┘") - side = _cyan("│") - - title_text = "MIGRATION SUMMARY" - lpad = (w - len(title_text)) // 2 - rpad = w - len(title_text) - lpad - title_row = f"{side}{' ' * lpad}{_bold(title_text)}{' ' * rpad}{side}" - - def row(label: str, val: str) -> str: - right = max(0, w - 2 - len(label) - _vis_len(val)) - return f"{side} {_dim(label)}{val}{' ' * right} {side}" - - lines = [ - "", - f"{tl}{hr}{tr}", - title_row, - f"{ml}{hr}{mr}", - row("SMB shares : ", smb_val), - row("NFS shares : ", nfs_val), - f"{bl}{hr}{br}", - ] - - if self.errors: - lines.append(f"\n {_bold_red(str(len(self.errors)) + ' error(s):')} ") - for e in self.errors: - lines.append(f" {_red('•')} {e}") - - if self.missing_datasets: - lines.append( - f"\n {_bold_yellow('WARNING:')} " - f"{len(self.missing_datasets)} share path(s) have no " - "matching dataset on the destination:" - ) - for p in self.missing_datasets: - lines.append(f" {_yellow('•')} {p}") - lines.append( - " These paths must exist before shares can be created.\n" - " Use interactive mode or answer 'y' at the dataset prompt to create them." - ) - lines.append("") - return "\n".join(lines) - - -# ───────────────────────────────────────────────────────────────────────────── -# Debug archive parser -# ───────────────────────────────────────────────────────────────────────────── -# -# TrueNAS SCALE generates debug archives with the "ixdiagnose" tool. 
-# The internal layout has changed across versions: -# -# SCALE 24.04+ (plugins layout, lowercase dirs, combined JSON files) -# ixdiagnose/plugins/smb/smb_info.json – SMB shares + config combined -# ixdiagnose/plugins/nfs/nfs_config.json – NFS shares + config combined -# -# Older SCALE (plugins layout, uppercase dirs, per-query JSON files) -# ixdiagnose/plugins/SMB/sharing.smb.query.json -# ixdiagnose/plugins/SMB/smb.config.json -# ixdiagnose/plugins/NFS/sharing.nfs.query.json -# ixdiagnose/plugins/Sharing/sharing.smb.query.json -# ixdiagnose/plugins/Sharing/sharing.nfs.query.json -# -# TrueNAS CORE uses the "freenas-debug" tool (stored as "fndebug" inside the -# archive). It produces only plain-text dump files – there is NO JSON share -# data in CORE debug archives. The script detects CORE archives early and -# exits with a clear message rather than silently returning empty results. - -_CANDIDATES: dict[str, list[str]] = { - "smb_shares": [ - # SCALE 24.04+ – combined plugin file; shares are under "sharing_smb_query" - "ixdiagnose/plugins/smb/smb_info.json", - # Older SCALE – uppercase plugin dirs, per-query files - "ixdiagnose/plugins/SMB/sharing.smb.query.json", - "ixdiagnose/plugins/Sharing/sharing.smb.query.json", - "ixdiagnose/SMB/sharing.smb.query.json", - ], - "nfs_shares": [ - # SCALE 24.04+ – combined plugin file; shares are under "sharing_nfs_query" - "ixdiagnose/plugins/nfs/nfs_config.json", - # Older SCALE – uppercase plugin dirs, per-query files - "ixdiagnose/plugins/NFS/sharing.nfs.query.json", - "ixdiagnose/plugins/Sharing/sharing.nfs.query.json", - "ixdiagnose/NFS/sharing.nfs.query.json", - ], -} - -# When a candidate file bundles multiple datasets, pull out the right sub-key. 
-_KEY_WITHIN_FILE: dict[str, str] = { - "smb_shares": "sharing_smb_query", - "nfs_shares": "sharing_nfs_query", -} - -# Keyword fragments for heuristic fallback scan (SCALE archives only) -_KEYWORDS: dict[str, list[str]] = { - "smb_shares": ["sharing.smb", "smb_share", "sharing/smb", "smb_info"], - "nfs_shares": ["sharing.nfs", "nfs_share", "sharing/nfs", "nfs_config"], -} - -# Presence of this path prefix identifies a TrueNAS CORE archive (fndebug / -# freenas-debug). CORE stores diagnostics as plain-text dump files, but each -# dump embeds JSON blocks that we can extract. -_CORE_MARKER = "ixdiagnose/fndebug" - - -def _members_map(tf: tarfile.TarFile) -> dict[str, tarfile.TarInfo]: - """Return {normalised_path: TarInfo} for every member.""" - return {m.name.lstrip("./"): m for m in tf.getmembers()} - - -def _read_json(tf: tarfile.TarFile, info: tarfile.TarInfo) -> Optional[Any]: - """Extract and JSON-parse one archive member. Returns None on any error.""" - try: - fh = tf.extractfile(info) - if fh is None: - return None - raw = fh.read().decode("utf-8", errors="replace").strip() - return json.loads(raw) if raw else None - except Exception as exc: - log.debug("Could not parse %s: %s", info.name, exc) - return None - - -def _extract_subkey(raw: Any, data_type: str) -> Optional[Any]: - """ - When a JSON file bundles multiple datasets, pull out the sub-key that - corresponds to data_type (e.g. "sharing_smb_query" from smb_info.json). - Falls back to the raw value when no sub-key mapping exists. - """ - if not isinstance(raw, dict): - return raw - key = _KEY_WITHIN_FILE.get(data_type) - if key and key in raw: - return raw[key] - return raw - - -def _find_data( - tf: tarfile.TarFile, - members: dict[str, tarfile.TarInfo], - data_type: str, -) -> Optional[Any]: - """Try candidate paths, then keyword heuristics. 
Return parsed JSON or None.""" - - # Pass 1 – exact / suffix match against known candidate paths - for candidate in _CANDIDATES[data_type]: - norm = candidate.lstrip("./") - # Direct hit - info = members.get(norm) - if info is None: - # Archive may have a date-stamped top-level directory - for path, member in members.items(): - if path == norm or path.endswith("/" + norm): - info = member - break - if info is not None: - raw = _read_json(tf, info) - result = _extract_subkey(raw, data_type) - if result is not None: - log.info(" %-12s → %s", data_type, info.name) - return result - - # Pass 2 – keyword heuristic scan over all .json members - log.debug(" %s: candidates missed, scanning archive …", data_type) - keywords = _KEYWORDS[data_type] - for path in sorted(members): - if not path.lower().endswith(".json"): - continue - if any(kw in path.lower() for kw in keywords): - raw = _read_json(tf, members[path]) - result = _extract_subkey(raw, data_type) - if result is not None: - log.info(" %-12s → %s (heuristic)", data_type, path) - return result - - return None - - -def _extract_core_dump_json(dump_text: str, title_fragment: str) -> list[Any]: - """ - Extract all top-level JSON values from a named section of a CORE dump.txt. - - CORE dump sections look like: - +--------...--------+ - + SECTION TITLE + ← title line (contains the section name) - +--------...--------+ - - debug finished in N seconds for SECTION TITLE - - Returns a list of parsed JSON values found in the content block, in order. - An empty list is returned when the section is not found or contains no JSON. 
- """ - import re as _re - - # Split on the horizontal rule lines - parts = _re.split(r'\+[-]{20,}\+', dump_text) - - for i, part in enumerate(parts): - if title_fragment.lower() in part.lower() and i + 1 < len(parts): - content = parts[i + 1] - # Trim the "debug finished …" trailer and surrounding whitespace - content = _re.sub( - r'debug finished.*', '', content, - flags=_re.IGNORECASE | _re.DOTALL, - ).strip() - - # Greedily parse consecutive JSON values from the content - results: list[Any] = [] - decoder = json.JSONDecoder() - pos = 0 - while pos < len(content): - remaining = content[pos:].lstrip() - if not remaining or remaining[0] not in "{[": - break - pos += len(content[pos:]) - len(remaining) # account for whitespace - try: - val, end = decoder.raw_decode(remaining) - results.append(val) - pos += end - except json.JSONDecodeError: - break - return results - - return [] - - -def _parse_core_into( - tf: tarfile.TarFile, - members: dict[str, tarfile.TarInfo], - result: dict[str, Any], -) -> None: - """ - Populate *result* from TrueNAS CORE fndebug dump files. 
- - SMB dump (ixdiagnose/fndebug/SMB/dump.txt) - "Database Dump" section → JSON object (global config) + JSON array (shares) - - NFS dump (ixdiagnose/fndebug/NFS/dump.txt) - "Configuration" section → JSON object (global config) + JSON array (shares) - """ - log.info("TrueNAS CORE archive detected; parsing fndebug dump files.") - - smb_key = "ixdiagnose/fndebug/SMB/dump.txt" - if smb_key in members: - fh = tf.extractfile(members[smb_key]) - dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] - vals = _extract_core_dump_json(dump, "Database Dump") - if len(vals) >= 2 and isinstance(vals[1], list): - result["smb_shares"] = vals[1] - log.info(" smb_shares → %s (CORE, %d share(s))", smb_key, len(vals[1])) - elif vals: - log.warning(" smb_shares → NOT FOUND in Database Dump") - else: - log.warning(" SMB dump not found: %s", smb_key) - - nfs_key = "ixdiagnose/fndebug/NFS/dump.txt" - if nfs_key in members: - fh = tf.extractfile(members[nfs_key]) - dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] - vals = _extract_core_dump_json(dump, "Configuration") - if len(vals) >= 2 and isinstance(vals[1], list): - result["nfs_shares"] = vals[1] - log.info(" nfs_shares → %s (CORE, %d share(s))", nfs_key, len(vals[1])) - else: - log.warning(" nfs_shares → NOT FOUND in Configuration") - else: - log.warning(" NFS dump not found: %s", nfs_key) - - if not result["smb_shares"] and not result["nfs_shares"]: - log.warning( - "No share data found in CORE archive. " - "This is expected when SMB/NFS services were disabled on the source system." - ) - - -@contextlib.contextmanager -def _open_source_tar(tar_path: str): - """ - Open the archive that actually contains the ixdiagnose data. - - TrueNAS HA debug bundles (25.04+) wrap each node's ixdiagnose snapshot - in a separate .txz inside the outer .tgz. We prefer the member whose - name includes '_active'; if none is labelled that way we fall back to the - first .txz found. 
Single-node (non-HA) bundles are used directly. - """ - with tarfile.open(tar_path, "r:*") as outer: - txz_members = [ - m for m in outer.getmembers() - if m.name.lower().endswith(".txz") and m.isfile() - ] - if not txz_members: - yield outer - return - - # HA bundle – pick the active node's inner archive - active = next( - (m for m in txz_members if "_active" in m.name.lower()), - txz_members[0], - ) - log.info(" HA bundle detected; reading inner archive: %s", active.name) - fh = outer.extractfile(active) - with tarfile.open(fileobj=fh, mode="r:*") as inner: - yield inner - - -def parse_archive(tar_path: str) -> dict[str, Any]: - """ - Extract SMB shares and NFS shares from the debug archive. - Returns: {"smb_shares": list, "nfs_shares": list} - """ - log.info("Opening archive: %s", tar_path) - result: dict[str, Any] = { - "smb_shares": [], - "nfs_shares": [], - } - - try: - with _open_source_tar(tar_path) as tf: - members = _members_map(tf) - log.info(" Archive contains %d total entries.", len(members)) - - is_core = any( - p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") - for p in members - ) - - if is_core: - _parse_core_into(tf, members, result) - else: - for key in ("smb_shares", "nfs_shares"): - data = _find_data(tf, members, key) - if data is None: - log.warning(" %-12s → NOT FOUND", key) - continue - - if isinstance(data, list): - result[key] = data - elif isinstance(data, dict): - # Some versions wrap the list: {"result": [...]} - for v in data.values(): - if isinstance(v, list): - result[key] = v - break - - except (tarfile.TarError, OSError) as exc: - log.error("Failed to open archive: %s", exc) - sys.exit(1) - - log.info( - "Parsed: %d SMB share(s), %d NFS share(s)", - len(result["smb_shares"]), - len(result["nfs_shares"]), - ) - return result - - -def list_archive_and_exit(tar_path: str) -> None: - """ - Print a structured listing of the archive contents, then exit. - For SCALE archives: lists all .json plugin files. 
- For CORE archives: lists the fndebug dump files and the JSON sections - that contain share / config data. - """ - try: - with _open_source_tar(tar_path) as tf: - members_map = _members_map(tf) - is_core = any( - p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") - for p in members_map - ) - - if is_core: - print(f"\nTrueNAS CORE archive: {tar_path}\n") - print(" fndebug plain-text dump files (JSON is embedded inside):\n") - dump_files = sorted( - p for p in members_map - if p.startswith(_CORE_MARKER + "/") and p.endswith(".txt") - ) - for p in dump_files: - size = members_map[p].size / 1024 - print(f" {p} ({size:.1f} KB)") - print() - print(" Data this tool will extract:") - print(" SMB config + shares → fndebug/SMB/dump.txt " - "(\"Database Dump\" section)") - print(" NFS shares → fndebug/NFS/dump.txt " - "(\"Configuration\" section)") - else: - print(f"\nJSON plugin files in archive: {tar_path}\n") - json_members = sorted( - (m for m in tf.getmembers() if m.name.endswith(".json")), - key=lambda m: m.name, - ) - if not json_members: - print(" (no .json files found)") - else: - current_dir = "" - for m in json_members: - parts = m.name.lstrip("./").split("/") - top = "/".join(parts[:-1]) if len(parts) > 1 else "" - if top != current_dir: - print(f"\n {top or '(root)'}/") - current_dir = top - print(f" {parts[-1]} ({m.size / 1024:.1f} KB)") - except (tarfile.TarError, OSError) as exc: - sys.exit(f"ERROR: {exc}") - print() - sys.exit(0) - - -# ───────────────────────────────────────────────────────────────────────────── -# Payload builders -# ───────────────────────────────────────────────────────────────────────────── - -# Read-only / server-generated fields that must NOT be sent on create/update -_SMB_SHARE_READONLY = frozenset({"id", "locked"}) - -# CORE SMB share fields that do not exist in the SCALE API -_SMB_SHARE_CORE_EXTRAS = frozenset({ - "vuid", # server-generated Time Machine UUID; SCALE sets this automatically -}) - -# CORE NFS share fields that do 
not exist in the SCALE API -_NFS_SHARE_CORE_EXTRAS = frozenset({ - "paths", # CORE uses a list; SCALE uses a single "path" string (converted below) - "alldirs", # removed in SCALE - "quiet", # removed in SCALE -}) - - -def _smb_share_payload(share: dict) -> dict: - exclude = _SMB_SHARE_READONLY | _SMB_SHARE_CORE_EXTRAS - return {k: v for k, v in share.items() if k not in exclude} - - -def _nfs_share_payload(share: dict) -> dict: - payload = {k: v for k, v in share.items() - if k not in {"id", "locked"} | _NFS_SHARE_CORE_EXTRAS} - # CORE stores export paths as a list under "paths"; SCALE expects a single "path" string. - if "path" not in payload and share.get("paths"): - payload["path"] = share["paths"][0] - return payload - - -# ───────────────────────────────────────────────────────────────────────────── -# Minimal WebSocket client (stdlib only, RFC 6455) -# ───────────────────────────────────────────────────────────────────────────── - -def _ws_mask(data: bytes, mask: bytes) -> bytes: - """XOR *data* with a 4-byte repeating mask key.""" - out = bytearray(data) - for i in range(len(out)): - out[i] ^= mask[i & 3] - return bytes(out) - - -def _ws_encode_frame(payload: bytes, opcode: int = 0x1) -> bytes: - """Encode a masked client→server WebSocket frame.""" - mask = os.urandom(4) - length = len(payload) - header = bytearray([0x80 | opcode]) # FIN=1 - if length < 126: - header.append(0x80 | length) - elif length < 65536: - header.append(0x80 | 126) - header += struct.pack("!H", length) - else: - header.append(0x80 | 127) - header += struct.pack("!Q", length) - return bytes(header) + mask + _ws_mask(payload, mask) - - -async def _ws_recv_message(reader: asyncio.StreamReader) -> str: - """ - Read one complete WebSocket message, reassembling continuation frames. - Skips ping/pong control frames. Raises OSError on close frame. 
- """ - fragments: list[bytes] = [] - while True: - hdr = await reader.readexactly(2) - fin = bool(hdr[0] & 0x80) - opcode = hdr[0] & 0x0F - masked = bool(hdr[1] & 0x80) - length = hdr[1] & 0x7F - - if length == 126: - length = struct.unpack("!H", await reader.readexactly(2))[0] - elif length == 127: - length = struct.unpack("!Q", await reader.readexactly(8))[0] - - mask_key = await reader.readexactly(4) if masked else None - payload = await reader.readexactly(length) if length else b"" - if mask_key: - payload = _ws_mask(payload, mask_key) - - if opcode == 0x8: # Close frame - raise OSError("WebSocket: server sent close frame") - if opcode in (0x9, 0xA): # Ping / Pong — ignore - continue - - fragments.append(payload) - if fin: - return b"".join(fragments).decode("utf-8") - - -class _WebSocket: - """asyncio StreamReader/Writer wrapped to match the send/recv/close API.""" - - def __init__( - self, - reader: asyncio.StreamReader, - writer: asyncio.StreamWriter, - ) -> None: - self._reader = reader - self._writer = writer - - async def send(self, data: str) -> None: - self._writer.write(_ws_encode_frame(data.encode("utf-8"), opcode=0x1)) - await self._writer.drain() - - async def recv(self) -> str: - return await _ws_recv_message(self._reader) - - async def close(self) -> None: - with contextlib.suppress(Exception): - self._writer.write(_ws_encode_frame(b"", opcode=0x8)) - await self._writer.drain() - self._writer.close() - with contextlib.suppress(Exception): - await self._writer.wait_closed() - - -async def _ws_connect( - host: str, - port: int, - path: str, - ssl_ctx: ssl.SSLContext, -) -> _WebSocket: - """ - Open a TLS connection, perform the HTTP→WebSocket upgrade handshake, - and return a connected _WebSocket. 
- """ - reader, writer = await asyncio.open_connection(host, port, ssl=ssl_ctx) - - key = base64.b64encode(os.urandom(16)).decode() - writer.write(( - f"GET {path} HTTP/1.1\r\n" - f"Host: {host}:{port}\r\n" - f"Upgrade: websocket\r\n" - f"Connection: Upgrade\r\n" - f"Sec-WebSocket-Key: {key}\r\n" - f"Sec-WebSocket-Version: 13\r\n" - f"\r\n" - ).encode()) - await writer.drain() - - # Read headers line-by-line to avoid consuming WebSocket frame bytes - response_lines: list[bytes] = [] - while True: - line = await asyncio.wait_for(reader.readline(), timeout=20) - if not line: - raise OSError("Connection closed during WebSocket handshake") - response_lines.append(line) - if line in (b"\r\n", b"\n"): - break - - status = response_lines[0].decode("latin-1").strip() - if " 101 " not in status: - raise OSError(f"WebSocket upgrade failed: {status}") - - expected = base64.b64encode( - hashlib.sha1( - (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode() - ).digest() - ).decode().lower() - headers_text = b"".join(response_lines).decode("latin-1").lower() - if expected not in headers_text: - raise OSError("WebSocket upgrade: Sec-WebSocket-Accept mismatch") - - return _WebSocket(reader, writer) - - -# ───────────────────────────────────────────────────────────────────────────── -# TrueNAS JSON-RPC 2.0 WebSocket client -# ───────────────────────────────────────────────────────────────────────────── - -class TrueNASClient: - """ - Minimal async JSON-RPC 2.0 client for the TrueNAS WebSocket API. 
- - TrueNAS 25.04+ endpoint: wss://:/api/current - Authentication: auth.login_with_api_key - """ - - def __init__( - self, - host: str, - api_key: str, - port: int = 443, - verify_ssl: bool = False, - ) -> None: - self._host = host - self._port = port - self._api_key = api_key - self._verify_ssl = verify_ssl - self._ws = None - self._call_id = 0 - - @property - def _url(self) -> str: - return f"wss://{self._host}:{self._port}/api/current" - - async def __aenter__(self) -> "TrueNASClient": - await self._connect() - return self - - async def __aexit__(self, *_: Any) -> None: - if self._ws: - await self._ws.close() - self._ws = None - - async def _connect(self) -> None: - ctx = ssl.create_default_context() - if not self._verify_ssl: - ctx.check_hostname = False - ctx.verify_mode = ssl.CERT_NONE - - log.info("Connecting to %s …", self._url) - try: - self._ws = await _ws_connect( - host=self._host, - port=self._port, - path="/api/current", - ssl_ctx=ctx, - ) - except (OSError, asyncio.TimeoutError) as exc: - log.error("Connection failed: %s", exc) - raise - - log.info("Authenticating with API key …") - result = await self.call("auth.login_with_api_key", [self._api_key]) - if result is not True and result != "SUCCESS": - raise PermissionError(f"Authentication rejected: {result!r}") - log.info("Connected and authenticated.") - - async def call(self, method: str, params: Optional[list] = None) -> Any: - """ - Send one JSON-RPC request and return its result. - Raises RuntimeError if the API returns an error. 
- """ - self._call_id += 1 - req_id = self._call_id - - await self._ws.send(json.dumps({ - "jsonrpc": "2.0", - "id": req_id, - "method": method, - "params": params or [], - })) - - # Drain until the matching reply arrives (skip server-push notifications) - while True: - raw = await asyncio.wait_for(self._ws.recv(), timeout=60) - msg = json.loads(raw) - - if "id" not in msg: # server-initiated notification - continue - if msg["id"] != req_id: # response to a different in-flight call - continue - - if "error" in msg: - err = msg["error"] - reason = ( - err.get("data", {}).get("reason") - or err.get("message") - or repr(err) - ) - raise RuntimeError(f"API error [{method}]: {reason}") - - return msg.get("result") - - -# ───────────────────────────────────────────────────────────────────────────── -# Dataset safety checks -# ───────────────────────────────────────────────────────────────────────────── - -async def check_dataset_paths( - client: TrueNASClient, - paths: list[str], -) -> list[str]: - """ - Return the subset of *paths* that have no matching ZFS dataset on the - destination (i.e. no dataset whose mountpoint equals that path). - Returns an empty list when the dataset query itself fails (with a warning). 
- """ - if not paths: - return [] - - unique = sorted({p.rstrip("/") for p in paths if p}) - log.info("Checking %d share path(s) against destination datasets …", len(unique)) - try: - datasets = await client.call("pool.dataset.query") or [] - except RuntimeError as exc: - log.warning("Could not query datasets (skipping check): %s", exc) - return [] - - mountpoints = { - d.get("mountpoint", "").rstrip("/") - for d in datasets - if d.get("mountpoint") - } - - missing = [p for p in unique if p not in mountpoints] - if missing: - for p in missing: - log.warning(" MISSING dataset for path: %s", p) - else: - log.info(" All share paths exist as datasets.") - return missing - - -async def create_dataset(client: TrueNASClient, path: str) -> bool: - """ - Create a ZFS dataset whose mountpoint will be *path*. - - *path* must be an absolute /mnt/… path (e.g. /mnt/tank/data). - The dataset name is derived by stripping the leading /mnt/ prefix. - Returns True on success, False on failure. - """ - if not path.startswith("/mnt/"): - log.error("Cannot auto-create dataset for non-/mnt/ path: %s", path) - return False - - name = path[5:].rstrip("/") # strip "/mnt/" - log.info("Creating dataset %r …", name) - try: - await client.call("pool.dataset.create", [{"name": name}]) - log.info(" Created: %s", name) - return True - except RuntimeError as exc: - log.error(" Failed to create dataset %r: %s", name, exc) - return False - - -async def _create_missing_datasets( - host: str, - port: int, - api_key: str, - paths: list[str], - verify_ssl: bool = False, -) -> None: - """Open a fresh connection and create ZFS datasets for *paths*.""" - async with TrueNASClient( - host=host, port=port, api_key=api_key, verify_ssl=verify_ssl, - ) as client: - for path in paths: - await create_dataset(client, path) - - -# ───────────────────────────────────────────────────────────────────────────── -# Migration routines -# ───────────────────────────────────────────────────────────────────────────── - -async 
def migrate_smb_shares( - client: TrueNASClient, - shares: list[dict], - dry_run: bool, - summary: Summary, -) -> None: - summary.smb_found = len(shares) - if not shares: - log.info("No SMB shares found in archive.") - return - - log.info("Querying existing SMB shares on destination …") - try: - existing = await client.call("sharing.smb.query") or [] - except RuntimeError as exc: - msg = f"Could not query SMB shares: {exc}" - log.error(msg) - summary.errors.append(msg) - return - - existing_names = {s.get("name", "").lower() for s in existing} - log.info(" Destination has %d existing SMB share(s).", len(existing_names)) - - for share in shares: - name = share.get("name", "") - log.info("%s SMB share %s", _bold("──"), _bold_cyan(repr(name))) - - if name.lower() in existing_names: - log.info(" %s – already exists on destination.", _yellow("SKIP")) - summary.smb_skipped += 1 - continue - - payload = _smb_share_payload(share) - log.debug(" payload: %s", json.dumps(payload)) - - if dry_run: - log.info(" %s would create %s → %s", - _cyan("[DRY RUN]"), _bold_cyan(repr(name)), payload.get("path")) - summary.smb_created += 1 - if payload.get("path"): - summary.paths_to_create.append(payload["path"]) - continue - - try: - r = await client.call("sharing.smb.create", [payload]) - log.info(" %s id=%s", _bold_green("CREATED"), r.get("id")) - summary.smb_created += 1 - except RuntimeError as exc: - log.error(" %s: %s", _bold_red("FAILED"), exc) - summary.smb_failed += 1 - summary.errors.append(f"SMB share {name!r}: {exc}") - - -async def migrate_nfs_shares( - client: TrueNASClient, - shares: list[dict], - dry_run: bool, - summary: Summary, -) -> None: - summary.nfs_found = len(shares) - if not shares: - log.info("No NFS shares found in archive.") - return - - log.info("Querying existing NFS shares on destination …") - try: - existing = await client.call("sharing.nfs.query") or [] - except RuntimeError as exc: - msg = f"Could not query NFS shares: {exc}" - log.error(msg) - 
summary.errors.append(msg) - return - - existing_paths = {s.get("path", "").rstrip("/") for s in existing} - log.info(" Destination has %d existing NFS share(s).", len(existing_paths)) - - for share in shares: - # CORE archives store paths as a list; SCALE uses a single string. - core_paths = share.get("paths") or [] - path = (share.get("path") or (core_paths[0] if core_paths else "")).rstrip("/") - all_paths = [p.rstrip("/") for p in (core_paths if core_paths else ([path] if path else []))] - log.info("%s NFS export %s", _bold("──"), _bold_cyan(repr(path))) - - if path in existing_paths: - log.info(" %s – path already exported on destination.", _yellow("SKIP")) - summary.nfs_skipped += 1 - continue - - payload = _nfs_share_payload(share) - log.debug(" payload: %s", json.dumps(payload)) - - if dry_run: - log.info(" %s would create NFS export for %s", - _cyan("[DRY RUN]"), _bold_cyan(repr(path))) - summary.nfs_created += 1 - summary.paths_to_create.extend(all_paths) - continue - - try: - r = await client.call("sharing.nfs.create", [payload]) - log.info(" %s id=%s", _bold_green("CREATED"), r.get("id")) - summary.nfs_created += 1 - except RuntimeError as exc: - log.error(" %s: %s", _bold_red("FAILED"), exc) - summary.nfs_failed += 1 - summary.errors.append(f"NFS share {path!r}: {exc}") - - -# ───────────────────────────────────────────────────────────────────────────── -# CLI -# ───────────────────────────────────────────────────────────────────────────── - -async def run( - args: argparse.Namespace, - archive: Optional[dict] = None, -) -> Summary: - if archive is None: - archive = parse_archive(args.debug_tar) - migrate_set = set(args.migrate) - - if args.dry_run: - msg = " DRY RUN – no changes will be made on the destination " - bar = _bold_yellow("─" * len(msg)) - print(f"\n{_bold_yellow('┌')}{bar}{_bold_yellow('┐')}", file=sys.stderr) - print(f"{_bold_yellow('│')}{_bold_yellow(msg)}{_bold_yellow('│')}", file=sys.stderr) - 
print(f"{_bold_yellow('└')}{bar}{_bold_yellow('┘')}\n", file=sys.stderr) - - summary = Summary() - - async with TrueNASClient( - host=args.dest, - port=args.port, - api_key=args.api_key, - verify_ssl=args.verify_ssl, - ) as client: - - if "smb" in migrate_set: - await migrate_smb_shares( - client, archive["smb_shares"], args.dry_run, summary) - - if "nfs" in migrate_set: - await migrate_nfs_shares( - client, archive["nfs_shares"], args.dry_run, summary) - - # During dry runs, verify that every path we would create a share for - # actually exists as a ZFS dataset on the destination system. - if args.dry_run and summary.paths_to_create: - summary.missing_datasets = await check_dataset_paths( - client, summary.paths_to_create, - ) - - return summary - - -# ───────────────────────────────────────────────────────────────────────────── -# Interactive wizard -# ───────────────────────────────────────────────────────────────────────────── - -def _find_debug_archives(directory: str = ".") -> list[Path]: - """Return sorted list of TrueNAS debug archives found in *directory*.""" - patterns = ("*.tgz", "*.tar.gz", "*.tar", "*.txz", "*.tar.xz") - found: set[Path] = set() - for pat in patterns: - found.update(Path(directory).glob(pat)) - return sorted(found) - - -def _prompt(label: str, default: str = "") -> str: - suffix = f" [{default}]" if default else "" - try: - val = input(f"{label}{suffix}: ").strip() - return val if val else default - except (EOFError, KeyboardInterrupt): - print() - sys.exit(0) - - -def _confirm(label: str) -> bool: - try: - return input(f"{label} [y/N]: ").strip().lower() in ("y", "yes") - except (EOFError, KeyboardInterrupt): - print() - return False - - -def _select_shares(shares: list[dict], share_type: str) -> list[dict]: - """ - Display a numbered list of *shares* and return only those the user selects. - Enter (or 'all') returns all shares unchanged. 'n' / 'none' returns []. 
- """ - if not shares: - return shares - - print(f"\n {_bold(f'{share_type} shares in archive ({len(shares)}):')} \n") - for i, share in enumerate(shares, 1): - if share_type == "SMB": - name = share.get("name", "") - path = share.get("path", "") - print(f" {_cyan(str(i) + '.')} {name:<22} {_dim(path)}") - else: # NFS - pl = share.get("paths") or [] - path = share.get("path") or (pl[0] if pl else "") - extra = f" {_dim('+ ' + str(len(pl) - 1) + ' more')}" if len(pl) > 1 else "" - print(f" {_cyan(str(i) + '.')} {path}{extra}") - - print() - raw = _prompt( - f" Select {share_type} shares to migrate " - "(e.g. '1 3', Enter = all, 'n' = none)", - default="all", - ) - - low = raw.strip().lower() - if low in ("", "all"): - print(f" {_green('✓')} All {len(shares)} {share_type} share(s) selected.") - return shares - if low in ("n", "none", "0"): - print(f" {_yellow('–')} No {share_type} shares selected.") - return [] - - seen: set[int] = set() - selected: list[dict] = [] - for tok in raw.split(): - if tok.isdigit(): - idx = int(tok) - 1 - if 0 <= idx < len(shares) and idx not in seen: - seen.add(idx) - selected.append(shares[idx]) - - if selected: - print(f" {_green('✓')} {len(selected)} of {len(shares)} {share_type} share(s) selected.") - else: - print(f" {_yellow('–')} No valid selections; skipping {share_type} shares.") - return selected - - -def interactive_mode() -> None: - """Interactive wizard: pick archive → configure → dry run → confirm → apply.""" - print( - f"\n{_bold_cyan(' TrueNAS Share Migration Tool')}\n" - f" {_dim('Migrate SMB/NFS shares from a debug archive to a live system.')}\n" - ) - - # 1 ── Locate debug archive ──────────────────────────────────────────────── - archives = _find_debug_archives() - if not archives: - sys.exit( - "No debug archives (.tgz / .tar.gz / .tar / .txz) found in the " - "current directory.\n" - "Copy your TrueNAS debug file here, or use --debug-tar to specify a path." 
- ) - - if len(archives) == 1: - chosen = archives[0] - print(f" {_dim('Archive:')} {_bold(chosen.name)} {_dim('(' + f'{chosen.stat().st_size / 1_048_576:.1f} MB' + ')')}\n") - else: - print(f" {_bold('Debug archives found:')}\n") - for i, p in enumerate(archives, 1): - print(f" {_cyan(str(i) + '.')} {p.name} {_dim('(' + f'{p.stat().st_size / 1_048_576:.1f} MB' + ')')}") - print() - while True: - raw = _prompt(f"Select archive [1-{len(archives)}]") - if raw.isdigit() and 1 <= int(raw) <= len(archives): - chosen = archives[int(raw) - 1] - break - print(f" Enter a number from 1 to {len(archives)}.") - - # 2 ── Destination ───────────────────────────────────────────────────────── - print() - host = "" - while not host: - host = _prompt("Destination TrueNAS host or IP") - if not host: - print(" Host is required.") - - port_raw = _prompt("WebSocket port", default="443") - port = int(port_raw) if port_raw.isdigit() else 443 - - # 3 ── API key ───────────────────────────────────────────────────────────── - api_key = "" - while not api_key: - try: - api_key = getpass.getpass("API key (input hidden): ").strip() - except (EOFError, KeyboardInterrupt): - print() - sys.exit(0) - if not api_key: - print(" API key is required.") - - # 4 ── Migration scope ───────────────────────────────────────────────────── - print(f"\n {_bold('What to migrate?')}") - print(f" {_cyan('1.')} SMB shares") - print(f" {_cyan('2.')} NFS shares") - sel_raw = _prompt( - "Selection (space-separated numbers, Enter for all)", default="1 2" - ) - _sel_map = {"1": "smb", "2": "nfs"} - migrate: list[str] = [] - for tok in sel_raw.split(): - if tok in _sel_map and _sel_map[tok] not in migrate: - migrate.append(_sel_map[tok]) - if not migrate: - migrate = ["smb", "nfs"] - - # 5 ── Parse archive once (reused for dry + live runs) ──────────────────── - print() - archive_data = parse_archive(str(chosen)) - - # 5b ── Select individual shares ─────────────────────────────────────────── - if "smb" in migrate and 
archive_data["smb_shares"]: - archive_data["smb_shares"] = _select_shares(archive_data["smb_shares"], "SMB") - if "nfs" in migrate and archive_data["nfs_shares"]: - archive_data["nfs_shares"] = _select_shares(archive_data["nfs_shares"], "NFS") - print() - - base_ns = dict( - debug_tar=str(chosen), - dest=host, - port=port, - api_key=api_key, - verify_ssl=False, - migrate=migrate, - ) - - # 6 ── Dry run ───────────────────────────────────────────────────────────── - dry_summary = asyncio.run( - run(argparse.Namespace(**base_ns, dry_run=True), archive_data) - ) - print(dry_summary.report()) - - # Offer to create missing datasets before the live run - if dry_summary.missing_datasets: - non_mnt = [p for p in dry_summary.missing_datasets if not p.startswith("/mnt/")] - creatable = [p for p in dry_summary.missing_datasets if p.startswith("/mnt/")] - - if non_mnt: - print(f" NOTE: {len(non_mnt)} path(s) cannot be auto-created " - "(not under /mnt/):") - for p in non_mnt: - print(f" • {p}") - print() - - if creatable: - print(f" {len(creatable)} dataset(s) can be created automatically:") - for p in creatable: - print(f" • {p}") - print() - if _confirm(f"Create these {len(creatable)} dataset(s) on {host} now?"): - asyncio.run(_create_missing_datasets( - host=host, - port=port, - api_key=api_key, - paths=creatable, - )) - print() - - if not _confirm(f"Apply these changes to {host}?"): - print("Aborted – no changes made.") - sys.exit(0) - - # 7 ── Live run ──────────────────────────────────────────────────────────── - print() - live_summary = asyncio.run( - run(argparse.Namespace(**base_ns, dry_run=False), archive_data) - ) - print(live_summary.report()) - if live_summary.errors: - sys.exit(2) - - -def main() -> None: - if len(sys.argv) == 1: - interactive_mode() - return - - p = argparse.ArgumentParser( - prog="truenas_migrate.py", - description=( - "Migrate SMB shares, NFS shares, and SMB global config " - "from a TrueNAS debug archive to a live destination system." 
- ), - formatter_class=argparse.RawDescriptionHelpFormatter, - epilog=__doc__, - ) - - # ── Source ──────────────────────────────────────────────────────────────── - p.add_argument( - "--debug-tar", required=True, metavar="FILE", - help="Path to the TrueNAS debug .tar / .tgz from the SOURCE system.", - ) - p.add_argument( - "--list-archive", action="store_true", - help=( - "List all JSON files found in the archive and exit. " - "Run this first to verify the archive contains share data." - ), - ) - - # ── Destination ─────────────────────────────────────────────────────────── - p.add_argument( - "--dest", metavar="HOST", - help="Hostname or IP of the DESTINATION TrueNAS system.", - ) - p.add_argument( - "--port", type=int, default=443, metavar="PORT", - help="WebSocket port on the destination (default: 443).", - ) - p.add_argument( - "--verify-ssl", action="store_true", - help=( - "Verify the destination TLS certificate. " - "Off by default because most TrueNAS systems use self-signed certs." - ), - ) - - # ── Authentication ──────────────────────────────────────────────────────── - p.add_argument( - "--api-key", metavar="KEY", - help=( - "TrueNAS API key. Generate one in TrueNAS UI: " - "top-right account menu → API Keys." - ), - ) - - # ── Scope ───────────────────────────────────────────────────────────────── - p.add_argument( - "--migrate", - nargs="+", - choices=["smb", "nfs"], - default=["smb", "nfs"], - metavar="TYPE", - help=( - "What to migrate. Choices: smb nfs " - "(default: both). 
Example: --migrate smb" - ), - ) - p.add_argument( - "--dry-run", action="store_true", - help="Parse archive and connect to destination, but make no changes.", - ) - p.add_argument( - "--verbose", "-v", action="store_true", - help="Enable DEBUG-level logging.", - ) - - args = p.parse_args() - - if args.verbose: - log.setLevel(logging.DEBUG) - - if not Path(args.debug_tar).is_file(): - p.error(f"Archive not found: {args.debug_tar}") - - if args.list_archive: - list_archive_and_exit(args.debug_tar) # does not return - - if not args.dest: - p.error("--dest is required (or use --list-archive to inspect the archive).") - if not args.api_key: - p.error("--api-key is required.") - - summary = asyncio.run(run(args)) - print(summary.report()) - if summary.errors: - sys.exit(2) - +"""Compatibility shim – delegates to the truenas_migrate package.""" +from truenas_migrate.cli import main if __name__ == "__main__": main() diff --git a/truenas_migrate/__init__.py b/truenas_migrate/__init__.py new file mode 100644 index 0000000..82855db --- /dev/null +++ b/truenas_migrate/__init__.py @@ -0,0 +1 @@ +# truenas_migrate package diff --git a/truenas_migrate/__main__.py b/truenas_migrate/__main__.py new file mode 100644 index 0000000..4e28416 --- /dev/null +++ b/truenas_migrate/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/truenas_migrate/__pycache__/__init__.cpython-314.pyc b/truenas_migrate/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8dd5bff32d8abbc58ed345cfdcbd1de9d297a72c GIT binary patch literal 174 zcmdPqlZt^Ok`jmFl=Q@+qSQRSqQsKS{5<^KMa)3Kl?K7;Hmy|dZr=%wq z6{Y6sCFker2NdOJr6!jY>xUGTrut^47bTWt=I7~`0C{WHa^HWN5QtgU3fM$d2F6IRiAD9^#8SgTPK4f5NV=H0OT* literal 0 HcmV?d00001 diff --git a/truenas_migrate/__pycache__/archive.cpython-314.pyc b/truenas_migrate/__pycache__/archive.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3090055068df4d2a33e9ec6350bd7cd8a6a26b6c 
GIT binary patch literal 18650 zcmd6Pd30OXncsWZSAq*ja1;5sijYK5qE<_kWSWaeQKD#&h9(Ye6e2+iHVMG*fszF~ zGaPq1p=4)7JIXPMLfL7J3F!<=JxlC~#*w53LJWh*(; z{=WMbfD~lAr#U_IB;I%5eRuut^8LQ$VSAp_#^KuYKjtISevbPUJ*byaj_|h*@*Ef9 zrn#M5f~)8JT!Md`^YgvB9(_Vz%^|MqH6#q|ZcG^2-IOq~yE$RTUEgczu_mlNwuBA& zdB368-s4C(syV;W$N6hc@qQETwWs(NUBc-%KbGgWG@^08(Qoye{5GVz{C1=|;PL&v$yT1ZCDk;YvB{5Ov--5I%R;t`zjkv(#6@FbE=WRvACY{`Ns( zG<;@iTnLFHXCvprLOdi&VNqxr>}WsHB}AScjfBRdF)1vBqN75`DSwwR7L`j(gf7IU z5|Y>F<i6o>`u6CBwB=Ns5<(Nmpr441r)|>YnV@twB!;E5EjlJC z@uYh+GLrbbln{3a6A_8U9(ycp4|cToclLI+4|NTu3#eEmIv%7do|+1a7cxmy6H7|w zWhz2#kEQd2eO;dmp6(s$>Fp18_nzoVTPfxADSzi+vM@8a7G+E$SX3PS(PhA&9*?&K zM5eg__qguBpK#nL(ki)!`Ln#A_v`$6zoCT)YW$53+0wei*j-m5B#uQU!oH!9*o}MI zB!&}HV)Sl#rh8dgdZS~p$F6W7l4&d^PKFW^^`JI=yya|cGTb7K#1e@IrP1+_D2AiH zk=SI*fEarsJd%)FXbMk8#zjW>mIR)nAt^Y?Qo=3Dl=|Wq(vDzInW1nnD3+l94*W?K z2&TC^oQ^+ylY-gcI^7n$9qjQIUUQ5l6rsz6KaAga>kknjnhdDaqPNPWF2Q{65q?V7 z!cFPAt2pkdf&rq?rfWdi(Amj;eveXXfIp+_6FAPwRiTzRUx%=o#Uh+nm)z}VbQPY7 z65X(L!lS`>C~@|n0Ll|tzaW63Liik(i!d3UJQEgucXhrNuRd)X4=3alDQ$>`CNW79 zSTd2gS0}pAnKVBsQFj{!v6vnmK_z!^G87jv9GQT$6{oomcKCj9^2*8YpIWlFT{)F{ z>T>tR?#m}Ho}8;+a#W`Z-geYpIkjkSTQIbJ{Ikk?>tTu(R{H=Oc<^5M1 z4j@yD9xKi)<{>NVt(cEk+7Q8zli90}Xdo8g-Cd`;7e9}k`IuOQG%qi@ag$dY8zoy9 zEa;&ZH3(>R*m(C%3TBV4(`~+m-L6ftMA3s>BiS>nt&wib;xt!rpH&jjW;w14=*HNt z@?(Yrx&g#DU+S95xn9^V+^_Uuz`VIESc#Gp*9Y`lmG~$+Udatx)Z7D%>8MZgy@m_u z)wNg$_EN_Q+^|Ea6yPi&np^*eThmQ}Q>ThNzwGfNqXjaZ{Wr+)dye}l7pWeoz zD04vwi(*Xl_1`s4CB~W$q|IVDJ`oxTU%|1%UP;N4j@Z=1sDOi9<^n>4bP#`DTiPC$ z8>dq(Z4kqu(X?qaJQ5oXr;Tjaqzz9){CL@X?^(V5s9)kHVT5W<7`1{CrI^*t$4+jqKu3TGl<1wI?9>p14$(FPFgZ1mh{TaV6qUjW$>%ksZ4oIFl@g)o2v#}qYSE6i(t7X;uU=%+KqpCBkE$h_ z5G+@jJ-C9(4hf=VycG}OEu%RAENJ#?d-3eZH}99h({4H99p9D#WN%+lvON#+ivTkM+ zW`&oDbR>y1ByleV#3?@8atb1+L!g1%ZvSYTmY0wrh9QXr{8^?yLnsIF^y zSM)XG=r_fU)!a!PK^yc2W3S};M2;J#CC9GQMs-ic&BIPDRn$@~0gF=Z&rt5uSdig# zLoODSVL$;3fvY+Owz6t-t1V~l#LxmdwI9{oFab(-ZJe>Vb+}A>qp^EX^$JlG6eNzY 
z=~8K*le(t+&~qEcU#V7fh=15{oExnsidJ(0+iu=(hU8-jSo1X?&82U+TTkYL`R6QqpFT`%m^*&Zmc4dWT7BwFUj7U-X$$BEZMo--$#(YS@gV@-5lCzi z9B(AkCC*Aa=!A?4IGoPM#8KgFcuIt<6&aCyDqfJe{-}If0yh(S@QE~o{b{~sija*U zG&JvOl?34*{`4ipr75grN(4~Qn#185kpRVa2olnw5ZW|=@(7*NS> zI@XBCxAC=@wrOsN>jpx5s$fv}Niqg8h^M(}Y^6SQluaF$@Lqj{zry!Jqyi#29~J@l zb-tZx-IM3j21X1zfEtBq`S7;3jfbP*r{m(0^p@aQBsv--rr>Lvh>e6Mq$57<^%qb| zA};jNZ_vSoyuDX?QfFp|UU~GzN54G$j;rQ-Js&f0|1Ya+`JwKqEy%f~oM#a!AX#l%E-RP3M! zJ)oY~E@Q02l!f+F+Nw!`w=WcLpRumGN>^Ofi>~UFVae4nV_0>STz=-_ zGjn^ETmqi#u1jqn2wPuszUut0>qW<`VRme_$TPR+<+jwG*ABmW_-fJB#E;5X>e`m; z+Ey!SW;@??m)$gTH9Nsk9Qn)kinYSZIsZ~&<4ng|aeZoVsd&#!_o}OS*;TWaU-XLQ zCCftD_W95Kt@UrL%lQwjdbYgeyt#!d*>Ss;Gg`jb`&{or-r;K(mJPjYuF|=(8?L(P zp0)gv+4FzyqOKNQPF_qt|LlsZe$iE*I=$j*e%sajuEYJlJ@5J5%QXE3$L3`Y};<0q|5>GGN6!Y|UA0RO}Czr!{0c{eFY zN$YgG*1>Mqds*VWFj4_8##;vLzG+g?@x2_$emU8>F2HGrrhZt{5@h7De6EY{2FxP% zEOvqAo6)UJm&!~6#!q@nJ5OQE+c_>^c%08>;lmo6&$N92z)305$#JLmY%Gy|@W)LU zpT^QN-6OqGfZov--gF^VW9FGxJF>>-H|{scZw3gnZMxJ^$DzOk!y%Bzt^v0i%iL=; zmKe7@&c&?(Yu0!*c1z$QEu$DXU5K=FlM!!lp^~kYp0E zcr#>}@Py2T&xIzYAgzpvu}L8$ke+H(kf8d5T2+XRA&F^ZX_NaBPbYjfoA#_`lZ1&_ z>`CZVp9~+g3BvYfKN*UpfuDw1l2FnJ|TbGNGzJb333E^WOXnY zXGA)S{zalfKeZkM4`ceb4fWb=s&XR4l)W?zrpZunhdSaI1U0IP#5$!d;S4zPlRGFC z2nM=5TYu6qy#4ddk3G}6bH^8kwa1?U&)S0xc_gFtv-5u?NU1?M2siF20Cq!UOPQ>}ZrR^k6b~5#c z$ar_!1|>#NQHYpA@i^Fb+Lg)GMXDFCQyiiyNGeS0q1;FtG46zv<|713#6;SxjFWUT z#EYf0S*}w|>*Fz~RmAXQ2vm-ar}ZEcv?ic1t$mF@W6~6X)T0TcBQY@?WNd(`Qew#e zMf^!aSbEdkt#Zy-{DH%Dx%Xo4Y%FzQ$+2s?Yptkq?va(Ex{Kx+e$`&KVy|AbS1;LX zQbVsj`s$;9G5n5w*VWLqnjeLKGIG7)$4|VouV=NSBE^5R>njs8$5uawxB~af(%k;kp1H$I9`E-%u3Ep>f5UU|M*hKB{kqOrP`+B- zxKh1qv3l24?^1Q!i#@X)tL}o%dD~460h8x(XYxGmpoOzy-Apma z^R}B-#O{^nH8x)h7B!tE&F518Lz2|wLRj8(asBCC#EwUQPKPGU3mW4`qH>)s%4R0& zaSf^8@JUQQr#uNJ3|6XWuHwdw>>YsqM1voujPxDQSR#x;KS4hSsh`8pR>@bu1SpWD zry3@qiCt=K8f)CgMz|?|K<76%BLA>bisSA>HA_IB-K&kQsx-;Sh$r_2Oj^sb;ycp< zt)NX8=EbVk0xg9nt3SDO+W?LfI$|E}ar5b?*gXFGPnbtfHl@_oe(&?>Q2Q1C|2vP& z@|#6Bg*w@U29@&yS-PFgt4F0t);#*MYRo+oF16HWnz#IbS)~tjC0KpgbE2-8aqvFu 
zMxEc8^DV#@_JB69Tv{fKv>?+je<4g9%AdC=xql!QpPC>=#WtDoZIc0yYM|a=btX>6 zRJoEbd2mAx)e15iP5>c=M-MSvLFxz9Vx3*K`d;gA%izg}T2w40)?q+mJp~O2lFm-( z1kONJEOep=B1vRZga!m5W7H=0t0+c?Q;+m_ z61LgUP_h)b?BqiN>pEFx#t0aZStgll1pVEE+6dClj#zXoGCn1nhedOy%Ng7IXX|o~ z{2RKwp&{xr`jM;4@&1@h4yJaO$3jD9Gpw4c1!3rHL=x~HIvF;TvKpUehw6oU!nw!@ zP%#uc(BDU-(3uIqP9S2!ic)M!9Kr2^1Wk&s-)qZKoX~m}n<(&7Ku}1ebt{sRK5ZZ| zIBh;35(#2?Em=wwVy|g4O*HBDX&PZ4z%oc1fWrXOh(35D5${vAC9L=Wyq4)>tLD6AbD3)3I(DOS$C9%dHZE7e z7hKg#u7fj%HCJOQF&|uV9Y)-qHjJNsw4XWJGX!Wv*-&!wJ);)kvu`%i-l4?nUHIDO;cKlqQsHyn?? z?<}C#eV6(cN_&?b$3OZt%wqOS_64`^s(IOR;GauBf30=$hU56=KH3XrcL2OD+pFhI z^Utm{wJkTbk$Df}Sl98+&+-3DueaK6>N)RW0Ni`kE_7r=SHa(^MC5+vmKNA+VI3lg zJ#B$o2lGIX?I}e_`y?y-8s1BkiL@zYeCbUJ<|KB@^ak(QMS2JvO2%8?MZaVnY1W~e zlf488UMX)5@j-ZFOzDFeiJ#AHC{ZPO_;2W@bUyOm=!axYni@LxHeI=Zm0Cki&4*59 z1R8q*jbZ#iA+(Ss%c~8kwQb6Og7Ww4`mi}k8jtJIL(RHB4&Vir#X~pDS3}DHeGC3& z7^xo~6dBm&H+W#_&zJ`S`i&Y3m4>kNYk=-PY|aQ6nuAJe4PdhTK(RWq%styU^{KOJ zgy)e-g{?W})f6zvvSj!&V-nD5s9e6MT~Rft zM%<>>y}q4$wtI#1Vkj3#;xtg7S*K6NrfIDzdl$7BY(qPCz!>y0lrL2Ydi!GerkVUeSr2xQS#B$#$v| zk0Bvl7mQ(b1ZBP*!~vSa@bW*YB+fpL*j&U$GB^;9JBE;BgsI1R!Sk!0T(*pU*O5C6={r0tk7)uSwJ zlGl)oD~zm5k{Kn)7Y3IKBsP&r{1URfhd=3`5D;N*yE$9I<;qKyvu9Jr6>H(`^f9bzHVwvMdxlbiMl*eQ)+HTLxC0g)7dgMQ7FAxg}@g zjBd^8zC3(sI8{5pcg4BuhI7}|M_3c+Kwj&eYPpe*|4~QnS0V|S?y_Wza>3g8JLz$d z;5nhIfd6(;=V9ZIc6Byb-rQb<@TYncg;p1me_Cooj-S@H@4&-bdB)Cd*0+i*6tAEc zZ#5VxzRliw(D~M0mVVgYm1q5##frBZ^fv}H2d2f}5)io`Fu+l(xQ5F97qken{4AcL z$4^5Dze6TwaU_JWlP|nU!5j=NxTX5p)4f1}P$+t5y!8&k&!U0Jm|92&15akXk86Ru zWd)aIc;QsGorB@%hH1UB3w{AO@#%`|KaG+34Oyyv&CxEyDu-=yO&q7#^ec%ek%#t1 zc70!KzUgEEJD|)bosd8`mD2hN&5;T%Mgij>%W5FxbRGPYKt;MBPk+ttfcL`77@IKz z9+1bV=7G7#G=>9WgFLK32Du&zNxyO1ppKi;ZSrtQYvCh^xsB^X>Qw(h+DG8BdG5)cg(bH$fC{$MqjJG8>m-fnR^YsL{P`bTtY2z%TddT zuldX~yWlaZN|y{twd2qW7?R0&%(!5kExsm*9FY7a7l%nqS8 zBkn6{U$7;M2*VM}R4O56$+RF$uaK-4gm%RXRYP;oeI)Qvz=S}S=15rbZGb4G+1*WN z=(C7u5T%tMN`TMM^Z-`)AzoemKitB218S2CsiT(${UrKsX4f~B4fTL4dvq6__3 zbnx;fO+@fyyT7l?kIxmr_!G1N@RW~CJiQ9p7)>2 
zgxPlR)Plm7hZpl}R`Rs9tFeec496gdIMVz7ly92mLHs^`|iXJROA(5HF(&kfxa>U47 zk70DX05|;b(~$(66&ar-gql&nq|FhDsZhj&RFQ%FbJF}daTlH>l8Y6nq=F@j~m{;FdCY{yzfL+Z@c-Ajmfu6in7dUma};#>Bw*%#^$Uz3(gJKil7 ze(kAA)qmfx;%R={)4bZaZBF{SeXR`Eu{c9=i;l`~j4#+5H#$!HuIiU9dq~j&*f(2n*?!Tk zI!{B7z;)(BEH6fe?K zSWyEGr3jNv!UP4Oo>cRe_qbiY-`JZ z&&r*jgEAQ!;QK!a;Tp6!UZB{O^UouI!+RrbWq0kg&nQ64xmGjeyPhbLe27so9D zGbs8ka8$VUy>1zkhqg>^t5tR+uvv@yt0Ym@gm|su!nj5kMg=OeqMH`jbD~PQJ z>!+`y94qEk1uK-I*QFUunQ1L+1!?GTK*&%`jRDzdV}=A%`l*<-Rl}sEY8?X_I#pAE z{y{VOVwkj^XcTv3kZSI}_$!7S4z5*`)fm<+jXRDyPDK6eo`p5HB>$^z03TmMV<+IN zlPRd}L|_aumk=O?0}nw=h1sgkI;y`hF?FA<8}7vN7o|!zI1E#HNg%Bl^9UB=6I0{R z(UBfSl1pTpg-}2#L5bdbv-Jo6<| zZtSm}n(P$m!y``M9Nr}vf0Run9;6bA1ktUW3i$AsDL(Z~f?$f+L_dr{I0HHH%M_4y zF!!{H-^HtTni0aZifB+@_-^&Kxtdi^=_}8^`0QVvxVrmB{k|2?{@Z%4xMa2M@ZTQ) zo8#Bdt+e$+Gge&kQv27R`B)2;MZ<5JosajHH3qeo_yg^Q7LlYYfkQ z4|$VS0y8mV(_$JDvDk!gK7vh1ic2UW4^Dx|92LwGD`l&;vgtOM$x(Oh27`4iRP4+@ z(W_OY$rkE&18PNhp;@b`q77!`Ohw6R8(THm2LzUldg9>I0|h&wPkfYuVfOloSR~5iP4R1#_;m`tNx{EH04Gc) zUb+xZ+m+AY*eBh9NnF}SMtywT01hNVd&P-Api~DrPHNhE`^Xf1h@S|;48zDNhAERC zp1ZOeAg{?-yHWBiZ%m0x<=bZZ-p>*<>+cma z<)n>bX7NJl&a0(YZP#iSN)9jQA6d&UUMRsQ2al|6se{1EjvH77>I+eo4A6K zm(8s9eard#S>*BM{Gc4Ukstgu8ZJF>t@N7hI=TYgM)z8E!@KSiDIIn|M!iWlf9bXW8i>VMf%*6Y;&ozsZ8*O08w7(rX)qf=^uwvYaT^h*H@btRi~-?P{AKQ`$+qsdVlHQcZrZ-!thvQGdEGib^eR{= zY*;nqBZPXdUbn#I&-!rQ;bk6PzFpXhmv0yL;bom}+PYw`mR~x!{Q7Cfhk5uxln-tA z-HQ*)@vZ!aZhXZ2;Q){Cojwfk%i|4U- zblql;C2m2O7xmp%Pkub#V~g9^y`kIgal{>-f_Q+!fGWe1}+wu#<&r#Ug}@S$L;df^aDd*NK`M&Q&?F zsOLCyT$%`keX+su;Hcz_2L$2QQ`ZII9AbMr`i1^6pA-;IvuD9bSU4Angv>|&SZ5Xk zK7UN;KYdJyQHC+W7xoLiC;FA!5h*e*__C$?149#|f=?PA3tkEc_3pS&5}E~fd^F)Q zN2Q2=VmROzh9-shSU?z>2!`TK!LYEs--pDV^y+qDB;borNP$L55*DOD)3AgX6pZ_E z0TuC?{ef6K81}_kwKCO>O~&GQektf|R(h$Q#}p+G#~cW2(~2wnC~#sq01EL;x8$AqRPzckrIw*mb(A_gp_R?3;{_q0(*cgAGD`Ebb2lhtLS2N!Hon zh!D6E4@hBONQnA|FZxE&PT@enAMm@(X5@4Hl+b^!L!=tj2rX`*I~&t@@9Qtp8 zSedjv?#y%$kU2;-sOw`FgHamRu73;Us@hhW+hXHGX7lkgy(fCQkDn9z&fxL_`k75~ 
z#H1KTz7Qq~|0G5Pc#S3snq$iv=)K#RB12FplnZvDauC-1u7HN;D$HF&Z)O>jPJOU}=`A%k1LQ7GF3V ziL;3-){A>xAClo}IuoVI%olQL()m%J6blSy#!9-FMvihfh{<;l^^3c->HN$zc03eB zjnYogJ$!MHhH5MjAEfeQ=~8*_#0n9MVIotW#$2YfHWV36>j#G-A%EJ;E`!69zHr(q zhbIG}P~%T zh}oV#(L5Fz4>ZSyBk}mNG5@Gfk}w?)N5-4`q{x>sGsT*z!L#dW^M;wPc{mhwM<>yK z=p@$3fx$s(3-a#3FSZlj6!(~`<(&`7TQyfb(Bk@We&qu_t`7{HA^)Kfp-+*L8>U(+ z&V<09Q!5SWKq*&EEya(NP#X;>Es3`4Q@@jWcTeVdlplb^AUmX_cHiK#DK&UbC2nwa znS1|#rgB9-svWQ>Nkq*+z7qCom7E&0C32h+7u5~em6W172c43_%6WBGPL;Pl_oP)f z;85P~D^xwh}<*L%QYX8J?O}TP6)GjagMAZIK^($8D zy(bfh>IX`dIP_te63$%_Tamj{LWr$W!nrGAx90AYkZ9fy9wc{|AJk50CbWYZrTmk; z$bA_j_XxmW~ujNICW#)y!tTBP|S&_ zx5({5o02=J+1LtReGO;idj1x_H}U%te%tV~30#Am8to_dhLJOK3PGbNxrz%ysoH|M zg#OMUpTpWMT#($oLH!{~bmoBXvi^Sq-9WA4n&7;9VNMLC*B<|qIr>91$-=q??fk@t1Gj2ZP0uD9JCe3z z|IPW|D}GUN&vXt6Z&b|%QUyDbj@tJ_i*2c$`;)Z?eo}QikUH3tJa9T`>s>zo{L1;k zrSpULOkY};etvlV{D?#ICb(9d(~=w!ORO+Ur;WNqMO#Y ztgFt_n@8U|`Z!PTFg)hq{hquBX3p&R*y5NywQQ+gE8MZ_D4Dn3D{ESa|KPO`Ub}VS zQ!Vd2!9V8s{1f~G**9?3f~W2SwkeY$<_EHUXV35+v*~9?8xj7;2Hs;e{bOS>!avsl%FzG3sEqtZ-eWWU zyeW^u`z(0(4$pfWx;q*ph0Q$T?^u~{W4?o@_(FrHG4DfoxDzH^e3Ba>ittJ7 zawQvoC4v5E03m6^L=-ARK%&~Fb)%uk5N1C{flDW~Q=|!V8$k@Qw4N23)*>QColldJ zJS7zkjs&5Z$kS~`0FOysNW;482m({w$M$1Wp0(17>l0I5zqFT5YgbMAGuErt8|UU+ z*9TXMT}#ETg_=~c`@X69p_X%$ePSt?t$p)IQdgGiQKi?OR&&rmchE4+A;W3HA_)HF&CR&5G8WF=v*IwO(yDca#Zn z1&Y%-zJ?RH0qQp92C^zZo5sNTv7S{y5Xdp7k{jX}W=k*iS><=t{x%@93LBF^A?j+l zU`Ch#J8FcN()`P5E}=Oj^ty~`eJ~uIh^O@!`oU=0)OF@Wmn21`bit{>RSp$24A@bdq8`6k~`@jEn@Yq_vlPA(<7yi!v)32LUsV*0 zbAQjbb3bY6Xwm+3HxK_$o;c;o6W20kP3wjL=YAo}P9c4Evefg%SYi0$SxlC=%yqK6 z+|WW==7>iauYMPSDQ+V#id<`U;`%&3>?5Av;1sSBnOeX&GRubpBr`F`I1*|inyfoR|`|=^-1Rx0>E6Zdc<}ye<$W1y8FRcfb0k-O;h%@97b77Ss zj$*6KPMLX3gBb`^dJ#{i=qbGq=}M#17q8ii=Jd0d=f;x7bt!xOiru|rcc<*TSM2+i z?E6yo1GjpX?4G2_vz}Fo8LOo2MZ_Y-e~6)G+}9eLxgS(^*t9>*=i#qokEfm52o#a0 zHiNvj8e{+_vz*GPhnd9<#wVizHs?LvR5wjESEjn%G}XbIJM@4qiq>KfMbEAhAH1j7H_jh^tXeZX!Q`SlO{DP_x+@%O*b+LLlmLQz0fOV>SnD^C)_i zgX`i??BKY|n1-bqFSdUUJP%Fu6?sDJ%dW!8yBsGPM5FR{ROi)hn#LsNWz*<$GK6T( 
z;Rz}%Bs)+<3&e`7QzlH*%j(sM`T4l!@CxX?nN`lpwHlN#B#19_n94V$%BG|`oLyMt z)~@E<(8HhbRu+{|y;UR+JFh|MH9w|*EFsunQ|r}Ort*L{WQVvnOXn;6fg5P&ee{;P zR-!%EE}OsCm|bXcc#*o*M2GrLsWI9}!>BR-tMrB*i>(oPX8WKTd~^zp!DP2gzvN^_ za9@GatzrS=;KEj|=oE_-Br0n1n)+x}*x;1utS^h>)qIuptv6>+$yQn=?HMHii z92u1Z^)k{bLPZyRsIz$pGtN6xM@U_ zyv|&fb~8B7g1Qp~y1^-FAZ@=2Y1`_x_Em34f?Fk(!ih3RpF&AFB*|-4XnTcM^G`JY z74BUAgyyj305oMk+8?#cacj0^4nT*`auBHdf9FMgBF`Ii6Z44=tdb$M5s?_T!$cwn$ueB^rUsPsguZSp9oXH{|vAu zsGbr!o=fNmXG!z!KWLDbw!O6#zz}VvBs2{|LNAO*B)o|YBSyWDIG~h@tam|mip zQPwi`V3#29w_wa497RCOpf;c1pEj^QFdYz_Sg?i8K3B5(Xm2gq<0P z2|yIgoV9S(UOb)ui7s!t^G5O9uDPmY?ZKba+#30D!>uj1hmxh;seMXl?@U4S$-uahO&X(!U+`X7`)=zh?0eK6MK61NI}(s{e+$IsnrPZoOaTf09lD7jhwR{7li zR6)&3fh$?ynwO@JWiw4VcdR&%B%Me8Zu>&vLdV|}-`b8MPITYd{$ECueP{pS;_T47 zMRWf5`#*Fph~F<=Z26CssoK5ww(h-Uytnmes{GkxS;tb*vE+pprn^3evV}{FfyE2S z!XqozXC6AaV%MV*PH+DD$u)Dqs@XaF+-y70HDNcryxku=iXRzu7Q@u>2UgBvn{i!r zz1gs0u3R!#rp#NWj;|Wc(-)VG<&cd5IqsXQy?$i2dA|7l*uuW|UY#FbY*~D9u_yU# zf6{ht*>s+D30_Isgk_^3zq5aC@AYSrh1=(M%qQkU$=2gZTj#Q=Yt>#bo%cy`+4b{N zovW2wraY_m!kNjdliz$5fCC6QeD(0mk!wc?86Cb}KX>*o8mEu1+8wjU@7rtNt(hBn zr(wljd)HpOQ1dsrIt-pGQF6UfUy>7b8D2Smgaf&8k3Xe zmGwWW)**nOVdJo3E!=gNXbxg3;`Xl|rQIO9YM=?bkHwInR&tM61Y0k4+! 
zy7<%0D*3{YtAUu+9zZQ1C?yBTwyOO^wPtk!xQeV-t;;bhN93fz9L6cypFpJv+Q}v@ z_?ec|{{L&abz0YzC(HhTMz&;oreHHpLoJ{WC@l)DQuj2HuKy@;<}18P&drXOtW1m5 z6(fO0A#*wIW)d%x=TBpg?{tM%EiY&DSuZcE<Hr7e?5pt8C?rA-GvX;yy&&4G%@V@ zK+kGYa>`xTzoSr&)@$3SXN^J!1TIl4*10uNORnw;RcJqg2ANaMV!V>4a*gJTwV(~@ z{5vT}Z@yTsrd#10zr0?drn1|ZPIM_~5Mf=EZ zv$}fqUZd9}HhL{$(|&`TE@~T4nu_ZK#f+WRmLH#&tJ-cO=VzW|KwwECXRdIw&c(uKnQ@}C^Es8p@kNB@xN)G>6S4sg&abzh$ zOJj!3%>lbrv$sQ`8*@O5dQ5vA7>!s@VAGX*v(>9zKeLjkH9L&{SploVRi&{ao3W|qoIQj|P08M_(4b&^4bz7cUj#nB9{q;p9 zWi<)fjOx<%{7%`7nt;SLp^0jP@RgA3$i!Kc%yu?m4ot@3mxBrHbsiF;!QqQpl(UKbeLJV*nHz>0AYX!gW^spp1?$zzz%)5-jJI{8T+thkV}TkTizd zG+!U&I0u)|x?4sPS^a?h+~hC{S+G$By8D3S2_Cu&;y_5?&n95~0-(;8JCVZ7+_Zc4v`M2V z@OM83J0{BLmd!dTGn4}@KZghYwJjjmq-zmNQ_J9o5#cPW&n~x@0oL5s9!7V zR)Rf;nH9AFGeh4*Jd!yCnvi1N}78iSd46~joh zf0{o!LFi1!kZi_7M_X`8M@c~@lkQ#_jM*VWtk6U_iD9U;zA%disgFVgyGjgz^&teX z69)%i9vB~t1#uWAL{D=dY(L#b$cs{9BOuzSFBX#_*xG*QC-5+gqzqXSNCvs|Jb)wBV->=7yEa#9ZL z3>d5_hY9%)OEF5oN2SZSvNnZj5waTuoSSQ1R4@>tsI-Zo+QD&OROT0K6me;5CR28h zA#EJOQHMboCRxawQ?Z*g0>T~yTcz_@zx#$lfwYyTGbIJH(IAG|;ADi@0caY)Gn=Oe z$0K00d6dl1v9F`erZ^zOt7hx97n5ZNQuzmyR_u!Nhx6C+3zCIrQ~6@j+Mnz{|3J&< zUEm*SwR*!NE9cmeve!;|KC(D71|ttNntXb!%QNG#$ufQ5&tIS80f(*@z&_OD)c$=?Vm4r$2&j#?SWM3u4GBWLd(M0A9z3T zE_fD?treHg)y%hiZ{K(Jy?0=xrfsRFZ83PS=ICv0a(n0P<4M7DuflVu?OsK1s`yN@ zs1IZ~MIs}=pNE}SaRZ~}nsL=uxK>!6ta5)CTRian*B2tU&L%64rV5`WfVObv)vK@0 zy#9^X=L_B+UZ{C*?Ebc+KdMrJrTbyJF|GT^?z|eOmkR6dTkAie z2X%|Z%l7?%kgImAR5dPDHLg^-?^e05wM}2Sc4YDRR433}d%@Jnk8H&-5XmcBE7@{0 z@^&P-^Vw8M$4be`rIM4WlJ4n~k4>DT^y+1n`L6p`s<7Tn+Y8B3SE^uF($SDKH!ka% z*0AYt?ApZaxi?>(9-lj$G}kTb>Nk(H6|b7wZwDS}HFm?)NzhSHG<$yT_fWB- zWZ(Hzt#{fwJFsNhzG}2ST|(6D>nA^Xh8H*lDO*GFPR{TbFg!M7VRs zScCs1W6i42up+eI71~opFD?n~b1gsb`Dst`?DH#~FWfvkdlpgC#$?fps$^a{nf0gE zI~P`-J+Gw7OBQ*5Ybm4w;=%AJeP>{<7gIq)9Xw~;Z!Jai?y(!Oxr5igk}TPo+<7Kh*tcvw`^)03o4r=eUR$%5&NY3g zUFiP4Em^mJ@vFDvw@2=DCCkJWd;eW~|C*!prsZwRa#h=E`4;-Gs#&e5d6cK$Quv6| z7dSE96qMXBeqwPZOPUt^3*N<+WYOLg%f7poeVB8Sw#H>sQ`W5QnkCB$6*mvReRy3D 
z9H1UJuvmN7df--L%5)MvP-K|uk$a$V#k^(7yd`C>Ue*b#Mzntg{*%UvY~J6rpe;<5 z|NIE&5emweD;kpaMp)Ki6jAG}S?x2PZ+K?U&YXhTR9@M?{I4OND}IUpKj|WGj3kR+ zwC#Ao$}QEEy{OaO)fvdID0?wacXz)5{y$k;UgWiDj)%X_xJrXpwPc9-MO!G+SCR4y zcTG+~T+8m7yu#D23Hj50`TE&i6Ivx%`44pKcTEO}GUod5WTR7b!OYGDP+#anMkQ5F za}lxm^F`VVov2f#A^Bs28z8xYIZ>x*xU>t?2aI5&qmy}}QG{KTl9DV-Hce?3EuvM- z-;er}dty49Xcrw~!55}09H0{z%#p>7SS*(8C$R?ZiFucThO(!ip&SSGE1!afDshXr zRjhVvGQJ>g6Spg9*%PU#0Tnx*f(pV@En*!~W_W_-_<0sXJa(997jJ{hYSOigbIO#* zo1F&(hDU>gxy{aiDZB_hd7F|BrIM7q!6_*}4-Ho+8wF}cyK#L=geotwe)Ig-<5NgO z>c|GCWame#V9aC@>CJOgC`LQ;eWa&sa7u2Y;JhbN>sptKdqWnueX3k`<#HL=t>n>1 z`qT!8{%TXAyq`s?H3K-aCOe#J$Ra&TT+YraS02)3vD;iah3X6%lbf$LMR#9OqcND3 zXl8OX{0{Ra*PFI)I6Rv;uL3Cks4q-F<;s8+>XY6X;DQd5!5VxN3PK<@42yF*WSzFl z=YApO4-SW*BSC4225}lXVNrcC;9OdFqPwRnaikA011Z)7WUR+32>M`v2Y?WU3Q&^% zGv~$QT?%pnqZ*T<_2cweAnv<_BVM|(Kq#89t^X(jDO4~n8l;vdLuBTrZ4oLCf2vq-gz%85^8KG~GWXb?UFzh3!X`FDH^>v9c3<`?qNK-608U`{# z8O7X*yr!ni$05?%Q(Z4ih#*XPHG|%uOHKx0d4VEP#Q+`FQBs`mJ_MeMHc7$JG4%8B zFi!K}I{`4UV)!0qfCCY7F&RWB%{MUAg+VT9-MJU~x)MDbP#=_dd@K?i#uqXK9FG(P z9ElWiO*s_7#|Dt5>xyqY8VVc&wwB49QoHnNeVC5br;Sk9DHCR#rfO}}IrFKD#4~;L zk%DzxxcLV>8u68v%ojXzFbHctblebdfrO^%QlhdeOnN$)>e%^{O(A?^B_v=(j>25c zop5$!z9vJ^4;uskijK)Vsz$+>4WSGf(%~uPYa=vx@#O~g*%Q^sak&#E**;Wf<92P8 zp%c&05RGZg`Qws;RULE4yUgS4JC3_GO7U^TL&!;vc>8+rz~ z5Wtc9N`yq4bu#j{gF@dVk0xmuaC(bE*U2N8$)%P4lES6r(X1aM@eI(242S5(@f8^9 z9}vYv-6qWXQ{0--e64Nfz*PmcSTmI^n=1avWc$SASTR-KHC3!RG7|ZaVG?uNIw#@neD*UZ5l)R5E_L+k-2d57zZ#tNz9b(x**KBP4 zvdM*Gxic5PaS`@${&!p{+m5M|gi6%SxUafX=89!q<*L#4##i3>%G6gjsCOky19I{B z*a?H$aXAHEa@9+Jj|)CLGB`3pUwgu*GDx0aV@&4PYjjJnn|ub$NN_*CeIy@LG+;5n z+#r!u%uZ9wa3+LUL9wj~0FGqB`vBm`KJjV~EeC9Q{@{+2ORJKZ|SBNazR97(xg z$Gh3EkPm~iQPf2b`pJ8dJOWYW8G?ohQyE#UGe?Tr0U}O>0?$alz*7=`VgYyn!Fc|m zW*4vfy_e_sn%{8tUvf6(-~21C>{ndnZ@GgXac7peGaqr@70#RDydQCgf5lZjw&6P= mkDupp_#{UIZz88A{ literal 0 HcmV?d00001 diff --git a/truenas_migrate/__pycache__/client.cpython-314.pyc b/truenas_migrate/__pycache__/client.cpython-314.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..5a05abb278aafd0955fa525a09fc86e9d44fa36a GIT binary patch literal 19146 zcmb_^32*M4c2R*`|0%ie!ogX(~}9u@C`?ut)-a53nq5 zCSjCJLfLXeIdMcM-Ze9mZlpaLb&po*cBj_iY&vacK!XnGXKk%)+}V0M(;-c%irq}x z-~W4f07#LR6BO(i}5i1LA5gS6|0sDSO#IfHQ zaiTunD;#j`cSqdioYz#qdCdjfNLD1vTi~_eOQf$6K9cRV;>*U;a`Lze&TGfFW2-^y zZN%es;@id2b6L6@-&v{jJa0C>b69%5*8@10!3EwzZyvt#=?m=?dJR=vUD0@RzZ?zp zwD(Cz0w?=ILuUdJX=o%E7>h{0F~8*ZMSKdPqLJW8FcJ(XO?A9#_l=E(BECp4G^X^_ z8B}BY*f~`^9Ht202r?K)LZ^p#beMkR%=j7cxl|2cf*asY7+Sy0ael;=aL@3=yqEVH zyhi*4uc6t9j+s890jmOaKsB8_7YQh;FzQpzsAf43iOOTwI5cEF6_Q7N5rx{RR1Y){ zheiX<%1|g0c}nr0_Q`T!tZ67T+T1ILo(l{`6gJqWgQsQI`Q`|QKIT&fM_EjuSsQp$ z_?+q(983-}FgPf?QGFhMN(H`J%?Cp%$M9;aRyd?ilrsxpZq-0DipT%ZI&EJuN`JbT$|nmVDCIMp_9; z4uk`k!Lid4t%!6caIUFNP_2}PIrp7YO|oz7bO8GxFeY13cby=M2xK~psxcIesC-Zn z@KIYUNwSM##lf>!2vjE*;FHbvP5=|!hc?et=gZ9#9e15MKiD#*e82U3gOmKcXuo8h zs-A9nE&AHVcg50I%!@|ORr;YlYs&L-TTIB4vrzgAZ9>6oN^U~4Omm}J!oG7OA)lWO zMKy=QLm@u~7aWVocBIP=cy+w&M4;_NHX^obm{&>(`qpvsZHpYd>A(U7SNm9SV;hUq zI-R+7Xe+R-^Y5^<*gE*sTyJKsWLyPzLeMt~HdJ~WrMHmlL)6qnFU{u) z2Q+9HFzQIIfu=g?!ot(dYxM8`{z3H{*Q);*9e?HqQnNc?XwV}O9?RB0ju=vX)T0pM zI0?~B0J_F#>);^KMi2OtnD(E(^Qsb%UkJzf2 z6o<nPfWuG9e4?2%2#)Pe+DT z^KiiD56BAb6grB@vzSF^F~Isd$T}hu!eHC@Cjf9Jo$gCVUphL?zkGb6^R6>%s^H~g z6Ww>6`P0Jm3xDjK`bONjexiHc;+!&m`)l)}>yq<=bILzG@Y?9RV)eY$JymnDOpktU zPOJd@gW4+_U)?zEpXrY0;6QJ@V7`|uw?>9Z^IaVF{FnDkZ++KZ{?iRtcf>1qzH8qZ z6Lvm&_6gVE81YLI>;IYrNN6UiXwuF-C=Jcdr@21m+}Kbs)YKP2%&3>evZJpWG2@Rt z|BYxyNrCync;tnh=vy(ujI-x?M+;mF9y-*%|P?X z+@PrXZr=wc<~fvkk)JRgGTFFH(lTH;v5YXBn8+j@T<~Z>QGBNZ4d8ZsiV_$-IYJE2P-qN{Pn7YG62py3 zldbPeFsw*nq&J5{$xL!+L{FC_uP>+s6zOnZmn?^5vk8xK%DAkJy&+ z%2YSnq$Fujh$YV`^{FWuIn~%&XZVPg>La3_AP{zR)*pD$HxwB;rwS)Sp%J+fDXKX- z#%KcZR;nf051DupRd_BG98=By0FkCTtGt0Ctr*(rQ8cNj#$ms#8czks82wPq8tIa& zDVs*g8f^xM&sGllg9b%+^J_?%b%`(trR3+WwLo$ z@;!U@RNuutck_y`9Dnur%+{+fB%0f2o7>;2h&Oj8>N?|jU2pl`&D(#Wd*11pQZ62U zY|hEDdA~~d75tcC&dqT>`A3`$BVWkJX>O(EpKg^w0vOq 
z+|4hWJ{!+(ncSy$DyJy6?umHLlQGAW^XTW%IeWS4$h{|KC&a>8v2eNogz*Er>!mI8 zp8PAVms_U~#67hMPvfko@#^WgXM4=C{o`gXr}1MKSKr0|rmpiho~HLaTRu_3nBnt< z-Rt=u@!iG3kM|k?-YVt0%WSvGb_2e>j_iF&& z;Z8k^Y2AbPw|7;*|796Zeg*TZm|sKw5ZX++Bx}kqeF;$$+;J|#L%KGc=!t+Wj6lSL z9BO1zsmUt*blIWW22&?vyfaCbrA)Hid_)dL zz)78zi6`<)k&xt*lpvjMoTM>-GwnqPdUX2Qdk-{yVF|&^#8eC86xf-O>o8c=e3rF8 zzJh1adZqCULJu&`XE643vRJKT7tbEG?3->&ug#$-fyv9+0 z4@fy$7JOTm2+>S%N3zCJNk^oy6i?v=zj}@Dxn@u;gAAR>0JLG%8C5|EjGWR2`~-?C z)Ie=*tuDk32FHSt!9mrtYzSH;>pn5}6C6(ZMF+g^Ins=e-T?tQQ0pJWt7+n8JxXdK z*^>lOQ)|g(Pa*AD{1jq}wdt&)X{(x6XcntxBJ4}+U8bmILH=*iB)$beW6Qu00AA^O zX=YZq43UiQrJ7baZ%R;uwus-6Y*#0DAd{=Nlp@yY>b539Fb#%GW)Qk3aJV3XW<~^! zasx76;~8%60pr_+Pt~NYHZlFGS(5`)lVA1)$Lb7nH)7dzE7U!G<*_XwfVeYOog&x_ z(>5VIKQB5iIW9O7;<{OJ-CXg;tDQeTcn+o+vcCc3j7 z^SFKT71XGBShWv1-n4a*ylL?52-4dZ>CDlxvkkHzfVM$06+WxU?fL>^vAE2|VhbhE zLdknk877~Ir`!n-OGXR9^bN_wClG2|wgeQxI!~LNmf&t)&5ZwNBR?65=WR*2x5UIP zvu+R!nt<~~mHdx+NqECl`Tt{LKi2^yZANNh&8Egh4&HP>3m% zP2!I=`?m^ftk+roSVd7!UdsqFNL5>{|^1p zE@LSdB(}ZkvvX2?THYi^BMw?V2(JeWv5gq^z>ATQ;7L_fqT#R%wqCV%y*LzL`f7&o z@Xmb@N#s)_Ljs{O1Fw2Z-ixgQA`r%gUi8M89*9m~gd^q(P#oi=Ad!YRGulzv*2 zaBh6pxpCf+b?MNBLkUOOtfOqMy!pEEM#o(FQ*(~?`JDVK4VN1dIn}c{)$yF#grhd* zsC{VSTn!6m&g#rSoMOq{N9xr}*U#qE$8#DIj)s_{;UN^F4YSUTizd#wo>hfje7b1k zoQ*6AUoq##-z!Ja-SbtQHvV6>3mp}vn{|9gxp=dl!A)39(oyB)-6>nz? 
z1n1chf4iJVf!h`Kjuz|f29aX6@|3#W-f6Mk;jM^~J5YU3oec`!!7=Exz#G^G2S-Ex z=m^13v_Bv9jU-dtgM+7nvVzqb3yg)3Y#AK%hlZen--lY{!{iZ7(Plqya8MyL#L%EG z5|M)^qc9R6D`7K+Js3|S@1{@kXlWHNKjhSI1pi!;G%n&9B;HPYYN6btuU%N=&bjJb(GvH|uT(4rm zTP_yF`i`?a8ln|YQg?*pfM`uiA7j9u578dsN#nZ%`N^$B0%vi&<*qc!SzV`dj7$W~ z_0nXoaL{dM>8MH%SJNu!h$w6xFs-IN@ItrcfM#KUgKrw(;Y0TX-?G`dnm$3VPj3_G z*wjnRzzV15TRHY}(%M;yGZAv`ilX?eARRji=PR6E>xo>isI!ABTYz3hOd(Qi6KHQp z-OHTUs`ne38A~s1xfO1i7Dn`Dk?!U+*0F>;c#PwUxqcWpGxd9`&AHd6x6(_Bj1^8F zLl{C_zjY_rd_OkqvR*p+tqyK;N`pDfKX$LsIcBa`N&*={1=nx(I_aS5?(?i{Z`?~t2@oZMSxuvPaY1u7K6*w(&xQ)fr(w!JMa4DpU{ zdZd~*Y5Bm~hlp2^MuQ4W-9y851}1npZA&CD+v@UEOHvV{xGcA(F7xk*D+7Tj(u2M znJC`;PVwgJTi@C-S9~y$ckr7%6Wt%$vJ$p+@7UJ8vH8umAGOV#{P7cUTYbXTGHYvz z+qO(}%nP>f9{T1X=qD#5lcyFqJ}-M7YDe>wZ(5joVSzK2m4Qbqubh4snu99$g4Jxz zdg$UT&TpRj*6{so(Ui52!`ZVhHC$+TxhWwO#)QIqmDLkHsPJxHNg}W2oxGZvQ$Gv+ zBowdR@m9`UUT4DHIVpVL_S_{E;r@8eftcgKyqKR5OJ~K>Y5$y9HE(yvJXOE8SIs-J z?>UR7+uwDTC7d<0&YGEE+_@>?+&=5v9(T5q5zaMrYR*z}*OeD5+{*Jr+uIRqxc*!yE%!+w-zU$nXaJ0`l+T)InC8NP=`)6C$h1!=lELnss z^FuK?u1nDi(U;FISUHmu`rF(Brl9PJdp0IKn`b?nukVa|_9Q$Vv!0H)r)%QSyv03d zsiHpQ?~l6<#KZ$1+ViGP#qBjQq2}WQJePln|G1Dd6@4snSzY`mN;_8Xe968Y{Gagq znuIso_W-+L&A!f^H zlYIDY!o{87*dX2Z7(`yu?AyK&7fcsN1V#k5G$3$cVUNhiTm+dG}n|3nqj(wK;hU7J@|1owHyi$j0RrE!YWi za5mSX6OhIsn^24Hz|WE}U|sS8SfNSXl-8s|t4cYUJMz?6atFe$1x4PNpr@RmuT<@% zebux*X4N!EtA~JA4+2zj##H-YdZJV#kRH;8d?O=+K|fS@q!-fW`8xzL^!r^vNh`mu ztYspVn!j@HG*vB|H{w@do^f&c>n_^n^NQdVmS1$v7uU>*h4Y?*N#UY6UsyCLe9x&( z5fLQ%kxNd|QiSKuG_xSxDP&@fHubmYDC`B0(ueC^*77i4N1_m4mMJ+&zm?WV zYg?bE4Xv?!k4!AI2R`7ffDH9O;j*F8$xo)RF_4VF6wlBqk6svkIRpY46HDjao=H1y zi&@K(n$O&F{)c?2aK%!-T=PjM6vO+23=t|nDASaDKGY@C*1nEk$_!yNl`L}bW+1C0 zxLVD=R-bkpYU|QPluk`M;FXRJF*$z&$y}?SgVjtGYcChWURtH_3186`RWbg(eG?PKSW-PMOa=pl`MFo$pBTHaY(}IF*8SU;%t(RISTS-~A;qrznO_!VE zIh6@VWz11IUr;>R`Mv$nbvbh1bHHRkI+636+6Dfv_;!Qv*I9s;QL@jEpKvMV@%ZU) zX>|Ib$v3qua`0vZ7CgFo7UX7!W(wW-d>R&gJ;H>sQnUm0B%Qv_ry+rt5QX8>^yZkJ ziR%?pUO2_DsE>o66V{nY7fjQ*O)!}rA*!=XD4&i8<0s_u(JKIvES$wzr_2Y 
zqbcD4TU*cKOkpOV$Iz!9*S7nOZO|?)(`2Ojy~d2%btxUDIf0#{7bWf(S!|d)R-!|_ zDV>KN!*N>-X3mfJ61wG;Zb?e#&>^gysXui)v78%5YhlBah9DnIs?TwPqfGvHbVq5~ zbm&|u4g_)e z+VO%lxX*so()VmfM^|6pcu{5@Xh9Cpm2M_(mn0j8y`L_vQ(Z~UHsEin%aupa12umL z5^W?f=ntIoMMt35PpVj7p+wivaA4>RW4A~Zty(*}y#0echkLqIdy?0J$~qv^K~aU^ z7`XBu0AxaD1{1)L9+Q`!r;Lt%P1C?`{ir4g#}KRq;?(4elOJ1!pn5Lc?jm&921`m4D=FS=GVCvI6VSgkpC zOE-Y|yK?mM(Ns`1Us!zKYASQ#Dtw`9vh(8p`?;Lep%veZf_d}gTy>ewjoV9SYOfxctK4=sry^Fp=f;Uo!utbwGoA$su8y(bS4I}8(IBc+cTNDHLC&RrpQ1qn z+%CRXPtOQ(!;^9+UDPn9gKO{;xIeloSKXVUGpP!=V>Fk{twx)O;?T%9$|V3ja29WGV~ns=}q1BlDiyAK!{#nIA-!FP!$fqwY`gRe7r#mwBWewbIY zQotm|=`?QD^pqw)W1efM1+eL>*O;PT86~i9sl>57ed5yhPs5gDDLaSP!R#;20Sjnm zYBpf0Sk=Pfb?NiPsO0|uz4{wQuiWD&`=Iw=7dcZZ6glvG6k16}5VRLVQZS-`Y7jnc zvL$b1dZW;Qj0PC?m4c_3riJK|Rzg8(iDB&~n`(JZ3606&p>ZR$GpZrzm;W_JC;t_^ z9+^&rOfo9F8AE8>huo?qdFgT7PL;Bnq_2(}kGDzkzd~kcNH0+tMm7!u!evYht41=IH=#_~QwVtU5BgUngpkf2vp1i|3&dP>r(htZE!pPJ^b% zvixsQnIgi|^`{z9`7>%`g}h3nis6`TNrD1J35?Osmz>kt?~3bQaY7M#rSvi`V%~#{ zHGE;1012@=@O}!O zi-XRyusl(?;hn+_AVP)L3V&XDt@O{!->RG|>`LTxO?KP`?_d9F{dC{=n=-SF+Fi>8gugpU*FvUjN$2l<^)El>094o9>?38_!*z$lW-b zyYcG&cY@K_{cj+-Vk|y^7g?~#jp&+fE+oe@hq=X!k>^we1uHvE(gdXYQyM+wjsVv zpX9xYpF+6iJKUndWD*~`NIh1pzzj{n0}*oT0~^Gjhnql#mWUQ< zwNhPQn2ZYSzGd=7mRWnL$9kul$b_zZY9-?NRahvJ1oX3LC*kx(8dWVkQVo{#o<;KJ z9YqU9g!k;Z3nqk65t`kLPz-o90-6*LP>SQ|2!Vi_mgdt0bOmv0f?%9+`HRouo;5@b zo4BZ<8)Dk?mCH%lL3DY*u$nOLOk$Yn%?b24dIl*G;DmF!PIL=U+Kp~Cu|;a zIw>IX2=bS@-K3?G?@i@hDH3I%dq$L)p(cEud&G?$WroUniAnvSH z!fVm{``9^pQk$c&1??NPS#P3Q2OINzWxs_;ZPLh%G4~sK3f%#8{}#ei6GMT{6WM=yj3#KW(Uo(2D#~+=ZeQ4w7Q! 
z-$Edy?aUkmzG~`K*pyMHOHWOQOyD!=Ge)ZuHI$OjXL`~El?@Bi!e6pIb19J#grhRL_oGJxHc60yF6g0ck9RcWtmxk_Gi>cVFu9FoZP}wfRAg zgJaCv&?HG(of?H54M)yNbbO_=5Zo9M)*l{&h|A}Qa^u!96DslCLgkyIZ5U_=$!yeE zjbDN#QVvMer434*dwDxjCx29_9Igch1Co33at1Z8a zS{UNoffGa$=EE$`kv;Fup0s}G^jzzT=kB=HHS_#mKl6j*Q_o!a^5rj25B>1?wXWIR z9n2VcSFn6{|M~qf=YjvcxSVxMCc$I=O?J^_`@G$8Y1@TuQzZ#|*?kjdZJ6XQnJ$0qp|^+v_79lN0)&M5-7DckOt6I`pNjZZ#yaPGEGly71Z&*yg4^FOq9 zZV-Q(2k#f_@p-c%x3ihMxsC5^6mD*}0lvi>$rl8q-V%$tB(MxM@Ne7Ah`F6DcGU>C z%gp3gw_6c)r;$g2JI(g4QsbSr!mdK&+c`Yow>>7LzFlY{xYP;w3lri@c~;wW-^O?} z1Aje1L`5|7-Q!5G2b(A*zUJ_GiyXX}(gg~x53%5epG7`OeF=*)>iZ*9l2m~6t38ZN z)6Nv70WOEyE=9{%Q>OxZLLr&Po?iS>0w^ejOd+e+f8Eg@7U>p@)1Dt$K@ z6vY}gK53-+(V#RKGZSfm-MLx%Gq&oL6ENMuN$_jO7Xw`LTjLF)F6ppB$PGAaj} zY~vxz!l$dsqfqS|mHMvf?? zIn`(alQu~>)2Ks=FhGgONI{U2-Xo7t|BAo1;L@5IO!iuSA6eLG{3!s!>#%;~vhtG` zo}Av1uvfifuOj4TeaRZj>3Ss+?&+&O_k47&xz~bb9-REv3BskyKm+k zg@jV`Qmt;gj#k_M(Vfc>YToaaz3AxqcB#Y8->fR!Tg=_u#qTW=ZtgAye5=5K_*+Fh z!Nv9t(Ri!2pu=dqwa1M3+eQ;&Zi`01%djA!D$WkiSE5M4IJ)An&{6Q5&DrNJ!Gk211pc0N6za9@ba~-#l+~E*KHsLuf(> zX$4msEChja<>5jDARCvRhYJmW9GuyP3k`g;hD~WJ>nI!F@B@|SZ53pGxe-aVvW zKI{XWS#Rp?(q>?<68voRGJxem-?ZoM7S8q=uL@Sa6&T;0yybWbdZFQrdyY(Plst+V ztgONiLz45w-#p2%gRnKGmBnAgVR9)uQ+1Y=S=csUJ0NvR6$^^dZ8k7q8t+w*Jf2DR zOh2ZI{9loiZ3Q0yI;Z!uxUAB7XZFl#C@pY5i-Y%oyn9yn`?*Dvop*Djnf!Q8Q^L{o zj-v@|P2ReP7Cq+@=O+3BOmqf>zG2Yv0Vj!MD4`KQ;oI$!*Uw{SDuq zZTnjh(BIn;cT?osJ;F_^h2U%+(KkIj!FlHPX6wyjzP-$Kv&2U5dIR}&R;1SPAJG=W zL({q3dl7J+pl_2$JS|%+HoGgdAgZ`qd#mt-{9ELs4l8HiEf{&8e~;Vt zk6h(%IoJD~^L;M+eFE(7bJq7c8)81-r2obhJaFL;1U<~cpL%&9;m}?MaomZ-&q?Q7cOm&GO}kr zJ3Bky`Tm;ue&@W`Q%9hD_s_LtOAWO6P$~`!2-|sqkStM%NgfkL7!mG@rr3&D9Fa_k zw>HzxTZieGbcDU7Davj5NEw${e<4*KzDo zT!7sidle60FULN`2e_W&dgTb<29A#?jez|eHz-Yjn>lV&0)UTl>{pHfKF)Dd6KNyL z33zUql)@KE>n*qal7-Cxyzeddmrxto;qBPuvG72tzuZm+fWk@T)FqHDmi}VfN?^u8 ztFN+d!~V;aJr3ADu}1>3_J80~nJuQrH6|G*Wj0g$g~=RLGv{dLDFJ$pX`QB{ zrh!!2*+|cfp4NJdm~NWy8u3JwQZ3UR)6+d+s(+}(OrvLl&T03O3Cfq)W5QD=YN%=6 
zqxEnOjqce6=2q1rE1IfOH%#w>ztIU*mb@l{@UHNZ*GuCsc$<@~c2%c^@3KjX6=2?cghg9l!Ar21BEilKejvZ=5G zQS?isv%RCr{=-xyo>;~%6OJ8(D#cLW-nw+C394xsdLHN^ZHimSp(qoX+|8+RFr0?= z=Cnp#&{8RVp7sD?Bz}0T;>Jl^vD)zxc;Mx~vD?AD8MCXgg=mI`U_MUh&@|`=in}zp z(s%$Gd}F(Kbw3xa$m&_m-G51QR7?l>qF@8ijJ7RTJ3 zZvu6G)crVqZ5T`i?ZsfqCw~ep zVDJcmo_+fkfXc|~Q8B6_B#irBRe^i~0`~|hJ5b+J$%^vlPgyz z&!rj6%%qf-VG`^qkQvNPL3oL)t_*ja>c>e#g;DgU@Ptfz^(Gp1|dpzOAoaIma%SmDiklKomTTN-w7B0T6{6G<)ACrZuqKMGH zN2;sfJQ%lB6~zYq=(bfwvGv%i(_Yi9I6>I8%%=`xg7zNIh(Ofz7<^6puzCk8Ocnp9 z(0-RSLiQEYX?*brCVil^?omCV@GdLb=W+iJg#4_fYMI@YnbuiYqhaakHL10Cy~HYi zvik+L>mmMV>1oF(;frEsRlD5|TPfz2;nJ20FH7b9`f%kp#qLtdFe^rcFwRp|O3C@G z^awfQ1&?Yhaq#vQq=souRS9FtRaJn(UbaDY)?kCI&xTnajNH>&JaF~VXP>nz_J|1M zY<<$Kc+b24=vix=BVvcqj)>$Ym`?_4n9xgk5`eb1C|tfewKtF=Hhtpy#- zW@xEtdLM_2rop7Nk>Kenv-42~%C)mohGHd#=vS<{+DfeDaQpZ@49Jo#Z^ILPS zp871c=?yK3TV8*`+q&Ux-SoCCN#DAfemk_~Kl+E!Peu#=(;NQNtC7zifA)BNeA9nr z>DE^BiQKK+;Oddp!9qu9qa(C_Gv9t`eLnBMzI1!5`DAV?H?bPX`-96jp9fAB0$m${ zt~D{=d>-C5_*eWZO3uDApPO23T(#$Yoq1;`w@bb^j|Dx;d~k=^M#kQ|&uZSkp^S};-C_3IigI6J z&S7O_Vr2NnD2;$5cw)_(RMo(HmzWwgVOyP=Gc^M)F3=IJb)h}dd!-7)S_90N{F*#> z`kr`}J-Lo80PbaX?lc15vM<+(0AC>}IFD{PkLEgGxEwx5_Rj12l6P9uh2QOUEQ)vvp6{$u? 
z5J<*?!5oGehnSb6`8=8D$t}+Xd1l0O1|DHkj5{fYUlbz}ik$Ovfz_6` zOJsq0#!ae4(|4@}gBFWRXKq6?cg8Z;RMbeuhV{%e+#G56VJ$cSY=z($a5m|QD#gG=iDB(E*v7`jG0osWFw6~}#thgqLv>HU zUg^%-J>3~@CE#{-810_c+2uBl_D>5+Ko-8P-mVXje&etNrI>2cq3% z_mBI&>Y4E{F|So3opdF;`s?bdud2SP`l^22^Nb_UMj%Z8&rHZ$O324}lMaF|aQEUM zOc05v!AZm!Fq8ayZ)x_Om{E8i`28C4^sagxw&>N2b5E{kgEvZ~fDn`-N_tM)EY z6}$4(JeJ4U?dWo_I2*{D#rp;RvPL!%^nDcBPz?T^uayf79OiJYy3!LSTp*ML`H&| zja~dhV}Y<{*X3a<%VEu`-%vRSS=nQ=e@q@3iJSsOc|IDG``Y&I7zxQ?6%x#QW22)1 zI;I&yLCrK089D*wgjZsr;1C)nWgLRT!^7=8F9;K4fE+eF|CfXWA+3_^;f`?QCW@A@aqIBH9^lu1FC}BtF>K?$0DP0qcRjx)o&`n z;Q*y_xM3(V+SpGc$K@eaX~b4%4A`i`T{xilN0~|G#?-YTI;M$!f68>S-%raxzZd?M zl@Lvk-w^}%O>U9JQ^5sxomTbxdys>$E~9|K1plndlORlxK}=&|5Ie`hdUIMLB+3nt ze&f=8TH2fvTx6eNA0OizNzBmcB;*ump*1g6y#PiSW@hZkBjlx;BZi)QLabyZiSbcB z3g(Q#{MY3W7_YC6*9!eGZTx$?+N2J@)X~=dVqfn8&7GCF<3)JVvEL`P?bVDSRUTF9 z1X>6!(aiGcp^;cn*0>WI_lhDw+G?-YOJO|{t-!C09`UnL^G5??BauLmR>K1?{3}}^ znjm)_Rms}sJC5dwo%1WIe{}HD!5)29^$yAbMzsE(s0aM9-vYdRZh7Olbu# zJ^Coa7;{mBWEkKvPw*LBP|MFGiBEf`HK~>bi}c(QzaG1k3WrJd-2g1g)$nA1&&(u4 zl!KWhP?&FoIv~=7Xh1!t88KGsc#6vf<`ej&)FjKEPyDc~re+iMqJ}EWZBEUXFU_d% zQ6*#eYxY;|m-Fru&e_Smns-T<;^qqqr`o62#w(slHniWVXrC=; zzt0m_(d5aq>Z^GZI~Q5r1rxD2&cBp2S56ndR`zPy%yV;9n{HQanlJZEoBzIHCjU

MFrolPauh6>nP?%7;sq-cZNy|-w4^Lr-mLNn4`k$M)a6WKo|bZHsh09^&I%q=V~B z!p36tSs&6aOFDnl_SpKYgXl6@kZkGt262aEVG!Js9(0Fz^W*DGVwntgOXL$i~ z1-CuX_}d4;b?QM9Gc~2`3^nmvxqkFRIpHw39^FwB^h@Y$E0 z0j{)`&8gLvvKxBXobV;PWoGi@bZ2^5gT3_e{pcEVLO*WJIl*U_T-h5@x~(3V*F6ye ztJt^vD$e)W8@RNcb60Xy%o@$*Jxjr0W4b9)VY*hIxU2<>($BNzIL^GMvLN(lPG9-x z$;_m~(A(@KgXl$BFp=PPv|ZFF2|lApHX5R)tX`n!&j~Q&OzD>4nD*l?ofDR|c~+?v z-~*$^RwK4amqM`ZZ{=5z5XX*`hNw9Nt?m6;PFq8ITOZd#sd%s{-6YAKZmrLlV@cen za>AG9V#W(Sc~7MhpRL&-5~-9~R7ltFA?bd173@yZ>M*htU|H0NarSsI$Xnh*|*${HDe#_Mg*(seALcvl6NsdcSV*D!9~ z77Js!V9U63m9nJJyGr>CSHwr(pMbyES`gu64>>-X_?|Z~LghekOxLM+A8dSsChYC$ z?*GC8`BUe(W+)h|%&0iUdNBk#RGF##tG|WI7uJLA(gCluukR4W6Ww@0wgL3qUx<1m zyrJMXRwoL6nBb-|l7msF1&n%4xFw^)^Wmw}B&5=&_<3KA6-6$S3C~hGdM-|)z zS3>{;>LEpNDHdJ=IfV{DR(4wabJ$&fOD4#Dk(5)T!Nyq2WUx?nIFo%3a$A5~nch_8A+QTD=IA^f$*3tO*Bla@sX zY2MByinm`Yz0!73z93I^PH#`R*UY)=XWjL`bT?iro#=}fZ@-(?+5Gbz*Byy%FD8Zl zPx4D9jq}c;b7N=5rrgsN31|JJkZDZy<&K%PKY8(um){ny``)r8p6y9mdVg(qo@+kS z{5RXCT9Wp9UcSO$zV$mlBwbT0& z1;7HR+ss<)8i{>1x%)q6NWhg@d}g^5TgLC*svxu9jT2CfwWST-ztxQjIu&`0~LkWrP{at&>eImeExz^9@o#}`>Hcs-X z{+_S-u4CTfJm)y$n6s>ywXB#LN?NKvv^f5-V1*KY{8S|F9o*sulD~ud{e2U0Ro*8& zG!mQBeYX4jiOVm{>`&BexymQSZLAq9Z;LCZtL}(3cf^&DaIWu6-<()AD^^YKno(ar zcIC*`!mGx3b<3RCnh;z6uwX`Ye^kB)bMf14=G|+!H*H0`D#)8Fxm~5gn;sj=>v)Xo z1$^4Hwr8F2i?Yr_xOu1E-P2@vm*;vKP45bAd2n+hA9dU)<)Flk3VY8Q{>JJpJ=Ofp zN)x2ttTsZ*%{3O38#%~-v&p`@)_8MUTjg$#@x4+k{GP`MN$=J2Ab&LsNbzL!Sa2JL zOc`+71pKu~FlOL}0s}V`QsBl!9o(3xgByyE0Ji~fLjO;|4UVOM8n{X3^q~RJ4Yn_Q z=N5eubhA7Gx*0lMk3hE!oGc5bwub>>_StreK6^$lY`dTrj^?|$1_IzL#taPz&VbA(Z#`OfpP>hEgV{Il2@deK zEL6>a*Rr6Ld78G&(&6*KRR*Kqz*c7+WtT?=F$+fXGP+>9#E^ojr-D&H0j-Gg;K0C8;Gy{@PYh{M%>$2Og%U?R;FX86$0GUCyC0)ME zNHApl*&~tB@d#veW?6js@h`|inI&X&evQaz=z(>EDmBlBlc|q-XszV9-NHZ zG9B>J7D%Y8psjGxtoV6h2@5H{5T~$m2;G5EJ4PKCbz;OA&_orLhN4RlNW1Wv&VcM| zMT!B@tu#7)e(BzHZ9jrx=flCUxcuUQ3kRl70gBBP*T;+N zXEsjS=3T`Xtrx6Qs}ruOIoGbZYuCSyPanUM|I^Xybj~TwI;HsDfrRs=N#R#pJ6?Zz zZs6s2eD8L~|LjP-ekf5S&(;r3pS-yL!v3kDzdHmslh$~VoI%c-%ll@Y|B3&N<8K#T 
z5585N*wUM{>;~ja+N&~n*m=tw?;p77Op5y-1TtVvpu4AI&8^F@CIV*LGJuK1Su-ov z%!%t}#dR|;B*kaHa++-Vs%Ua2_c=gi@lkGZ_16xS{|>Vlpb~y40Z`dficr~8j8KX4 zIv(SC0iQP2^{x?qQQivjTjk{#KWo9br3&J=y;h9Zp`N#!%6iwE{?pd(ftz>NxqCMm zZV;}w$#jEj%Y&Pn4$O74m;>E6OYOb2{LPvzy&nEnnF-Qwd5n;9t2PhiCJyr7+GOur zZM@aeR@ql&e7_tEzh7m9r1w|zAh*C-kua~U8BfMSvZ|S3eFsBSf!AMVzdslm^84Wz zAO52;_zeaAvH_N??v^x@Iu?ajjuchE*T=|6gevS6S(by#xFzPln5)xW5`Lv*FQVAz zN6mVaM#kl^BC8Z%{ZafvLlFSjuK*}>i1;>%`3?rU3_*1tW7#$u3C2d`7CHi1@WY^T z5~2m3g$He|2asP%$b3~dY(ubt#LsGfuA|>AGqUqCd?}miCDNYI(O{BbL xUa&3jhFZftVkqb1#Jy;P#7~KUDFRFR%z?k=_{;_$^gk=d@0hzuAa)!Eu#MLyU;{}UUbbV$0msI8z?-H9R>SUKHr-e53{If^ z*jANF)JWbaR9UHsL~5&{sw+`7Qu(Qr&wg&SCdQ*wO4E-vAC+)%O8eJy@60TV19jAo zj^Lhq&OPVed(J)goOAb>x5ka&`{&=6llugO{z4Gzvl$&`GY6eKl8}Zbk;Du!gRI6j zl0G}c4e}a4Xwz(fGZHssA9QGrL8s;%bZM?Zx8@%7Xr4hq69#LvnnADT9rS6wMl`{P zQ4^B*dL-GpS!11Ub6w3Z*;`P=F@OA0@;a8aWHuwJv+1;q=fs4p$?=q|s-m1ph*-&G zu?8TMP_TmABaH5mGnuSLh13YEyD9>@Z91u>5)qfq#}di3Zi`K2Qwcz@lF%KoIVF|K zz5&$uoSe~JbdU~tjP7-;C>dQa5R=&%yOC65DJ2s(cTNFsAk609pp!=vNMpcq?4+P^ z2f=0l8cS2x z2T8-I?YTK{zL@FUN-U;_OmnwaQ=fuG)gaya7CL1UACRWVQLqVQM3p5S@_FDN6mxBc z>HXQd`{cVX(M~>Z7$6AYj8(R82 zR>#n`jMaOLj&&B_HhheZ4cnYnZx0>0@%Zwp2RaBuubyiVpDpXe$)mx#utEfJUnhi?+oM$vNe!$WpT3NHL$}=X}HZzkDzMSxQ ziKtL5pQ{-Sg4YFrT4f&rjW>RCUCLLplaEMu+Hzj0wjM=;(NGsF1>qJs;U4**Y2V5?k9VoY9G6Egtj@Pr$&xht8T-n-{OEI*Z5T*jO@seBzQxH zVKxs!XRiz?aS|^Fl5R`MQ%VYpq|an93C5V*EZhW5gu%@KjI4=?(_%8e ze$dKtMyc1{m&(TFlsd2@>yZgm(&W(=VqMQXAWbp2LBNs$R=Ow=lcR~783wvE{~79> z1B9ppXOE%`xn(&n%1ok@i;(_Z)PvY21UJ&maxG{(Z&XvE1Yr6>9TGw!JU$8l6Eo}1 zcs7;AvFVJ?s45SX!-o%xR8^=tRVZ2Y1A8hz`*uQFmBnmBT8j?9;TInk7d0axrhh zw@p|u;rk}MZNfzp7ESo0`M{;2m!vaeLzjoe(W{p(osnME8S#Je92R`X@EjZU9K(hp z>`&)z$f?IV4JLu9AO2Nx+J;Y@W61fRV+tzqigS$N7U!7dpg}hrqbhwM2gp)C|0Q(# z*nHJ-PohyCl1)|W*$$)8N|720u4Zk2 ze>pU;@Zx&izCyHk;{CCuLqES(ytLf199~veuKwox%jwkmdkra( zyaRwyoRM!RVkRp>jUJzYcL04-!aG2OSEe?jhzUj2k{SAd?3w?eD2~l2szRkzF+Hnl zqVgs@GQ=qb>U_o8QC!XdMKmv0JM8T$2*Eo@!SFOm!iwo^LJ_kVo~`N|3Km=ET18ni 
z7Bt==Vh(51IUr@tX~uxQnrIeVaya~>g%E|D^#-w!_bnqN}n#^iTB{Pbu zGvItVJOC7Y3L2Gc|1deso!{xxhFijei#+;u?@rI8FP|^r0{5S6OVr?9{>1Q**8$~o?68_$*pBy zPs!W6+DXXYS>iwPedzl*Qug(gy!{&vuIAJRhg|-L-v8e^;lkD8m7l#@_BAfBpTnlE z{`3b6?CnF#wp(pQ>Dv?dFTzI_-0S|p;@P76-OFqKmStwm7g^xfeg4I+!u-3Z)_l!N zV+N^ye_^^fy43x_cZ(PAgqP>bk)G0#-qo|89Q?zD)xJOSpWG;gh8J90U~?Y6JI&_`(wX7R&F0CA3gO++uG6LvUs`FI8qw9QhGwVD~zs(pLp-u z;8_5p|Lnl|jP$UTn;eYgzt@^PHs5Gq5h(d$OZgGUvEpVwJ0@4_Q$+Oak9j}m)aEw+m$#_iG zG@P88)f5#bCZ&t9Yg*a)2ud0fZ`8M;`3d@>z9BzU2$uPVb?CjkOYZ`ueOtaiHyl__ z+->N}pI;a%^IZ?@sO88if48|mf4Pt>^ZoamTPa%NLlocQt)EMbjgsvRM*{H6U@Vc1 z8`hnkg`axBx|q}nx~KBfqM~9_Jz;7bJ1z56BV^NHOXDkz(KHIv7)^sWQQyeT)FL~r zq&xeJ(;UDF;3HM1`aCon9K$g8(X)R-&3{Elws?+Vw-7X6lV(FetY?cIVHj}(k#>^< K!apd1>h&MFo}^y@ literal 0 HcmV?d00001 diff --git a/truenas_migrate/archive.py b/truenas_migrate/archive.py new file mode 100644 index 0000000..03b6430 --- /dev/null +++ b/truenas_migrate/archive.py @@ -0,0 +1,352 @@ +"""TrueNAS debug archive parser (SCALE ixdiagnose and CORE fndebug layouts).""" +from __future__ import annotations + +import contextlib +import json +import sys +import tarfile +from typing import Any, Optional + +from .colors import log + +# ───────────────────────────────────────────────────────────────────────────── +# Archive layout constants +# ───────────────────────────────────────────────────────────────────────────── +# +# TrueNAS SCALE generates debug archives with the "ixdiagnose" tool. 
+# The internal layout has changed across versions: +# +# SCALE 24.04+ (plugins layout, lowercase dirs, combined JSON files) +# ixdiagnose/plugins/smb/smb_info.json – SMB shares + config combined +# ixdiagnose/plugins/nfs/nfs_config.json – NFS shares + config combined +# +# Older SCALE (plugins layout, uppercase dirs, per-query JSON files) +# ixdiagnose/plugins/SMB/sharing.smb.query.json +# ixdiagnose/plugins/NFS/sharing.nfs.query.json +# ixdiagnose/plugins/Sharing/sharing.smb.query.json +# ixdiagnose/plugins/Sharing/sharing.nfs.query.json +# +# TrueNAS CORE uses the "freenas-debug" tool (stored as "fndebug" inside the +# archive). It produces plain-text dump files with embedded JSON blocks. + +_CANDIDATES: dict[str, list[str]] = { + "smb_shares": [ + "ixdiagnose/plugins/smb/smb_info.json", + "ixdiagnose/plugins/SMB/sharing.smb.query.json", + "ixdiagnose/plugins/Sharing/sharing.smb.query.json", + "ixdiagnose/SMB/sharing.smb.query.json", + ], + "nfs_shares": [ + "ixdiagnose/plugins/nfs/nfs_config.json", + "ixdiagnose/plugins/NFS/sharing.nfs.query.json", + "ixdiagnose/plugins/Sharing/sharing.nfs.query.json", + "ixdiagnose/NFS/sharing.nfs.query.json", + ], +} + +# When a candidate file bundles multiple datasets, pull out the right sub-key. +_KEY_WITHIN_FILE: dict[str, str] = { + "smb_shares": "sharing_smb_query", + "nfs_shares": "sharing_nfs_query", +} + +# Keyword fragments for heuristic fallback scan (SCALE archives only) +_KEYWORDS: dict[str, list[str]] = { + "smb_shares": ["sharing.smb", "smb_share", "sharing/smb", "smb_info"], + "nfs_shares": ["sharing.nfs", "nfs_share", "sharing/nfs", "nfs_config"], +} + +# Presence of this path prefix identifies a TrueNAS CORE archive. 
+_CORE_MARKER = "ixdiagnose/fndebug" + + +# ───────────────────────────────────────────────────────────────────────────── +# Internal helpers +# ───────────────────────────────────────────────────────────────────────────── + +def _members_map(tf: tarfile.TarFile) -> dict[str, tarfile.TarInfo]: + """Return {normalised_path: TarInfo} for every member.""" + return {m.name.lstrip("./"): m for m in tf.getmembers()} + + +def _read_json(tf: tarfile.TarFile, info: tarfile.TarInfo) -> Optional[Any]: + """Extract and JSON-parse one archive member. Returns None on any error.""" + try: + fh = tf.extractfile(info) + if fh is None: + return None + raw = fh.read().decode("utf-8", errors="replace").strip() + return json.loads(raw) if raw else None + except Exception as exc: + log.debug("Could not parse %s: %s", info.name, exc) + return None + + +def _extract_subkey(raw: Any, data_type: str) -> Optional[Any]: + """Pull out the relevant sub-key when a JSON file bundles multiple datasets.""" + if not isinstance(raw, dict): + return raw + key = _KEY_WITHIN_FILE.get(data_type) + if key and key in raw: + return raw[key] + return raw + + +def _find_data( + tf: tarfile.TarFile, + members: dict[str, tarfile.TarInfo], + data_type: str, +) -> Optional[Any]: + """Try candidate paths, then keyword heuristics. 
Return parsed JSON or None.""" + + # Pass 1 – exact / suffix match against known candidate paths + for candidate in _CANDIDATES[data_type]: + norm = candidate.lstrip("./") + info = members.get(norm) + if info is None: + # Archive may have a date-stamped top-level directory + for path, member in members.items(): + if path == norm or path.endswith("/" + norm): + info = member + break + if info is not None: + raw = _read_json(tf, info) + result = _extract_subkey(raw, data_type) + if result is not None: + log.info(" %-12s → %s", data_type, info.name) + return result + + # Pass 2 – keyword heuristic scan over all .json members + log.debug(" %s: candidates missed, scanning archive …", data_type) + keywords = _KEYWORDS[data_type] + for path in sorted(members): + if not path.lower().endswith(".json"): + continue + if any(kw in path.lower() for kw in keywords): + raw = _read_json(tf, members[path]) + result = _extract_subkey(raw, data_type) + if result is not None: + log.info(" %-12s → %s (heuristic)", data_type, path) + return result + + return None + + +def _extract_core_dump_json(dump_text: str, title_fragment: str) -> list[Any]: + """ + Extract all top-level JSON values from a named section of a CORE dump.txt. + + CORE dump sections look like: + +--------...--------+ + + SECTION TITLE + + +--------...--------+ + + debug finished in N seconds for SECTION TITLE + + Returns a list of parsed JSON values found in the content block, in order. 
+ """ + import re as _re + + parts = _re.split(r'\+[-]{20,}\+', dump_text) + for i, part in enumerate(parts): + if title_fragment.lower() in part.lower() and i + 1 < len(parts): + content = parts[i + 1] + content = _re.sub( + r'debug finished.*', '', content, + flags=_re.IGNORECASE | _re.DOTALL, + ).strip() + + results: list[Any] = [] + decoder = json.JSONDecoder() + pos = 0 + while pos < len(content): + remaining = content[pos:].lstrip() + if not remaining or remaining[0] not in "{[": + break + pos += len(content[pos:]) - len(remaining) + try: + val, end = decoder.raw_decode(remaining) + results.append(val) + pos += end + except json.JSONDecodeError: + break + return results + + return [] + + +def _parse_core_into( + tf: tarfile.TarFile, + members: dict[str, tarfile.TarInfo], + result: dict[str, Any], +) -> None: + """Populate *result* from TrueNAS CORE fndebug dump files.""" + log.info("TrueNAS CORE archive detected; parsing fndebug dump files.") + + smb_key = "ixdiagnose/fndebug/SMB/dump.txt" + if smb_key in members: + fh = tf.extractfile(members[smb_key]) + dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] + vals = _extract_core_dump_json(dump, "Database Dump") + if len(vals) >= 2 and isinstance(vals[1], list): + result["smb_shares"] = vals[1] + log.info(" smb_shares → %s (CORE, %d share(s))", smb_key, len(vals[1])) + elif vals: + log.warning(" smb_shares → NOT FOUND in Database Dump") + else: + log.warning(" SMB dump not found: %s", smb_key) + + nfs_key = "ixdiagnose/fndebug/NFS/dump.txt" + if nfs_key in members: + fh = tf.extractfile(members[nfs_key]) + dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] + vals = _extract_core_dump_json(dump, "Configuration") + if len(vals) >= 2 and isinstance(vals[1], list): + result["nfs_shares"] = vals[1] + log.info(" nfs_shares → %s (CORE, %d share(s))", nfs_key, len(vals[1])) + else: + log.warning(" nfs_shares → NOT FOUND in Configuration") + else: + log.warning(" NFS 
dump not found: %s", nfs_key) + + if not result["smb_shares"] and not result["nfs_shares"]: + log.warning( + "No share data found in CORE archive. " + "This is expected when SMB/NFS services were disabled on the source system." + ) + + +@contextlib.contextmanager +def _open_source_tar(tar_path: str): + """ + Open the archive that actually contains the ixdiagnose data. + + TrueNAS HA debug bundles (25.04+) wrap each node's ixdiagnose snapshot + in a separate .txz inside the outer .tgz. We prefer the member whose + name includes '_active'; if none is labelled that way we fall back to the + first .txz found. Single-node (non-HA) bundles are used directly. + """ + with tarfile.open(tar_path, "r:*") as outer: + txz_members = [ + m for m in outer.getmembers() + if m.name.lower().endswith(".txz") and m.isfile() + ] + if not txz_members: + yield outer + return + + active = next( + (m for m in txz_members if "_active" in m.name.lower()), + txz_members[0], + ) + log.info(" HA bundle detected; reading inner archive: %s", active.name) + fh = outer.extractfile(active) + with tarfile.open(fileobj=fh, mode="r:*") as inner: + yield inner + + +# ───────────────────────────────────────────────────────────────────────────── +# Public API +# ───────────────────────────────────────────────────────────────────────────── + +def parse_archive(tar_path: str) -> dict[str, Any]: + """ + Extract SMB shares and NFS shares from the debug archive. 
+ Returns: {"smb_shares": list, "nfs_shares": list} + """ + log.info("Opening archive: %s", tar_path) + result: dict[str, Any] = { + "smb_shares": [], + "nfs_shares": [], + } + + try: + with _open_source_tar(tar_path) as tf: + members = _members_map(tf) + log.info(" Archive contains %d total entries.", len(members)) + + is_core = any( + p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") + for p in members + ) + + if is_core: + _parse_core_into(tf, members, result) + else: + for key in ("smb_shares", "nfs_shares"): + data = _find_data(tf, members, key) + if data is None: + log.warning(" %-12s → NOT FOUND", key) + continue + + if isinstance(data, list): + result[key] = data + elif isinstance(data, dict): + # Some versions wrap the list: {"result": [...]} + for v in data.values(): + if isinstance(v, list): + result[key] = v + break + + except (tarfile.TarError, OSError) as exc: + log.error("Failed to open archive: %s", exc) + sys.exit(1) + + log.info( + "Parsed: %d SMB share(s), %d NFS share(s)", + len(result["smb_shares"]), + len(result["nfs_shares"]), + ) + return result + + +def list_archive_and_exit(tar_path: str) -> None: + """ + Print a structured listing of the archive contents, then exit. + For SCALE archives: lists all .json plugin files. + For CORE archives: lists the fndebug dump files and the JSON sections + that contain share data. 
+ """ + try: + with _open_source_tar(tar_path) as tf: + members_map = _members_map(tf) + is_core = any( + p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") + for p in members_map + ) + + if is_core: + print(f"\nTrueNAS CORE archive: {tar_path}\n") + print(" fndebug plain-text dump files (JSON is embedded inside):\n") + dump_files = sorted( + p for p in members_map + if p.startswith(_CORE_MARKER + "/") and p.endswith(".txt") + ) + for p in dump_files: + size = members_map[p].size / 1024 + print(f" {p} ({size:.1f} KB)") + print() + print(" Data this tool will extract:") + print(" SMB shares → fndebug/SMB/dump.txt (\"Database Dump\" section)") + print(" NFS shares → fndebug/NFS/dump.txt (\"Configuration\" section)") + else: + print(f"\nJSON plugin files in archive: {tar_path}\n") + json_members = sorted( + (m for m in tf.getmembers() if m.name.endswith(".json")), + key=lambda m: m.name, + ) + if not json_members: + print(" (no .json files found)") + else: + current_dir = "" + for m in json_members: + parts = m.name.lstrip("./").split("/") + top = "/".join(parts[:-1]) if len(parts) > 1 else "" + if top != current_dir: + print(f"\n {top or '(root)'}/") + current_dir = top + print(f" {parts[-1]} ({m.size / 1024:.1f} KB)") + except (tarfile.TarError, OSError) as exc: + sys.exit(f"ERROR: {exc}") + print() + sys.exit(0) diff --git a/truenas_migrate/cli.py b/truenas_migrate/cli.py new file mode 100644 index 0000000..d9581b3 --- /dev/null +++ b/truenas_migrate/cli.py @@ -0,0 +1,425 @@ +""" +truenas_migrate – TrueNAS Share Migration Tool +================================================= +Reads SMB shares and NFS shares from a TrueNAS debug archive (.tar / .tgz) +produced by the built-in "Save Debug" feature, then re-creates them on a +destination TrueNAS system via the JSON-RPC 2.0 WebSocket API (TrueNAS 25.04+). + +SAFE BY DEFAULT + • Existing shares are never overwritten or deleted. + • Always run with --dry-run first to preview what will happen. 
+ +REQUIREMENTS + Python 3.9+ (stdlib only – no external packages needed) + +QUICK START + # 1. Inspect your debug archive to confirm it contains the data you need: + python -m truenas_migrate --debug-tar debug.tgz --list-archive + + # 2. Dry-run – connect to destination but make zero changes: + python -m truenas_migrate \\ + --debug-tar debug.tgz \\ + --dest 192.168.1.50 \\ + --api-key "1-xxxxxxxxxxxx" \\ + --dry-run + + # 3. Live migration: + python -m truenas_migrate \\ + --debug-tar debug.tgz \\ + --dest 192.168.1.50 \\ + --api-key "1-xxxxxxxxxxxx" + + # 4. Migrate only SMB shares (skip NFS): + python -m truenas_migrate \\ + --debug-tar debug.tgz \\ + --dest 192.168.1.50 \\ + --api-key "1-xxxxxxxxxxxx" \\ + --migrate smb + +CONFLICT POLICY + Shares that already exist on the destination are silently skipped: + SMB – matched by share name (case-insensitive) + NFS – matched by export path (exact match) +""" +from __future__ import annotations + +import argparse +import asyncio +import getpass +import logging +import sys +from pathlib import Path +from typing import Optional + +from .archive import parse_archive, list_archive_and_exit +from .client import TrueNASClient, check_dataset_paths, create_missing_datasets +from .colors import log, _bold, _bold_cyan, _bold_yellow, _cyan, _dim, _green, _yellow +from .migrate import migrate_smb_shares, migrate_nfs_shares +from .summary import Summary + + +# ───────────────────────────────────────────────────────────────────────────── +# CLI orchestration +# ───────────────────────────────────────────────────────────────────────────── + +async def run( + args: argparse.Namespace, + archive: Optional[dict] = None, +) -> Summary: + if archive is None: + archive = parse_archive(args.debug_tar) + migrate_set = set(args.migrate) + + if args.dry_run: + msg = " DRY RUN – no changes will be made on the destination " + bar = _bold_yellow("─" * len(msg)) + print(f"\n{_bold_yellow('┌')}{bar}{_bold_yellow('┐')}", file=sys.stderr) + 
print(f"{_bold_yellow('│')}{_bold_yellow(msg)}{_bold_yellow('│')}", file=sys.stderr) + print(f"{_bold_yellow('└')}{bar}{_bold_yellow('┘')}\n", file=sys.stderr) + + summary = Summary() + + async with TrueNASClient( + host=args.dest, + port=args.port, + api_key=args.api_key, + verify_ssl=args.verify_ssl, + ) as client: + + if "smb" in migrate_set: + await migrate_smb_shares( + client, archive["smb_shares"], args.dry_run, summary) + + if "nfs" in migrate_set: + await migrate_nfs_shares( + client, archive["nfs_shares"], args.dry_run, summary) + + if args.dry_run and summary.paths_to_create: + summary.missing_datasets = await check_dataset_paths( + client, summary.paths_to_create, + ) + + return summary + + +# ───────────────────────────────────────────────────────────────────────────── +# Interactive wizard helpers +# ───────────────────────────────────────────────────────────────────────────── + +def _find_debug_archives(directory: str = ".") -> list[Path]: + """Return sorted list of TrueNAS debug archives found in *directory*.""" + patterns = ("*.tgz", "*.tar.gz", "*.tar", "*.txz", "*.tar.xz") + found: set[Path] = set() + for pat in patterns: + found.update(Path(directory).glob(pat)) + return sorted(found) + + +def _prompt(label: str, default: str = "") -> str: + suffix = f" [{default}]" if default else "" + try: + val = input(f"{label}{suffix}: ").strip() + return val if val else default + except (EOFError, KeyboardInterrupt): + print() + sys.exit(0) + + +def _confirm(label: str) -> bool: + try: + return input(f"{label} [y/N]: ").strip().lower() in ("y", "yes") + except (EOFError, KeyboardInterrupt): + print() + return False + + +def _select_shares(shares: list[dict], share_type: str) -> list[dict]: + """ + Display a numbered list of *shares* and return only those the user selects. + Enter (or 'all') returns all shares unchanged. 'n' / 'none' returns []. 
+ """ + if not shares: + return shares + + print(f"\n {_bold(f'{share_type} shares in archive ({len(shares)}):')} \n") + for i, share in enumerate(shares, 1): + if share_type == "SMB": + name = share.get("name", "") + path = share.get("path", "") + print(f" {_cyan(str(i) + '.')} {name:<22} {_dim(path)}") + else: # NFS + pl = share.get("paths") or [] + path = share.get("path") or (pl[0] if pl else "") + extra = f" {_dim('+ ' + str(len(pl) - 1) + ' more')}" if len(pl) > 1 else "" + print(f" {_cyan(str(i) + '.')} {path}{extra}") + + print() + raw = _prompt( + f" Select {share_type} shares to migrate " + "(e.g. '1 3', Enter = all, 'n' = none)", + default="all", + ) + + low = raw.strip().lower() + if low in ("", "all"): + print(f" {_green('✓')} All {len(shares)} {share_type} share(s) selected.") + return shares + if low in ("n", "none", "0"): + print(f" {_yellow('–')} No {share_type} shares selected.") + return [] + + seen: set[int] = set() + selected: list[dict] = [] + for tok in raw.split(): + if tok.isdigit(): + idx = int(tok) - 1 + if 0 <= idx < len(shares) and idx not in seen: + seen.add(idx) + selected.append(shares[idx]) + + if selected: + print(f" {_green('✓')} {len(selected)} of {len(shares)} {share_type} share(s) selected.") + else: + print(f" {_yellow('–')} No valid selections; skipping {share_type} shares.") + return selected + + +# ───────────────────────────────────────────────────────────────────────────── +# Interactive wizard +# ───────────────────────────────────────────────────────────────────────────── + +def interactive_mode() -> None: + """Interactive wizard: pick archive → configure → dry run → confirm → apply.""" + print( + f"\n{_bold_cyan(' TrueNAS Share Migration Tool')}\n" + f" {_dim('Migrate SMB/NFS shares from a debug archive to a live system.')}\n" + ) + + # 1 ── Locate debug archive ──────────────────────────────────────────────── + archives = _find_debug_archives() + if not archives: + sys.exit( + "No debug archives (.tgz / .tar.gz / .tar 
/ .txz) found in the " + "current directory.\n" + "Copy your TrueNAS debug file here, or use --debug-tar to specify a path." + ) + + if len(archives) == 1: + chosen = archives[0] + print(f" {_dim('Archive:')} {_bold(chosen.name)} " + f"{_dim('(' + f'{chosen.stat().st_size / 1_048_576:.1f} MB' + ')')}\n") + else: + print(f" {_bold('Debug archives found:')}\n") + for i, p in enumerate(archives, 1): + print(f" {_cyan(str(i) + '.')} {p.name} " + f"{_dim('(' + f'{p.stat().st_size / 1_048_576:.1f} MB' + ')')}") + print() + while True: + raw = _prompt(f"Select archive [1-{len(archives)}]") + if raw.isdigit() and 1 <= int(raw) <= len(archives): + chosen = archives[int(raw) - 1] + break + print(f" Enter a number from 1 to {len(archives)}.") + + # 2 ── Destination ───────────────────────────────────────────────────────── + print() + host = "" + while not host: + host = _prompt("Destination TrueNAS host or IP") + if not host: + print(" Host is required.") + + port_raw = _prompt("WebSocket port", default="443") + port = int(port_raw) if port_raw.isdigit() else 443 + + # 3 ── API key ───────────────────────────────────────────────────────────── + api_key = "" + while not api_key: + try: + api_key = getpass.getpass("API key (input hidden): ").strip() + except (EOFError, KeyboardInterrupt): + print() + sys.exit(0) + if not api_key: + print(" API key is required.") + + # 4 ── Migration scope ───────────────────────────────────────────────────── + print(f"\n {_bold('What to migrate?')}") + print(f" {_cyan('1.')} SMB shares") + print(f" {_cyan('2.')} NFS shares") + sel_raw = _prompt( + "Selection (space-separated numbers, Enter for all)", default="1 2" + ) + _sel_map = {"1": "smb", "2": "nfs"} + migrate: list[str] = [] + for tok in sel_raw.split(): + if tok in _sel_map and _sel_map[tok] not in migrate: + migrate.append(_sel_map[tok]) + if not migrate: + migrate = ["smb", "nfs"] + + # 5 ── Parse archive once (reused for dry + live runs) ──────────────────── + print() + archive_data = 
parse_archive(str(chosen)) + + # 5b ── Select individual shares ─────────────────────────────────────────── + if "smb" in migrate and archive_data["smb_shares"]: + archive_data["smb_shares"] = _select_shares(archive_data["smb_shares"], "SMB") + if "nfs" in migrate and archive_data["nfs_shares"]: + archive_data["nfs_shares"] = _select_shares(archive_data["nfs_shares"], "NFS") + print() + + base_ns = dict( + debug_tar=str(chosen), + dest=host, + port=port, + api_key=api_key, + verify_ssl=False, + migrate=migrate, + ) + + # 6 ── Dry run ───────────────────────────────────────────────────────────── + dry_summary = asyncio.run( + run(argparse.Namespace(**base_ns, dry_run=True), archive_data) + ) + print(dry_summary.report()) + + # Offer to create missing datasets before the live run + if dry_summary.missing_datasets: + non_mnt = [p for p in dry_summary.missing_datasets if not p.startswith("/mnt/")] + creatable = [p for p in dry_summary.missing_datasets if p.startswith("/mnt/")] + + if non_mnt: + print(f" NOTE: {len(non_mnt)} path(s) cannot be auto-created " + "(not under /mnt/):") + for p in non_mnt: + print(f" • {p}") + print() + + if creatable: + print(f" {len(creatable)} dataset(s) can be created automatically:") + for p in creatable: + print(f" • {p}") + print() + if _confirm(f"Create these {len(creatable)} dataset(s) on {host} now?"): + asyncio.run(create_missing_datasets( + host=host, + port=port, + api_key=api_key, + paths=creatable, + )) + print() + + if not _confirm(f"Apply these changes to {host}?"): + print("Aborted – no changes made.") + sys.exit(0) + + # 7 ── Live run ──────────────────────────────────────────────────────────── + print() + live_summary = asyncio.run( + run(argparse.Namespace(**base_ns, dry_run=False), archive_data) + ) + print(live_summary.report()) + if live_summary.errors: + sys.exit(2) + + +# ───────────────────────────────────────────────────────────────────────────── +# Argument parser + entry point +# 
# ─────────────────────────────────────────────────────────────────────────────

def main() -> None:
    """CLI entry point.

    With no arguments at all, drop into the interactive wizard.  Otherwise
    parse flags, validate them (argparse's ``error()`` exits with status 2),
    run one migration pass, print the report, and exit 2 if it recorded
    any errors.
    """
    if len(sys.argv) == 1:
        interactive_mode()
        return

    parser = argparse.ArgumentParser(
        prog="truenas_migrate",
        description=(
            "Migrate SMB and NFS shares from a TrueNAS debug archive "
            "to a live destination system."
        ),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # NOTE(review): after the package split this is cli.py's own module
        # docstring, not the original single-file usage text — confirm the
        # epilog still renders the intended QUICK START examples.
        epilog=__doc__,
    )

    # ── Source ────────────────────────────────────────────────────────────
    parser.add_argument(
        "--debug-tar", required=True, metavar="FILE",
        help="Path to the TrueNAS debug .tar / .tgz from the SOURCE system.",
    )
    parser.add_argument(
        "--list-archive", action="store_true",
        help=(
            "List all JSON files found in the archive and exit. "
            "Run this first to verify the archive contains share data."
        ),
    )

    # ── Destination ───────────────────────────────────────────────────────
    parser.add_argument(
        "--dest", metavar="HOST",
        help="Hostname or IP of the DESTINATION TrueNAS system.",
    )
    parser.add_argument(
        "--port", type=int, default=443, metavar="PORT",
        help="WebSocket port on the destination (default: 443).",
    )
    parser.add_argument(
        "--verify-ssl", action="store_true",
        help=(
            "Verify the destination TLS certificate. "
            "Off by default because most TrueNAS systems use self-signed certs."
        ),
    )

    # ── Authentication ────────────────────────────────────────────────────
    parser.add_argument(
        "--api-key", metavar="KEY",
        help=(
            "TrueNAS API key. Generate one in TrueNAS UI: "
            "top-right account menu → API Keys."
        ),
    )

    # ── Scope ─────────────────────────────────────────────────────────────
    parser.add_argument(
        "--migrate",
        nargs="+",
        choices=["smb", "nfs"],
        default=["smb", "nfs"],
        metavar="TYPE",
        help=(
            "What to migrate. Choices: smb nfs "
            "(default: both). Example: --migrate smb"
        ),
    )
    parser.add_argument(
        "--dry-run", action="store_true",
        help="Parse archive and connect to destination, but make no changes.",
    )
    parser.add_argument(
        "--verbose", "-v", action="store_true",
        help="Enable DEBUG-level logging.",
    )

    args = parser.parse_args()

    if args.verbose:
        log.setLevel(logging.DEBUG)

    # Validate everything possible before touching the network.
    if not Path(args.debug_tar).is_file():
        parser.error(f"Archive not found: {args.debug_tar}")

    if args.list_archive:
        list_archive_and_exit(args.debug_tar)  # does not return

    if not args.dest:
        parser.error("--dest is required (or use --list-archive to inspect the archive).")
    if not args.api_key:
        parser.error("--api-key is required.")

    summary = asyncio.run(run(args))
    print(summary.report())
    if summary.errors:
        sys.exit(2)


# ══ file boundary: truenas_migrate/client.py ═════════════════════════════════
# Module docstring: """TrueNAS WebSocket client and dataset utilities."""
# (client.py begins with `from __future__ import annotations`.)

import asyncio
import base64
import contextlib
import hashlib
import json
import os
import ssl
import struct
from typing import Any, Optional

from .colors import log


# ─────────────────────────────────────────────────────────────────────────────
# Raw WebSocket implementation (stdlib only, RFC 6455)
# ─────────────────────────────────────────────────────────────────────────────
else: + header.append(0x80 | 127) + header += struct.pack("!Q", length) + return bytes(header) + mask + _ws_mask(payload, mask) + + +async def _ws_recv_message(reader: asyncio.StreamReader) -> str: + """ + Read one complete WebSocket message, reassembling continuation frames. + Skips ping/pong control frames. Raises OSError on close frame. + """ + fragments: list[bytes] = [] + while True: + hdr = await reader.readexactly(2) + fin = bool(hdr[0] & 0x80) + opcode = hdr[0] & 0x0F + masked = bool(hdr[1] & 0x80) + length = hdr[1] & 0x7F + + if length == 126: + length = struct.unpack("!H", await reader.readexactly(2))[0] + elif length == 127: + length = struct.unpack("!Q", await reader.readexactly(8))[0] + + mask_key = await reader.readexactly(4) if masked else None + payload = await reader.readexactly(length) if length else b"" + if mask_key: + payload = _ws_mask(payload, mask_key) + + if opcode == 0x8: + raise OSError("WebSocket: server sent close frame") + if opcode in (0x9, 0xA): + continue + + fragments.append(payload) + if fin: + return b"".join(fragments).decode("utf-8") + + +class _WebSocket: + """asyncio StreamReader/Writer wrapped to a simple send/recv/close API.""" + + def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None: + self._reader = reader + self._writer = writer + + async def send(self, data: str) -> None: + self._writer.write(_ws_encode_frame(data.encode("utf-8"), opcode=0x1)) + await self._writer.drain() + + async def recv(self) -> str: + return await _ws_recv_message(self._reader) + + async def close(self) -> None: + with contextlib.suppress(Exception): + self._writer.write(_ws_encode_frame(b"", opcode=0x8)) + await self._writer.drain() + self._writer.close() + with contextlib.suppress(Exception): + await self._writer.wait_closed() + + +async def _ws_connect(host: str, port: int, path: str, ssl_ctx: ssl.SSLContext) -> _WebSocket: + """Open a TLS connection, perform the HTTP→WebSocket upgrade, return a _WebSocket.""" + 
reader, writer = await asyncio.open_connection(host, port, ssl=ssl_ctx) + + key = base64.b64encode(os.urandom(16)).decode() + writer.write(( + f"GET {path} HTTP/1.1\r\n" + f"Host: {host}:{port}\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Key: {key}\r\n" + f"Sec-WebSocket-Version: 13\r\n" + f"\r\n" + ).encode()) + await writer.drain() + + response_lines: list[bytes] = [] + while True: + line = await asyncio.wait_for(reader.readline(), timeout=20) + if not line: + raise OSError("Connection closed during WebSocket handshake") + response_lines.append(line) + if line in (b"\r\n", b"\n"): + break + + status = response_lines[0].decode("latin-1").strip() + if " 101 " not in status: + raise OSError(f"WebSocket upgrade failed: {status}") + + expected = base64.b64encode( + hashlib.sha1( + (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode() + ).digest() + ).decode().lower() + headers_text = b"".join(response_lines).decode("latin-1").lower() + if expected not in headers_text: + raise OSError("WebSocket upgrade: Sec-WebSocket-Accept mismatch") + + return _WebSocket(reader, writer) + + +# ───────────────────────────────────────────────────────────────────────────── +# TrueNAS JSON-RPC 2.0 client +# ───────────────────────────────────────────────────────────────────────────── + +class TrueNASClient: + """ + Minimal async JSON-RPC 2.0 client for the TrueNAS WebSocket API. 
class TrueNASClient:
    """
    Minimal async JSON-RPC 2.0 client for the TrueNAS WebSocket API.

    TrueNAS 25.04+ endpoint: wss://<host>:<port>/api/current
    Authentication: auth.login_with_api_key

    Intended for use as an async context manager: the socket is opened and
    authenticated in ``__aenter__`` and closed in ``__aexit__``.
    """

    def __init__(
        self,
        host: str,
        api_key: str,
        port: int = 443,
        verify_ssl: bool = False,
    ) -> None:
        self._host = host
        self._port = port
        self._api_key = api_key
        self._verify_ssl = verify_ssl
        self._ws: Optional["_WebSocket"] = None   # set by _connect()
        self._call_id = 0                          # monotonically increasing request id

    @property
    def _url(self) -> str:
        """The wss:// endpoint this client targets."""
        return f"wss://{self._host}:{self._port}/api/current"

    async def __aenter__(self) -> "TrueNASClient":
        await self._connect()
        return self

    async def __aexit__(self, *_: Any) -> None:
        if self._ws:
            await self._ws.close()
        self._ws = None

    async def _connect(self) -> None:
        """Open the WebSocket and authenticate with the API key."""
        ctx = ssl.create_default_context()
        if not self._verify_ssl:
            # Most TrueNAS boxes ship self-signed certs; verification is opt-in.
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE

        log.info("Connecting to %s …", self._url)
        try:
            self._ws = await _ws_connect(
                host=self._host,
                port=self._port,
                path="/api/current",
                ssl_ctx=ctx,
            )
        except (OSError, asyncio.TimeoutError) as exc:
            log.error("Connection failed: %s", exc)
            raise

        log.info("Authenticating with API key …")
        result = await self.call("auth.login_with_api_key", [self._api_key])
        # Older/newer middleware versions report success differently.
        if result is not True and result != "SUCCESS":
            raise PermissionError(f"Authentication rejected: {result!r}")
        log.info("Connected and authenticated.")

    async def call(self, method: str, params: Optional[list] = None) -> Any:
        """Send one JSON-RPC request and return its result.

        Event notifications (no ``id``) and responses for other requests
        are discarded.  Raises RuntimeError if the API returns an error.
        """
        self._call_id += 1
        req_id = self._call_id

        request = {
            "jsonrpc": "2.0",
            "id": req_id,
            "method": method,
            "params": params or [],
        }
        await self._ws.send(json.dumps(request))

        while True:
            raw = await asyncio.wait_for(self._ws.recv(), timeout=60)
            msg = json.loads(raw)
            if "id" not in msg or msg["id"] != req_id:
                continue   # notification or someone else's response
            if "error" in msg:
                err = msg["error"]
                reason = (
                    err.get("data", {}).get("reason")
                    or err.get("message")
                    or repr(err)
                )
                raise RuntimeError(f"API error [{method}]: {reason}")
            return msg.get("result")


# ─────────────────────────────────────────────────────────────────────────────
# Dataset utilities
# ─────────────────────────────────────────────────────────────────────────────

async def check_dataset_paths(
    client: TrueNASClient,
    paths: list[str],
) -> list[str]:
    """
    Return the subset of *paths* that have no matching ZFS dataset on the
    destination.  The check is best-effort: an empty list is returned both
    when everything exists and when the dataset query itself fails.
    """
    if not paths:
        return []

    wanted = sorted({p.rstrip("/") for p in paths if p})
    log.info("Checking %d share path(s) against destination datasets …", len(wanted))
    try:
        datasets = await client.call("pool.dataset.query") or []
    except RuntimeError as exc:
        log.warning("Could not query datasets (skipping check): %s", exc)
        return []

    mountpoints = {
        ds.get("mountpoint", "").rstrip("/")
        for ds in datasets
        if ds.get("mountpoint")
    }

    missing = [p for p in wanted if p not in mountpoints]
    if missing:
        for p in missing:
            log.warning(" MISSING dataset for path: %s", p)
    else:
        log.info(" All share paths exist as datasets.")
    return missing
+ """ + if not path.startswith("/mnt/"): + log.error("Cannot auto-create dataset for non-/mnt/ path: %s", path) + return False + + name = path[5:].rstrip("/") + log.info("Creating dataset %r …", name) + try: + await client.call("pool.dataset.create", [{"name": name}]) + log.info(" Created: %s", name) + return True + except RuntimeError as exc: + log.error(" Failed to create dataset %r: %s", name, exc) + return False + + +async def create_missing_datasets( + host: str, + port: int, + api_key: str, + paths: list[str], + verify_ssl: bool = False, +) -> None: + """Open a fresh connection and create ZFS datasets for *paths*.""" + async with TrueNASClient( + host=host, port=port, api_key=api_key, verify_ssl=verify_ssl, + ) as client: + for path in paths: + await create_dataset(client, path) diff --git a/truenas_migrate/colors.py b/truenas_migrate/colors.py new file mode 100644 index 0000000..01c02f4 --- /dev/null +++ b/truenas_migrate/colors.py @@ -0,0 +1,55 @@ +"""ANSI color helpers and shared logger.""" +from __future__ import annotations + +import logging +import re as _re +import sys + +_USE_COLOR = sys.stderr.isatty() + + +def _c(code: str, text: str) -> str: + return f"\033[{code}m{text}\033[0m" if _USE_COLOR else text + +def _dim(t: str) -> str: return _c("2", t) +def _bold(t: str) -> str: return _c("1", t) +def _red(t: str) -> str: return _c("31", t) +def _green(t: str) -> str: return _c("32", t) +def _yellow(t: str) -> str: return _c("33", t) +def _cyan(t: str) -> str: return _c("36", t) +def _bold_red(t: str) -> str: return _c("1;31", t) +def _bold_green(t: str) -> str: return _c("1;32", t) +def _bold_yellow(t: str) -> str: return _c("1;33", t) +def _bold_cyan(t: str) -> str: return _c("1;36", t) + + +def _vis_len(s: str) -> int: + """Visible character width of a string, ignoring ANSI escape sequences.""" + return len(_re.sub(r'\033\[[0-9;]*m', '', s)) + + +class _ColorFormatter(logging.Formatter): + _STYLES = { + logging.DEBUG: "2", + logging.INFO: "36", + 
logging.WARNING: "1;33", + logging.ERROR: "1;31", + logging.CRITICAL: "1;31", + } + + def format(self, record: logging.LogRecord) -> str: + ts = self.formatTime(record, self.datefmt) + msg = record.getMessage() + if _USE_COLOR: + code = self._STYLES.get(record.levelno, "0") + level = f"\033[{code}m{record.levelname:<8}\033[0m" + ts = f"\033[2m{ts}\033[0m" + else: + level = f"{record.levelname:<8}" + return f"{ts} {level} {msg}" + + +_handler = logging.StreamHandler() +_handler.setFormatter(_ColorFormatter(datefmt="%H:%M:%S")) +logging.basicConfig(level=logging.INFO, handlers=[_handler]) +log = logging.getLogger("truenas_migrate") diff --git a/truenas_migrate/migrate.py b/truenas_migrate/migrate.py new file mode 100644 index 0000000..9a7fbcc --- /dev/null +++ b/truenas_migrate/migrate.py @@ -0,0 +1,154 @@ +"""Migration routines for SMB and NFS shares.""" +from __future__ import annotations + +import json +from typing import Any + +from .colors import log, _bold, _bold_cyan, _bold_green, _bold_red, _cyan, _yellow +from .client import TrueNASClient +from .summary import Summary + + +# ───────────────────────────────────────────────────────────────────────────── +# Payload builders +# ───────────────────────────────────────────────────────────────────────────── + +# Read-only / server-generated fields that must NOT be sent on create/update +_SMB_SHARE_READONLY = frozenset({"id", "locked"}) + +# CORE SMB share fields that do not exist in the SCALE API +_SMB_SHARE_CORE_EXTRAS = frozenset({ + "vuid", # server-generated Time Machine UUID; SCALE sets this automatically +}) + +# CORE NFS share fields that do not exist in the SCALE API +_NFS_SHARE_CORE_EXTRAS = frozenset({ + "paths", # CORE uses a list; SCALE uses a single "path" string (converted below) + "alldirs", # removed in SCALE + "quiet", # removed in SCALE +}) + + +def _smb_share_payload(share: dict) -> dict: + exclude = _SMB_SHARE_READONLY | _SMB_SHARE_CORE_EXTRAS + return {k: v for k, v in share.items() if k not in 
exclude} + + +def _nfs_share_payload(share: dict) -> dict: + payload = {k: v for k, v in share.items() + if k not in {"id", "locked"} | _NFS_SHARE_CORE_EXTRAS} + # CORE stores export paths as a list under "paths"; SCALE expects a single "path" string. + if "path" not in payload and share.get("paths"): + payload["path"] = share["paths"][0] + return payload + + +# ───────────────────────────────────────────────────────────────────────────── +# Migration routines +# ───────────────────────────────────────────────────────────────────────────── + +async def migrate_smb_shares( + client: TrueNASClient, + shares: list[dict], + dry_run: bool, + summary: Summary, +) -> None: + summary.smb_found = len(shares) + if not shares: + log.info("No SMB shares found in archive.") + return + + log.info("Querying existing SMB shares on destination …") + try: + existing = await client.call("sharing.smb.query") or [] + except RuntimeError as exc: + msg = f"Could not query SMB shares: {exc}" + log.error(msg) + summary.errors.append(msg) + return + + existing_names = {s.get("name", "").lower() for s in existing} + log.info(" Destination has %d existing SMB share(s).", len(existing_names)) + + for share in shares: + name = share.get("name", "") + log.info("%s SMB share %s", _bold("──"), _bold_cyan(repr(name))) + + if name.lower() in existing_names: + log.info(" %s – already exists on destination.", _yellow("SKIP")) + summary.smb_skipped += 1 + continue + + payload = _smb_share_payload(share) + log.debug(" payload: %s", json.dumps(payload)) + + if dry_run: + log.info(" %s would create %s → %s", + _cyan("[DRY RUN]"), _bold_cyan(repr(name)), payload.get("path")) + summary.smb_created += 1 + if payload.get("path"): + summary.paths_to_create.append(payload["path"]) + continue + + try: + r = await client.call("sharing.smb.create", [payload]) + log.info(" %s id=%s", _bold_green("CREATED"), r.get("id")) + summary.smb_created += 1 + except RuntimeError as exc: + log.error(" %s: %s", 
_bold_red("FAILED"), exc) + summary.smb_failed += 1 + summary.errors.append(f"SMB share {name!r}: {exc}") + + +async def migrate_nfs_shares( + client: TrueNASClient, + shares: list[dict], + dry_run: bool, + summary: Summary, +) -> None: + summary.nfs_found = len(shares) + if not shares: + log.info("No NFS shares found in archive.") + return + + log.info("Querying existing NFS shares on destination …") + try: + existing = await client.call("sharing.nfs.query") or [] + except RuntimeError as exc: + msg = f"Could not query NFS shares: {exc}" + log.error(msg) + summary.errors.append(msg) + return + + existing_paths = {s.get("path", "").rstrip("/") for s in existing} + log.info(" Destination has %d existing NFS share(s).", len(existing_paths)) + + for share in shares: + core_paths = share.get("paths") or [] + path = (share.get("path") or (core_paths[0] if core_paths else "")).rstrip("/") + all_paths = [p.rstrip("/") for p in (core_paths if core_paths else ([path] if path else []))] + log.info("%s NFS export %s", _bold("──"), _bold_cyan(repr(path))) + + if path in existing_paths: + log.info(" %s – path already exported on destination.", _yellow("SKIP")) + summary.nfs_skipped += 1 + continue + + payload = _nfs_share_payload(share) + log.debug(" payload: %s", json.dumps(payload)) + + if dry_run: + log.info(" %s would create NFS export for %s", + _cyan("[DRY RUN]"), _bold_cyan(repr(path))) + summary.nfs_created += 1 + summary.paths_to_create.extend(all_paths) + continue + + try: + r = await client.call("sharing.nfs.create", [payload]) + log.info(" %s id=%s", _bold_green("CREATED"), r.get("id")) + summary.nfs_created += 1 + except RuntimeError as exc: + log.error(" %s: %s", _bold_red("FAILED"), exc) + summary.nfs_failed += 1 + summary.errors.append(f"NFS share {path!r}: {exc}") diff --git a/truenas_migrate/summary.py b/truenas_migrate/summary.py new file mode 100644 index 0000000..5a30185 --- /dev/null +++ b/truenas_migrate/summary.py @@ -0,0 +1,93 @@ +"""Migration summary 
# ══ file boundary: truenas_migrate/summary.py ════════════════════════════════
# Module docstring: """Migration summary dataclass and report renderer."""
# (summary.py begins with `from __future__ import annotations` and imports
#  _dim, _bold, _red, _yellow, _cyan, _bold_red, _bold_green, _bold_yellow
#  and _vis_len from .colors.)

from dataclasses import dataclass, field


@dataclass
class Summary:
    """Counters and findings accumulated over one migration pass."""

    # SMB share tallies
    smb_found: int = 0
    smb_created: int = 0
    smb_skipped: int = 0
    smb_failed: int = 0

    # NFS share tallies
    nfs_found: int = 0
    nfs_created: int = 0
    nfs_skipped: int = 0
    nfs_failed: int = 0

    # Human-readable error messages collected along the way
    errors: list[str] = field(default_factory=list)

    # Populated during dry-run dataset safety checks
    paths_to_create: list[str] = field(default_factory=list)
    missing_datasets: list[str] = field(default_factory=list)

    def report(self) -> str:
        """Render the boxed summary, the error list, and (if any) the
        missing-dataset warning as one multi-line string."""
        width = 60

        def stat(label: str, count: int, paint) -> str:
            # Dim zero counters so non-zero ones stand out.
            text = f"{label}={count}"
            return paint(text) if count > 0 else _dim(text)

        def share_line(found: int, created: int, skipped: int, failed: int) -> str:
            return (
                f"{_dim('found=' + str(found))} "
                f"{stat('created', created, _bold_green)} "
                f"{stat('skipped', skipped, _yellow)} "
                f"{stat('failed', failed, _bold_red)}"
            )

        smb_val = share_line(self.smb_found, self.smb_created,
                             self.smb_skipped, self.smb_failed)
        nfs_val = share_line(self.nfs_found, self.nfs_created,
                             self.nfs_skipped, self.nfs_failed)

        rule = _cyan("─" * width)
        side = _cyan("│")

        title_text = "MIGRATION SUMMARY"
        # Center using the plain length — _bold is applied afterwards so the
        # escape codes don't skew the padding.
        lpad = (width - len(title_text)) // 2
        rpad = width - len(title_text) - lpad
        title_row = f"{side}{' ' * lpad}{_bold(title_text)}{' ' * rpad}{side}"

        def row(label: str, val: str) -> str:
            # _vis_len ignores ANSI escapes when computing the fill.
            right = max(0, width - 2 - len(label) - _vis_len(val))
            return f"{side} {_dim(label)}{val}{' ' * right} {side}"

        lines = [
            "",
            f"{_cyan('┌')}{rule}{_cyan('┐')}",
            title_row,
            f"{_cyan('├')}{rule}{_cyan('┤')}",
            row("SMB shares : ", smb_val),
            row("NFS shares : ", nfs_val),
            f"{_cyan('└')}{rule}{_cyan('┘')}",
        ]

        if self.errors:
            lines.append(f"\n {_bold_red(str(len(self.errors)) + ' error(s):')} ")
            for e in self.errors:
                lines.append(f" {_red('•')} {e}")

        if self.missing_datasets:
            lines.append(
                f"\n {_bold_yellow('WARNING:')} "
                f"{len(self.missing_datasets)} share path(s) have no "
                "matching dataset on the destination:"
            )
            for p in self.missing_datasets:
                lines.append(f" {_yellow('•')} {p}")
            lines.append(
                " These paths must exist before shares can be created.\n"
                " Use interactive mode or answer 'y' at the dataset prompt to create them."
            )
        lines.append("")
        return "\n".join(lines)