diff --git a/truenas_migrate.py b/truenas_migrate.py index be19690..6b0e910 100644 --- a/truenas_migrate.py +++ b/truenas_migrate.py @@ -136,57 +136,48 @@ class Summary: # Debug archive parser # ───────────────────────────────────────────────────────────────────────────── # -# TrueNAS generates debug archives with its built-in "ixdiagnose" tool (SCALE -# 24.04+) or the older "freenas-debug" tool (CORE / earlier SCALE). -# Neither has a fully stable internal layout across versions, so we try a -# ranked list of known paths and fall back to a keyword heuristic scan. +# TrueNAS SCALE generates debug archives with the "ixdiagnose" tool. +# The internal layout has changed across versions: # -# Known ixdiagnose (SCALE) layouts observed in the wild: -# ixdiagnose/plugins/SMB/sharing.smb.query.json – SMB shares -# ixdiagnose/plugins/SMB/smb.config.json – SMB global config -# ixdiagnose/plugins/NFS/sharing.nfs.query.json – NFS shares -# ixdiagnose/plugins/Sharing/sharing.smb.query.json -# ixdiagnose/plugins/Sharing/sharing.nfs.query.json +# SCALE 24.04+ (plugins layout, lowercase dirs, combined JSON files) +# ixdiagnose/plugins/smb/smb_info.json – SMB shares + config combined +# ixdiagnose/plugins/nfs/nfs_config.json – NFS shares + config combined # -# Known freenas-debug (CORE) layouts: -# freenas-debug/sharing/smb.json – SMB shares -# freenas-debug/sharing/nfs.json – NFS shares -# (CORE SMB config is plain text; JSON form may not exist) +# Older SCALE (plugins layout, uppercase dirs, per-query JSON files) +# ixdiagnose/plugins/SMB/sharing.smb.query.json +# ixdiagnose/plugins/SMB/smb.config.json +# ixdiagnose/plugins/NFS/sharing.nfs.query.json +# ixdiagnose/plugins/Sharing/sharing.smb.query.json +# ixdiagnose/plugins/Sharing/sharing.nfs.query.json +# +# TrueNAS CORE uses the "freenas-debug" tool (stored as "fndebug" inside the +# archive). It produces only plain-text dump files – there is NO JSON share +# data in CORE debug archives. 
The script detects CORE archives and +# extracts the JSON blocks embedded inside those plain-text dump files instead. _CANDIDATES: dict[str, list[str]] = { "smb_shares": [ # SCALE 24.04+ – combined plugin file; shares are under "sharing_smb_query" "ixdiagnose/plugins/smb/smb_info.json", - # Older SCALE layouts + # Older SCALE – uppercase plugin dirs, per-query files "ixdiagnose/plugins/SMB/sharing.smb.query.json", "ixdiagnose/plugins/Sharing/sharing.smb.query.json", "ixdiagnose/SMB/sharing.smb.query.json", - # CORE / freenas-debug - "freenas-debug/sharing/smb.json", - "sharing/smb.json", - "middleware/sharing.smb.query.json", ], "nfs_shares": [ # SCALE 24.04+ – combined plugin file; shares are under "sharing_nfs_query" "ixdiagnose/plugins/nfs/nfs_config.json", - # Older SCALE layouts + # Older SCALE – uppercase plugin dirs, per-query files "ixdiagnose/plugins/NFS/sharing.nfs.query.json", "ixdiagnose/plugins/Sharing/sharing.nfs.query.json", "ixdiagnose/NFS/sharing.nfs.query.json", - # CORE / freenas-debug - "freenas-debug/sharing/nfs.json", - "sharing/nfs.json", - "middleware/sharing.nfs.query.json", ], "smb_config": [ # SCALE 24.04+ – combined plugin file; config is under "smb_config" "ixdiagnose/plugins/smb/smb_info.json", - # Older SCALE layouts + # Older SCALE – uppercase plugin dirs "ixdiagnose/plugins/SMB/smb.config.json", "ixdiagnose/SMB/smb.config.json", - # CORE / freenas-debug - "freenas-debug/SMB/smb_config.json", - "middleware/smb.config.json", ], } @@ -197,13 +188,28 @@ _KEY_WITHIN_FILE: dict[str, str] = { "smb_config": "smb_config", } -# Keyword fragments for heuristic fallback scan +# Keyword fragments for heuristic fallback scan (SCALE archives only) _KEYWORDS: dict[str, list[str]] = { "smb_shares": ["sharing.smb", "smb_share", "sharing/smb", "smb_info"], "nfs_shares": ["sharing.nfs", "nfs_share", "sharing/nfs", "nfs_config"], "smb_config": ["smb.config", "smb_config", "smb_info"], } +# Presence of this path prefix identifies a TrueNAS CORE archive 
(fndebug / +# freenas-debug). CORE stores diagnostics as plain-text dump files, but each +# dump embeds JSON blocks that we can extract. +_CORE_MARKER = "ixdiagnose/fndebug" + +# CORE SMB config fields that do not exist in the SCALE API and must be +# stripped before calling smb.update on the destination. +_SMB_CONFIG_CORE_EXTRAS = frozenset({ + "cifs_SID", # renamed to server_sid in SCALE (already stripped) + "loglevel", # removed in SCALE + "netbiosname_b", # HA node-B hostname; not applicable in SCALE + "netbiosname_local",# HA active-node field; not applicable in SCALE + "next_rid", # internal RID counter; not settable via API +}) + def _members_map(tf: tarfile.TarFile) -> dict[str, tarfile.TarInfo]: """Return {normalised_path: TarInfo} for every member.""" @@ -278,6 +284,106 @@ def _find_data( return None +def _extract_core_dump_json(dump_text: str, title_fragment: str) -> list[Any]: + """ + Extract all top-level JSON values from a named section of a CORE dump.txt. + + CORE dump sections look like: + +--------...--------+ + + SECTION TITLE + ← title line (contains the section name) + +--------...--------+ + + debug finished in N seconds for SECTION TITLE + + Returns a list of parsed JSON values found in the content block, in order. + An empty list is returned when the section is not found or contains no JSON. 
+ """ + import re as _re + + # Split on the horizontal rule lines + parts = _re.split(r'\+[-]{20,}\+', dump_text) + + for i, part in enumerate(parts): + if title_fragment.lower() in part.lower() and i + 1 < len(parts): + content = parts[i + 1] + # Trim the "debug finished …" trailer and surrounding whitespace + content = _re.sub( + r'debug finished.*', '', content, + flags=_re.IGNORECASE | _re.DOTALL, + ).strip() + + # Greedily parse consecutive JSON values from the content + results: list[Any] = [] + decoder = json.JSONDecoder() + pos = 0 + while pos < len(content): + remaining = content[pos:].lstrip() + if not remaining or remaining[0] not in "{[": + break + pos += len(content[pos:]) - len(remaining) # account for whitespace + try: + val, end = decoder.raw_decode(remaining) + results.append(val) + pos += end + except json.JSONDecodeError: + break + return results + + return [] + + +def _parse_core_into( + tf: tarfile.TarFile, + members: dict[str, tarfile.TarInfo], + result: dict[str, Any], +) -> None: + """ + Populate *result* from TrueNAS CORE fndebug dump files. 
+ + SMB dump (ixdiagnose/fndebug/SMB/dump.txt) + "Database Dump" section → JSON object (global config) + JSON array (shares) + + NFS dump (ixdiagnose/fndebug/NFS/dump.txt) + "Configuration" section → JSON object (global config) + JSON array (shares) + """ + log.info("TrueNAS CORE archive detected; parsing fndebug dump files.") + + smb_key = "ixdiagnose/fndebug/SMB/dump.txt" + if smb_key in members: + fh = tf.extractfile(members[smb_key]) + dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] + vals = _extract_core_dump_json(dump, "Database Dump") + if vals and isinstance(vals[0], dict): + result["smb_config"] = vals[0] + log.info(" smb_config → %s (CORE)", smb_key) + if len(vals) >= 2 and isinstance(vals[1], list): + result["smb_shares"] = vals[1] + log.info(" smb_shares → %s (CORE, %d share(s))", smb_key, len(vals[1])) + elif result["smb_config"] is not None: + log.warning(" smb_shares → NOT FOUND in Database Dump") + else: + log.warning(" SMB dump not found: %s", smb_key) + + nfs_key = "ixdiagnose/fndebug/NFS/dump.txt" + if nfs_key in members: + fh = tf.extractfile(members[nfs_key]) + dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr] + vals = _extract_core_dump_json(dump, "Configuration") + if len(vals) >= 2 and isinstance(vals[1], list): + result["nfs_shares"] = vals[1] + log.info(" nfs_shares → %s (CORE, %d share(s))", nfs_key, len(vals[1])) + else: + log.warning(" nfs_shares → NOT FOUND in Configuration") + else: + log.warning(" NFS dump not found: %s", nfs_key) + + if not result["smb_shares"] and not result["nfs_shares"]: + log.warning( + "No share data found in CORE archive. " + "This is expected when SMB/NFS services were disabled on the source system." 
+ ) + + @contextlib.contextmanager def _open_source_tar(tar_path: str): """ @@ -325,23 +431,31 @@ def parse_archive(tar_path: str) -> dict[str, Any]: members = _members_map(tf) log.info(" Archive contains %d total entries.", len(members)) - for key in ("smb_shares", "nfs_shares", "smb_config"): - data = _find_data(tf, members, key) - if data is None: - log.warning(" %-12s → NOT FOUND", key) - continue + is_core = any( + p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") + for p in members + ) - if key in ("smb_shares", "nfs_shares"): - if isinstance(data, list): - result[key] = data - elif isinstance(data, dict): - # Some versions wrap the list: {"result": [...]} - for v in data.values(): - if isinstance(v, list): - result[key] = v - break - else: - result[key] = data if isinstance(data, dict) else None + if is_core: + _parse_core_into(tf, members, result) + else: + for key in ("smb_shares", "nfs_shares", "smb_config"): + data = _find_data(tf, members, key) + if data is None: + log.warning(" %-12s → NOT FOUND", key) + continue + + if key in ("smb_shares", "nfs_shares"): + if isinstance(data, list): + result[key] = data + elif isinstance(data, dict): + # Some versions wrap the list: {"result": [...]} + for v in data.values(): + if isinstance(v, list): + result[key] = v + break + else: + result[key] = data if isinstance(data, dict) else None except (tarfile.TarError, OSError) as exc: log.error("Failed to open archive: %s", exc) @@ -358,28 +472,52 @@ def parse_archive(tar_path: str) -> dict[str, Any]: def list_archive_and_exit(tar_path: str) -> None: """ - Print a structured listing of all JSON files in the archive, then exit. - Helps confirm the archive actually contains the data we need. + Print a structured listing of the archive contents, then exit. + For SCALE archives: lists all .json plugin files. + For CORE archives: lists the fndebug dump files and the JSON sections + that contain share / config data. 
""" - print(f"\nJSON files in archive: {tar_path}\n") try: with _open_source_tar(tar_path) as tf: - json_members = sorted( - (m for m in tf.getmembers() if m.name.endswith(".json")), - key=lambda m: m.name, + members_map = _members_map(tf) + is_core = any( + p == _CORE_MARKER or p.startswith(_CORE_MARKER + "/") + for p in members_map ) - if not json_members: - print(" (no .json files found)") + + if is_core: + print(f"\nTrueNAS CORE archive: {tar_path}\n") + print(" fndebug plain-text dump files (JSON is embedded inside):\n") + dump_files = sorted( + p for p in members_map + if p.startswith(_CORE_MARKER + "/") and p.endswith(".txt") + ) + for p in dump_files: + size = members_map[p].size / 1024 + print(f" {p} ({size:.1f} KB)") + print() + print(" Data this tool will extract:") + print(" SMB config + shares → fndebug/SMB/dump.txt " + "(\"Database Dump\" section)") + print(" NFS shares → fndebug/NFS/dump.txt " + "(\"Configuration\" section)") else: - # Group by top-level directory for readability - current_dir = "" - for m in json_members: - parts = m.name.lstrip("./").split("/") - top = "/".join(parts[:-1]) if len(parts) > 1 else "" - if top != current_dir: - print(f"\n {top or '(root)'}/") - current_dir = top - print(f" {parts[-1]} ({m.size / 1024:.1f} KB)") + print(f"\nJSON plugin files in archive: {tar_path}\n") + json_members = sorted( + (m for m in tf.getmembers() if m.name.endswith(".json")), + key=lambda m: m.name, + ) + if not json_members: + print(" (no .json files found)") + else: + current_dir = "" + for m in json_members: + parts = m.name.lstrip("./").split("/") + top = "/".join(parts[:-1]) if len(parts) > 1 else "" + if top != current_dir: + print(f"\n {top or '(root)'}/") + current_dir = top + print(f" {parts[-1]} ({m.size / 1024:.1f} KB)") except (tarfile.TarError, OSError) as exc: sys.exit(f"ERROR: {exc}") print() @@ -405,7 +543,8 @@ def _nfs_share_payload(share: dict) -> dict: def _smb_config_payload(config: dict) -> dict: - return {k: v for k, v in 
config.items() if k not in _SMB_CONFIG_READONLY} + exclude = _SMB_CONFIG_READONLY | _SMB_CONFIG_CORE_EXTRAS + return {k: v for k, v in config.items() if k not in exclude} # ─────────────────────────────────────────────────────────────────────────────