Remove SMB global config migration

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-04 21:13:13 -05:00
parent 8353b50924
commit e6e9d4a000

View File

@@ -2,10 +2,9 @@
"""
truenas_migrate.py TrueNAS Share Migration Tool
=====================================================
Reads SMB shares, NFS shares, and SMB global config from a TrueNAS debug
archive (.tar / .tgz) produced by the built-in "Save Debug" feature, then
re-creates them on a destination TrueNAS system via the JSON-RPC 2.0
WebSocket API (TrueNAS 25.04+).
Reads SMB shares and NFS shares from a TrueNAS debug archive (.tar / .tgz)
produced by the built-in "Save Debug" feature, then re-creates them on a
destination TrueNAS system via the JSON-RPC 2.0 WebSocket API (TrueNAS 25.04+).
SAFE BY DEFAULT
• Existing shares are never overwritten or deleted.
@@ -25,13 +24,13 @@ QUICK START
--api-key "1-xxxxxxxxxxxx" \\
--dry-run
# 3. Live migration of all three data types:
# 3. Live migration:
python truenas_migrate.py \\
--debug-tar debug.tgz \\
--dest 192.168.1.50 \\
--api-key "1-xxxxxxxxxxxx"
# 4. Migrate only SMB shares (skip NFS and global config):
# 4. Migrate only SMB shares (skip NFS):
python truenas_migrate.py \\
--debug-tar debug.tgz \\
--dest 192.168.1.50 \\
@@ -42,7 +41,6 @@ CONFLICT POLICY
Shares that already exist on the destination are silently skipped:
    • SMB matched by share name (case-insensitive)
    • NFS matched by export path (exact match)
    • SMB global config is always applied unless --migrate excludes "smb-config".
"""
from __future__ import annotations
@@ -137,7 +135,6 @@ class Summary:
nfs_skipped: int = 0
nfs_failed: int = 0
cfg_applied: bool = False
errors: list[str] = field(default_factory=list)
# Populated during dry-run dataset safety checks
@@ -163,7 +160,6 @@ class Summary:
f"{_stat('skipped', self.nfs_skipped, _yellow)} "
f"{_stat('failed', self.nfs_failed, _bold_red)}"
)
cfg_val = _bold_green("applied") if self.cfg_applied else _dim("not applied")
hr = _cyan("─" * w)
tl = _cyan("╭"); tr = _cyan("╮")
@@ -187,7 +183,6 @@ class Summary:
f"{ml}{hr}{mr}",
row("SMB shares : ", smb_val),
row("NFS shares : ", nfs_val),
row("SMB config : ", cfg_val),
f"{bl}{hr}{br}",
]
@@ -252,27 +247,18 @@ _CANDIDATES: dict[str, list[str]] = {
"ixdiagnose/plugins/Sharing/sharing.nfs.query.json",
"ixdiagnose/NFS/sharing.nfs.query.json",
],
"smb_config": [
# SCALE 24.04+ combined plugin file; config is under "smb_config"
"ixdiagnose/plugins/smb/smb_info.json",
# Older SCALE uppercase plugin dirs
"ixdiagnose/plugins/SMB/smb.config.json",
"ixdiagnose/SMB/smb.config.json",
],
}
# When a candidate file bundles multiple datasets, pull out the right sub-key.
_KEY_WITHIN_FILE: dict[str, str] = {
"smb_shares": "sharing_smb_query",
"nfs_shares": "sharing_nfs_query",
"smb_config": "smb_config",
}
# Keyword fragments for heuristic fallback scan (SCALE archives only)
_KEYWORDS: dict[str, list[str]] = {
"smb_shares": ["sharing.smb", "smb_share", "sharing/smb", "smb_info"],
"nfs_shares": ["sharing.nfs", "nfs_share", "sharing/nfs", "nfs_config"],
"smb_config": ["smb.config", "smb_config", "smb_info"],
}
# Presence of this path prefix identifies a TrueNAS CORE archive (fndebug /
@@ -280,16 +266,6 @@ _KEYWORDS: dict[str, list[str]] = {
# dump embeds JSON blocks that we can extract.
_CORE_MARKER = "ixdiagnose/fndebug"
# CORE SMB config fields that do not exist in the SCALE API and must be
# stripped before calling smb.update on the destination.
_SMB_CONFIG_CORE_EXTRAS = frozenset({
"cifs_SID", # renamed to server_sid in SCALE (already stripped)
"loglevel", # removed in SCALE
"netbiosname_b", # HA node-B hostname; not applicable in SCALE
"netbiosname_local",# HA active-node field; not applicable in SCALE
"next_rid", # internal RID counter; not settable via API
})
def _members_map(tf: tarfile.TarFile) -> dict[str, tarfile.TarInfo]:
"""Return {normalised_path: TarInfo} for every member."""
@@ -433,13 +409,10 @@ def _parse_core_into(
fh = tf.extractfile(members[smb_key])
dump = fh.read().decode("utf-8", errors="replace") # type: ignore[union-attr]
vals = _extract_core_dump_json(dump, "Database Dump")
if vals and isinstance(vals[0], dict):
result["smb_config"] = vals[0]
log.info(" smb_config → %s (CORE)", smb_key)
if len(vals) >= 2 and isinstance(vals[1], list):
result["smb_shares"] = vals[1]
log.info(" smb_shares → %s (CORE, %d share(s))", smb_key, len(vals[1]))
elif result["smb_config"] is not None:
elif vals:
log.warning(" smb_shares → NOT FOUND in Database Dump")
else:
log.warning(" SMB dump not found: %s", smb_key)
@@ -496,14 +469,13 @@ def _open_source_tar(tar_path: str):
def parse_archive(tar_path: str) -> dict[str, Any]:
"""
Extract SMB shares, NFS shares, and SMB config from the debug archive.
Returns: {"smb_shares": list, "nfs_shares": list, "smb_config": dict|None}
Extract SMB shares and NFS shares from the debug archive.
Returns: {"smb_shares": list, "nfs_shares": list}
"""
log.info("Opening archive: %s", tar_path)
result: dict[str, Any] = {
"smb_shares": [],
"nfs_shares": [],
"smb_config": None,
}
try:
@@ -519,33 +491,29 @@ def parse_archive(tar_path: str) -> dict[str, Any]:
if is_core:
_parse_core_into(tf, members, result)
else:
for key in ("smb_shares", "nfs_shares", "smb_config"):
for key in ("smb_shares", "nfs_shares"):
data = _find_data(tf, members, key)
if data is None:
log.warning(" %-12s → NOT FOUND", key)
continue
if key in ("smb_shares", "nfs_shares"):
if isinstance(data, list):
result[key] = data
elif isinstance(data, dict):
# Some versions wrap the list: {"result": [...]}
for v in data.values():
if isinstance(v, list):
result[key] = v
break
else:
result[key] = data if isinstance(data, dict) else None
if isinstance(data, list):
result[key] = data
elif isinstance(data, dict):
# Some versions wrap the list: {"result": [...]}
for v in data.values():
if isinstance(v, list):
result[key] = v
break
except (tarfile.TarError, OSError) as exc:
log.error("Failed to open archive: %s", exc)
sys.exit(1)
log.info(
"Parsed: %d SMB share(s), %d NFS share(s), SMB config=%s",
"Parsed: %d SMB share(s), %d NFS share(s)",
len(result["smb_shares"]),
len(result["nfs_shares"]),
"found" if result["smb_config"] else "not found",
)
return result
@@ -610,7 +578,6 @@ def list_archive_and_exit(tar_path: str) -> None:
# Read-only / server-generated fields that must NOT be sent on create/update
_SMB_SHARE_READONLY = frozenset({"id", "locked"})
_SMB_CONFIG_READONLY = frozenset({"id", "server_sid"})
# CORE SMB share fields that do not exist in the SCALE API
_SMB_SHARE_CORE_EXTRAS = frozenset({
@@ -639,11 +606,6 @@ def _nfs_share_payload(share: dict) -> dict:
return payload
def _smb_config_payload(config: dict) -> dict:
exclude = _SMB_CONFIG_READONLY | _SMB_CONFIG_CORE_EXTRAS
return {k: v for k, v in config.items() if k not in exclude}
# ─────────────────────────────────────────────────────────────────────────────
# Minimal WebSocket client (stdlib only, RFC 6455)
# ─────────────────────────────────────────────────────────────────────────────
@@ -1071,39 +1033,6 @@ async def migrate_nfs_shares(
summary.errors.append(f"NFS share {path!r}: {exc}")
async def migrate_smb_config(
client: TrueNASClient,
config: Optional[dict],
dry_run: bool,
summary: Summary,
) -> None:
if not config:
log.info("No SMB global config found in archive – skipping.")
return
payload = _smb_config_payload(config)
log.info("%s SMB global config", _bold("──"))
log.info(
" netbiosname=%-20s workgroup=%-15s encryption=%s",
repr(payload.get("netbiosname")),
repr(payload.get("workgroup")),
repr(payload.get("encryption")),
)
if dry_run:
log.info(" %s would call smb.update", _cyan("[DRY RUN]"))
summary.cfg_applied = True
return
try:
await client.call("smb.update", [payload])
log.info(" %s", _bold_green("APPLIED"))
summary.cfg_applied = True
except RuntimeError as exc:
log.error(" %s: %s", _bold_red("FAILED"), exc)
summary.errors.append(f"SMB config: {exc}")
# ─────────────────────────────────────────────────────────────────────────────
# CLI
# ─────────────────────────────────────────────────────────────────────────────
@@ -1140,10 +1069,6 @@ async def run(
await migrate_nfs_shares(
client, archive["nfs_shares"], args.dry_run, summary)
if "smb-config" in migrate_set:
await migrate_smb_config(
client, archive["smb_config"], args.dry_run, summary)
# During dry runs, verify that every path we would create a share for
# actually exists as a ZFS dataset on the destination system.
if args.dry_run and summary.paths_to_create:
@@ -1293,17 +1218,16 @@ def interactive_mode() -> None:
print(f"\n {_bold('What to migrate?')}")
print(f" {_cyan('1.')} SMB shares")
print(f" {_cyan('2.')} NFS shares")
print(f" {_cyan('3.')} SMB global config")
sel_raw = _prompt(
"Selection (space-separated numbers, Enter for all)", default="1 2 3"
"Selection (space-separated numbers, Enter for all)", default="1 2"
)
_sel_map = {"1": "smb", "2": "nfs", "3": "smb-config"}
_sel_map = {"1": "smb", "2": "nfs"}
migrate: list[str] = []
for tok in sel_raw.split():
if tok in _sel_map and _sel_map[tok] not in migrate:
migrate.append(_sel_map[tok])
if not migrate:
migrate = ["smb", "nfs", "smb-config"]
migrate = ["smb", "nfs"]
# 5 ── Parse archive once (reused for dry + live runs) ────────────────────
print()
@@ -1429,12 +1353,12 @@ def main() -> None:
p.add_argument(
"--migrate",
nargs="+",
choices=["smb", "nfs", "smb-config"],
default=["smb", "nfs", "smb-config"],
choices=["smb", "nfs"],
default=["smb", "nfs"],
metavar="TYPE",
help=(
"What to migrate. Choices: smb nfs smb-config "
"(default: all three). Example: --migrate smb nfs"
"What to migrate. Choices: smb nfs "
"(default: both). Example: --migrate smb"
),
)
p.add_argument(