Redesign CSV templates with human-readable column headers
- Replace API field names (guestok, abe, ro, maproot_user, etc.) with plain-English headers (Guest Access, Access-Based Enumeration, Read Only, Map Root User, etc.) for customer clarity
- Drop comment rows that rendered poorly in spreadsheet apps; use two realistic example rows instead to teach by example
- Update csv_source.py to map friendly header names to API field names before validation and coercion (raw API names are still accepted)
- Update README column reference to match the new header names

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,7 +10,42 @@ from .colors import log
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Column type metadata
|
||||
# Column name mappings (human-readable header → API field name)
|
||||
# Both the friendly names and the raw API names are accepted.
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
_SMB_COL_MAP: dict[str, str] = {
|
||||
"share name": "name",
|
||||
"path": "path",
|
||||
"description": "comment",
|
||||
"purpose": "purpose",
|
||||
"read only": "ro",
|
||||
"browsable": "browsable",
|
||||
"guest access": "guestok",
|
||||
"access-based enumeration": "abe",
|
||||
"hosts allow": "hostsallow",
|
||||
"hosts deny": "hostsdeny",
|
||||
"time machine": "timemachine",
|
||||
"enabled": "enabled",
|
||||
}
|
||||
|
||||
_NFS_COL_MAP: dict[str, str] = {
|
||||
"path": "path",
|
||||
"description": "comment",
|
||||
"read only": "ro",
|
||||
"map root user": "maproot_user",
|
||||
"map root group": "maproot_group",
|
||||
"map all user": "mapall_user",
|
||||
"map all group": "mapall_group",
|
||||
"security": "security",
|
||||
"allowed hosts": "hosts",
|
||||
"allowed networks": "networks",
|
||||
"enabled": "enabled",
|
||||
}
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Column type metadata (keyed by API field name)
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
# Columns coerced to bool
|
||||
@@ -84,11 +119,18 @@ def _coerce_row(
|
||||
return result
|
||||
|
||||
|
||||
def _normalize_col(col: str, col_map: dict[str, str]) -> str:
|
||||
"""Map a header name to its API field name; falls back to the lowercased original."""
|
||||
key = col.strip().lower()
|
||||
return col_map.get(key, key)
|
||||
|
||||
|
||||
def _parse_csv(
|
||||
csv_path: str,
|
||||
bool_cols: frozenset[str],
|
||||
list_cols: frozenset[str],
|
||||
required: frozenset[str],
|
||||
col_map: dict[str, str],
|
||||
label: str,
|
||||
) -> list[dict]:
|
||||
path = Path(csv_path)
|
||||
@@ -104,8 +146,12 @@ def _parse_csv(
|
||||
log.error("%s CSV has no header row: %s", label, csv_path)
|
||||
sys.exit(1)
|
||||
|
||||
header = {c.strip() for c in reader.fieldnames if c is not None}
|
||||
missing_req = required - header
|
||||
# Normalise header names using the column map
|
||||
normalised_header = {
|
||||
_normalize_col(c, col_map)
|
||||
for c in reader.fieldnames if c is not None
|
||||
}
|
||||
missing_req = required - normalised_header
|
||||
if missing_req:
|
||||
log.error(
|
||||
"%s CSV is missing required column(s): %s",
|
||||
@@ -114,7 +160,10 @@ def _parse_csv(
|
||||
sys.exit(1)
|
||||
|
||||
for row_num, row in enumerate(reader, start=2):
|
||||
normalised = {(k or "").strip(): v for k, v in row.items()}
|
||||
normalised = {
|
||||
_normalize_col(k, col_map): v
|
||||
for k, v in row.items() if k is not None
|
||||
}
|
||||
share = _coerce_row(normalised, bool_cols, list_cols, required, row_num)
|
||||
if share is not None:
|
||||
shares.append(share)
|
||||
@@ -133,12 +182,12 @@ def _parse_csv(
|
||||
|
||||
def parse_smb_csv(csv_path: str) -> list[dict]:
    """Parse an SMB shares CSV. Returns share dicts compatible with migrate.py.

    Delegates to _parse_csv with the SMB column metadata; _SMB_COL_MAP lets
    friendly headers ("Guest Access") and raw API names ("guestok") both work.
    """
    # Fixed: diff residue left a stale 5-argument call here alongside the new
    # one; only the 6-argument form (with the column map) matches _parse_csv.
    return _parse_csv(csv_path, _SMB_BOOL_COLS, _SMB_LIST_COLS, _SMB_REQUIRED, _SMB_COL_MAP, "SMB")
|
||||
|
||||
|
||||
def parse_nfs_csv(csv_path: str) -> list[dict]:
    """Parse an NFS shares CSV. Returns share dicts compatible with migrate.py.

    Delegates to _parse_csv with the NFS column metadata; _NFS_COL_MAP lets
    friendly headers ("Map Root User") and raw API names ("maproot_user") both work.
    """
    # Fixed: diff residue left a stale 5-argument call here alongside the new
    # one; only the 6-argument form (with the column map) matches _parse_csv.
    return _parse_csv(csv_path, _NFS_BOOL_COLS, _NFS_LIST_COLS, _NFS_REQUIRED, _NFS_COL_MAP, "NFS")
|
||||
|
||||
|
||||
def parse_csv_sources(smb_csv: str | None, nfs_csv: str | None) -> dict[str, Any]:
|
||||
|
||||
Reference in New Issue
Block a user