Redesign CSV templates with human-readable column headers
- Replace API field names (guestok, abe, ro, maproot_user, etc.) with plain-English headers (Guest Access, Access-Based Enumeration, Read Only, Map Root User, etc.) for customer clarity
- Drop comment rows that rendered poorly in spreadsheet apps
- Use two realistic example rows instead to teach by example
- Update csv_source.py to map friendly header names to API field names before validation and coercion (raw API names still accepted)
- Update README column reference to match new header names

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
10
README.md
10
README.md
@@ -95,11 +95,15 @@ Copy and fill in the templates included in this repository:
|
||||
|
||||
Each template includes a header row, annotated comment rows explaining valid values for each column, and one example data row to replace. Lines beginning with `#` are ignored by the parser.
|
||||
|
||||
**SMB columns:** `name` *(required)*, `path` *(required)*, `comment`, `purpose`, `ro`, `browsable`, `guestok`, `abe`, `hostsallow`, `hostsdeny`, `timemachine`, `enabled`
|
||||
**SMB columns:** `Share Name` *(required)*, `Path` *(required)*, `Description`, `Purpose`, `Read Only`, `Browsable`, `Guest Access`, `Access-Based Enumeration`, `Hosts Allow`, `Hosts Deny`, `Time Machine`, `Enabled`
|
||||
|
||||
**NFS columns:** `path` *(required)*, `comment`, `ro`, `maproot_user`, `maproot_group`, `mapall_user`, `mapall_group`, `security`, `hosts`, `networks`, `enabled`
|
||||
**NFS columns:** `Path` *(required)*, `Description`, `Read Only`, `Map Root User`, `Map Root Group`, `Map All User`, `Map All Group`, `Security`, `Allowed Hosts`, `Allowed Networks`, `Enabled`
|
||||
|
||||
Boolean columns accept `true` / `false`. List columns (`hostsallow`, `hostsdeny`, `security`, `hosts`, `networks`) accept space-separated values.
|
||||
Boolean columns (`Read Only`, `Browsable`, etc.) accept `true` or `false`. List columns (`Hosts Allow`, `Hosts Deny`, `Security`, `Allowed Hosts`, `Allowed Networks`) accept space-separated values.
|
||||
|
||||
Valid `Purpose` values: `NO_PRESET`, `DEFAULT_SHARE`, `ENHANCED_TIMEMACHINE`, `MULTI_PROTOCOL_NFS`, `PRIVATE_DATASETS`, `WORM_DROPBOX`
|
||||
|
||||
Valid `Security` values: `SYS`, `KRB5`, `KRB5I`, `KRB5P`
|
||||
|
||||
### Generating an API Key
|
||||
|
||||
|
||||
@@ -1,8 +1,3 @@
|
||||
path,comment,ro,maproot_user,maproot_group,mapall_user,mapall_group,security,hosts,networks,enabled
|
||||
# Required columns : path
|
||||
# security values : SYS KRB5 KRB5I KRB5P (space-separated for multiple; e.g. "SYS KRB5")
|
||||
# hosts : space-separated hostnames or IPs allowed to mount (empty = any host)
|
||||
# networks : space-separated CIDR networks (e.g. "192.168.1.0/24 10.0.0.0/8")
|
||||
# Boolean columns : ro enabled (true or false)
|
||||
# Lines starting with # are ignored. Delete the example row below and add your shares.
|
||||
/mnt/pool/export,Example NFS export,false,root,wheel,,,SYS,,,true
|
||||
Path,Description,Read Only,Map Root User,Map Root Group,Map All User,Map All Group,Security,Allowed Hosts,Allowed Networks,Enabled
|
||||
/mnt/tank/data,Primary data export,false,root,wheel,,,SYS,,192.168.1.0/24,true
|
||||
/mnt/tank/media,Media files read-only,true,,,,,,,,true
|
||||
|
||||
|
Can't render this file because it contains an unexpected character in line 3 and column 83.
|
@@ -1,7 +1,3 @@
|
||||
name,path,comment,purpose,ro,browsable,guestok,abe,hostsallow,hostsdeny,timemachine,enabled
|
||||
# Required columns : name path
|
||||
# purpose values : NO_PRESET DEFAULT_SHARE ENHANCED_TIMEMACHINE MULTI_PROTOCOL_NFS PRIVATE_DATASETS WORM_DROPBOX
|
||||
# List columns : hostsallow hostsdeny (space-separated; e.g. "192.168.1.10 192.168.1.11")
|
||||
# Boolean columns : ro browsable guestok abe timemachine enabled (true or false)
|
||||
# Lines starting with # are ignored. Delete the example row below and add your shares.
|
||||
example-share,/mnt/pool/share,Example share description,NO_PRESET,false,true,false,false,,,false,true
|
||||
Share Name,Path,Description,Purpose,Read Only,Browsable,Guest Access,Access-Based Enumeration,Hosts Allow,Hosts Deny,Time Machine,Enabled
|
||||
Accounting,/mnt/tank/accounting,Accounting department files,NO_PRESET,false,true,false,false,,,false,true
|
||||
Public,/mnt/tank/public,Public read-only share,NO_PRESET,true,true,true,false,,,false,true
|
||||
|
||||
|
Can't render this file because it contains an unexpected character in line 4 and column 68.
|
@@ -10,7 +10,42 @@ from .colors import log
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Column type metadata
|
||||
# Column name mappings (human-readable header → API field name)
|
||||
# Both the friendly names and the raw API names are accepted.
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
# Accepted SMB CSV headers (lowercased) mapped to their TrueNAS API field
# names.  Identity entries (e.g. "path" -> "path") keep the raw API names
# working alongside the friendly spellings.
_SMB_COL_MAP: dict[str, str] = {
    "share name": "name",
    "path": "path",
    "description": "comment",
    "purpose": "purpose",
    "read only": "ro",
    "browsable": "browsable",
    "guest access": "guestok",
    "access-based enumeration": "abe",
    "hosts allow": "hostsallow",
    "hosts deny": "hostsdeny",
    "time machine": "timemachine",
    "enabled": "enabled",
}
|
||||
|
||||
# Accepted NFS CSV headers (lowercased) mapped to their TrueNAS API field
# names.  Identity entries (e.g. "path" -> "path") keep the raw API names
# working alongside the friendly spellings.
_NFS_COL_MAP: dict[str, str] = {
    "path": "path",
    "description": "comment",
    "read only": "ro",
    "map root user": "maproot_user",
    "map root group": "maproot_group",
    "map all user": "mapall_user",
    "map all group": "mapall_group",
    "security": "security",
    "allowed hosts": "hosts",
    "allowed networks": "networks",
    "enabled": "enabled",
}
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Column type metadata (keyed by API field name)
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
# Columns coerced to bool
|
||||
@@ -84,11 +119,18 @@ def _coerce_row(
|
||||
return result
|
||||
|
||||
|
||||
def _normalize_col(col: str, col_map: dict[str, str]) -> str:
    """Translate a CSV header into its API field name.

    The header is stripped and lowercased before lookup.  Headers absent
    from *col_map* are returned in that normalised form, which lets raw
    API field names (already lowercase) pass through unchanged.
    """
    normalised = col.strip().lower()
    if normalised in col_map:
        return col_map[normalised]
    return normalised
|
||||
|
||||
|
||||
def _parse_csv(
|
||||
csv_path: str,
|
||||
bool_cols: frozenset[str],
|
||||
list_cols: frozenset[str],
|
||||
required: frozenset[str],
|
||||
col_map: dict[str, str],
|
||||
label: str,
|
||||
) -> list[dict]:
|
||||
path = Path(csv_path)
|
||||
@@ -104,8 +146,12 @@ def _parse_csv(
|
||||
log.error("%s CSV has no header row: %s", label, csv_path)
|
||||
sys.exit(1)
|
||||
|
||||
header = {c.strip() for c in reader.fieldnames if c is not None}
|
||||
missing_req = required - header
|
||||
# Normalise header names using the column map
|
||||
normalised_header = {
|
||||
_normalize_col(c, col_map)
|
||||
for c in reader.fieldnames if c is not None
|
||||
}
|
||||
missing_req = required - normalised_header
|
||||
if missing_req:
|
||||
log.error(
|
||||
"%s CSV is missing required column(s): %s",
|
||||
@@ -114,7 +160,10 @@ def _parse_csv(
|
||||
sys.exit(1)
|
||||
|
||||
for row_num, row in enumerate(reader, start=2):
|
||||
normalised = {(k or "").strip(): v for k, v in row.items()}
|
||||
normalised = {
|
||||
_normalize_col(k, col_map): v
|
||||
for k, v in row.items() if k is not None
|
||||
}
|
||||
share = _coerce_row(normalised, bool_cols, list_cols, required, row_num)
|
||||
if share is not None:
|
||||
shares.append(share)
|
||||
@@ -133,12 +182,12 @@ def _parse_csv(
|
||||
|
||||
def parse_smb_csv(csv_path: str) -> list[dict]:
    """Read an SMB shares CSV and return share dicts compatible with migrate.py."""
    shares = _parse_csv(
        csv_path, _SMB_BOOL_COLS, _SMB_LIST_COLS, _SMB_REQUIRED, _SMB_COL_MAP, "SMB"
    )
    return shares
|
||||
|
||||
|
||||
def parse_nfs_csv(csv_path: str) -> list[dict]:
    """Read an NFS shares CSV and return share dicts compatible with migrate.py."""
    shares = _parse_csv(
        csv_path, _NFS_BOOL_COLS, _NFS_LIST_COLS, _NFS_REQUIRED, _NFS_COL_MAP, "NFS"
    )
    return shares
|
||||
|
||||
|
||||
def parse_csv_sources(smb_csv: str | None, nfs_csv: str | None) -> dict[str, Any]:
|
||||
|
||||
Reference in New Issue
Block a user