Working base cli webui
@@ -4,9 +4,8 @@ import sys
from types import ModuleType
from typing import Optional

from . import config, flakes, join, machines, secrets, vms, webui
from . import webui
from .custom_logger import register
from .ssh import cli as ssh_cli

log = logging.getLogger(__name__)

@@ -28,34 +27,9 @@ def create_parser(prog: Optional[str] = None) -> argparse.ArgumentParser:

    subparsers = parser.add_subparsers()

    parser_flake = subparsers.add_parser(
        "flakes", help="create a clan flake inside the current directory"
    )
    flakes.register_parser(parser_flake)

    parser_join = subparsers.add_parser("join", help="join a remote clan")
    join.register_parser(parser_join)

    parser_config = subparsers.add_parser("config", help="set nixos configuration")
    config.register_parser(parser_config)

    parser_ssh = subparsers.add_parser("ssh", help="ssh to a remote machine")
    ssh_cli.register_parser(parser_ssh)

    parser_secrets = subparsers.add_parser("secrets", help="manage secrets")
    secrets.register_parser(parser_secrets)

    parser_machine = subparsers.add_parser(
        "machines", help="Manage machines and their configuration"
    )
    machines.register_parser(parser_machine)

    parser_webui = subparsers.add_parser("webui", help="start webui")
    webui.register_parser(parser_webui)

    parser_vms = subparsers.add_parser("vms", help="manage virtual machines")
    vms.register_parser(parser_vms)

    # if args.debug:
    register(logging.DEBUG)
    log.debug("Debug log activated")
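Illustrative only, not part of this commit: a minimal sketch of how the parser assembled above might be driven, assuming create_parser is exported from the clan_cli package; the flake name is a placeholder.

    from clan_cli import create_parser  # assumed import path

    parser = create_parser(prog="clan")
    args = parser.parse_args(["machines", "list", "myflake"])
    args.func(args)  # dispatches to the handler registered via set_defaults(func=...)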
@@ -1,374 +0,0 @@
#!/usr/bin/env python3
import argparse
import json
import os
import re
import shlex
import subprocess
import sys
from pathlib import Path
from typing import Any, Optional, Tuple, get_origin

from clan_cli.dirs import machine_settings_file, specific_flake_dir
from clan_cli.errors import ClanError
from clan_cli.git import commit_file
from clan_cli.nix import nix_eval
from clan_cli.types import FlakeName

script_dir = Path(__file__).parent


# nixos option type description to python type
def map_type(type: str) -> Any:
    if type == "boolean":
        return bool
    elif type in [
        "integer",
        "signed integer",
        "16 bit unsigned integer; between 0 and 65535 (both inclusive)",
    ]:
        return int
    elif type == "string":
        return str
    # lib.type.passwdEntry
    elif type == "string, not containing newlines or colons":
        return str
    elif type.startswith("null or "):
        subtype = type.removeprefix("null or ")
        return Optional[map_type(subtype)]
    elif type.startswith("attribute set of"):
        subtype = type.removeprefix("attribute set of ")
        return dict[str, map_type(subtype)]  # type: ignore
    elif type.startswith("list of"):
        subtype = type.removeprefix("list of ")
        return list[map_type(subtype)]  # type: ignore
    else:
        raise ClanError(f"Unknown type {type}")


# merge two dicts recursively
def merge(a: dict, b: dict, path: list[str] = []) -> dict:
    for key in b:
        if key in a:
            if isinstance(a[key], dict) and isinstance(b[key], dict):
                merge(a[key], b[key], path + [str(key)])
            elif isinstance(a[key], list) and isinstance(b[key], list):
                a[key].extend(b[key])
            elif a[key] != b[key]:
                a[key] = b[key]
        else:
            a[key] = b[key]
    return a


# A container inheriting from list, but overriding __contains__ to return True
# for all values.
# This is used to allow any value for the "choices" field of argparse
class AllContainer(list):
    def __contains__(self, item: Any) -> bool:
        return True


# value is always a list, as the arg parser cannot know the type upfront
# and therefore always allows multiple arguments.
def cast(value: Any, type: Any, opt_description: str) -> Any:
    try:
        # handle bools
        if isinstance(type, bool):
            if value[0] in ["true", "True", "yes", "y", "1"]:
                return True
            elif value[0] in ["false", "False", "no", "n", "0"]:
                return False
            else:
                raise ClanError(f"Invalid value {value} for boolean")
        # handle lists
        elif get_origin(type) == list:
            subtype = type.__args__[0]
            return [cast([x], subtype, opt_description) for x in value]
        # handle dicts
        elif get_origin(type) == dict:
            if not isinstance(value, dict):
                raise ClanError(
                    f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
                )
            subtype = type.__args__[1]
            return {k: cast(v, subtype, opt_description) for k, v in value.items()}
        elif str(type) == "typing.Optional[str]":
            if value[0] in ["null", "None"]:
                return None
            return value[0]
        else:
            if len(value) > 1:
                raise ClanError(f"Too many values for {opt_description}")
            return type(value[0])
    except ValueError:
        raise ClanError(
            f"Invalid type for option {opt_description} (expected {type.__name__})"
        )


def options_for_machine(
    flake_name: FlakeName, machine_name: str, show_trace: bool = False
) -> dict:
    clan_dir = specific_flake_dir(flake_name)
    flags = []
    if show_trace:
        flags.append("--show-trace")
    flags.append(
        f"{clan_dir}#nixosConfigurations.{machine_name}.config.clanCore.optionsNix"
    )
    cmd = nix_eval(flags=flags)
    proc = subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        text=True,
    )
    if proc.returncode != 0:
        raise ClanError(
            f"Failed to read options for machine {machine_name}:\n{shlex.join(cmd)}\nexit with {proc.returncode}"
        )
    return json.loads(proc.stdout)


def read_machine_option_value(
    flake_name: FlakeName, machine_name: str, option: str, show_trace: bool = False
) -> str:
    clan_dir = specific_flake_dir(flake_name)
    # use nix eval to read from .#nixosConfigurations.default.config.{option}
    # this will give us the evaluated config with the options attribute
    cmd = nix_eval(
        flags=[
            "--show-trace",
            f"{clan_dir}#nixosConfigurations.{machine_name}.config.{option}",
        ],
    )
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)
    if proc.returncode != 0:
        raise ClanError(
            f"Failed to read option {option}:\n{shlex.join(cmd)}\nexit with {proc.returncode}"
        )
    value = json.loads(proc.stdout)
    # print the value so that the output can be copied and fed as an input.
    # for example a list should be displayed as space separated values surrounded by quotes.
    if isinstance(value, list):
        out = " ".join([json.dumps(x) for x in value])
    elif isinstance(value, dict):
        out = json.dumps(value, indent=2)
    else:
        out = json.dumps(value, indent=2)
    return out


def get_or_set_option(args: argparse.Namespace) -> None:
    if args.value == []:
        print(
            read_machine_option_value(
                args.flake, args.machine, args.option, args.show_trace
            )
        )
    else:
        # load options
        if args.options_file is None:
            options = options_for_machine(
                args.flake, machine_name=args.machine, show_trace=args.show_trace
            )
        else:
            with open(args.options_file) as f:
                options = json.load(f)
        # compute settings json file location
        if args.settings_file is None:
            settings_file = machine_settings_file(args.flake, args.machine)
        else:
            settings_file = args.settings_file
        # set the option with the given value
        set_option(
            flake_name=args.flake,
            option=args.option,
            value=args.value,
            options=options,
            settings_file=settings_file,
            option_description=args.option,
            show_trace=args.show_trace,
        )
        if not args.quiet:
            new_value = read_machine_option_value(args.flake, args.machine, args.option)
            print(f"New Value for {args.option}:")
            print(new_value)


def find_option(
    option: str, value: Any, options: dict, option_description: Optional[str] = None
) -> Tuple[str, Any]:
    """
    The option path specified by the user doesn't have to match exactly to an
    entry in the options.json file. Examples

    Example 1:
        $ clan config services.openssh.settings.SomeSetting 42
        This is a freeform option that does not appear in the options.json
        The actual option is `services.openssh.settings`
        And the value must be wrapped: {"SomeSettings": 42}

    Example 2:
        $ clan config users.users.my-user.name my-name
        The actual option is `users.users.<name>.name`
    """

    # option description is used for error messages
    if option_description is None:
        option_description = option

    option_path = option.split(".")

    # fuzzy search the option paths, so when
    # specified option path: "foo.bar.baz.bum"
    # available option path: "foo.<name>.baz.<name>"
    # we can still find the option
    first = option_path[0]
    regex = rf"({first}|<name>)"
    for elem in option_path[1:]:
        regex += rf"\.({elem}|<name>)"
    for opt in options.keys():
        if re.match(regex, opt):
            return opt, value

    # if the regex search did not find the option, start stripping the last
    # element of the option path and find matching parent option
    # (see examples above for why this is needed)
    if len(option_path) == 1:
        raise ClanError(f"Option {option_description} not found")
    option_path_parent = option_path[:-1]
    attr_prefix = option_path[-1]
    return find_option(
        option=".".join(option_path_parent),
        value={attr_prefix: value},
        options=options,
        option_description=option_description,
    )


def set_option(
    flake_name: FlakeName,
    option: str,
    value: Any,
    options: dict,
    settings_file: Path,
    option_description: str = "",
    show_trace: bool = False,
) -> None:
    option_path_orig = option.split(".")

    # returns for example:
    # option: "users.users.<name>.name"
    # value: "my-name"
    option, value = find_option(
        option=option,
        value=value,
        options=options,
        option_description=option_description,
    )
    option_path = option.split(".")

    option_path_store = option_path_orig[: len(option_path)]

    target_type = map_type(options[option]["type"])
    casted = cast(value, target_type, option)

    # construct a nested dict from the option path and set the value
    result: dict[str, Any] = {}
    current = result
    for part in option_path_store[:-1]:
        current[part] = {}
        current = current[part]
    current[option_path_store[-1]] = value

    current[option_path_store[-1]] = casted

    # check if there is an existing config file
    if os.path.exists(settings_file):
        with open(settings_file) as f:
            current_config = json.load(f)
    else:
        current_config = {}
    # merge and save the new config file
    new_config = merge(current_config, result)
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    with open(settings_file, "w") as f:
        json.dump(new_config, f, indent=2)
        print(file=f)  # add newline at the end of the file to make git happy

    if settings_file.resolve().is_relative_to(specific_flake_dir(flake_name)):
        commit_file(settings_file, commit_message=f"Set option {option_description}")


# takes a (sub)parser and configures it
def register_parser(
    parser: Optional[argparse.ArgumentParser],
) -> None:
    if parser is None:
        parser = argparse.ArgumentParser(
            description="Set or show NixOS options",
        )

    # inject callback function to process the input later
    parser.set_defaults(func=get_or_set_option)
    parser.add_argument(
        "--machine",
        "-m",
        help="Machine to configure",
        type=str,
        default="default",
    )

    parser.add_argument(
        "--show-trace",
        help="Show nix trace on evaluation error",
        action="store_true",
    )

    parser.add_argument(
        "--options-file",
        help="JSON file with options",
        type=Path,
    )

    parser.add_argument(
        "--settings-file",
        help="JSON file with settings",
        type=Path,
    )
    parser.add_argument(
        "--quiet",
        help="Do not print the value",
        action="store_true",
    )

    parser.add_argument(
        "option",
        help="Option to read or set (e.g. foo.bar)",
        type=str,
    )

    parser.add_argument(
        "value",
        # force this arg to be set
        nargs="*",
        help="option value to set (if omitted, the current value is printed)",
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to set machine options for",
    )


def main(argv: Optional[list[str]] = None) -> None:
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser()
    register_parser(parser)
    parser.parse_args(argv[1:])


if __name__ == "__main__":
    main()
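Illustrative only, not part of this commit: a rough sketch of how map_type and cast from the deleted config module fit together (argparse always hands cast a list of strings; the option names are placeholders).

    target = map_type("list of string")       # -> list[str]
    cast(["a", "b"], target, "users.groups")  # -> ["a", "b"]

    target = map_type("null or string")       # -> Optional[str]
    cast(["null"], target, "description")     # -> None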
@@ -1 +0,0 @@
../../../../lib/jsonschema
@@ -1,84 +0,0 @@
import json
import subprocess
import sys
from pathlib import Path

from fastapi import HTTPException

from clan_cli.dirs import (
    machine_settings_file,
    nixpkgs_source,
    specific_flake_dir,
    specific_machine_dir,
)
from clan_cli.git import commit_file, find_git_repo_root
from clan_cli.nix import nix_eval

from ..types import FlakeName


def config_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
    # read the config from a json file located at {flake}/machines/{machine_name}/settings.json
    if not specific_machine_dir(flake_name, machine_name).exists():
        raise HTTPException(
            status_code=404,
            detail=f"Machine {machine_name} not found. Create the machine first`",
        )
    settings_path = machine_settings_file(flake_name, machine_name)
    if not settings_path.exists():
        return {}
    with open(settings_path) as f:
        return json.load(f)


def set_config_for_machine(
    flake_name: FlakeName, machine_name: str, config: dict
) -> None:
    # write the config to a json file located at {flake}/machines/{machine_name}/settings.json
    if not specific_machine_dir(flake_name, machine_name).exists():
        raise HTTPException(
            status_code=404,
            detail=f"Machine {machine_name} not found. Create the machine first`",
        )
    settings_path = machine_settings_file(flake_name, machine_name)
    settings_path.parent.mkdir(parents=True, exist_ok=True)
    with open(settings_path, "w") as f:
        json.dump(config, f)
    repo_dir = find_git_repo_root()

    if repo_dir is not None:
        commit_file(settings_path, repo_dir)


def schema_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
    flake = specific_flake_dir(flake_name)

    # use nix eval to lib.evalModules .#nixosModules.machine-{machine_name}
    proc = subprocess.run(
        nix_eval(
            flags=[
                "--impure",
                "--show-trace",
                "--expr",
                f"""
                let
                    flake = builtins.getFlake (toString {flake});
                    lib = import {nixpkgs_source()}/lib;
                    options = flake.nixosConfigurations.{machine_name}.options;
                    clanOptions = options.clan;
                    jsonschemaLib = import {Path(__file__).parent / "jsonschema"} {{ inherit lib; }};
                    jsonschema = jsonschemaLib.parseOptions clanOptions;
                in
                    jsonschema
                """,
            ],
        ),
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        print(proc.stderr, file=sys.stderr)
        raise Exception(
            f"Failed to read schema for machine {machine_name}:\n{proc.stderr}"
        )
    return json.loads(proc.stdout)
@@ -1,109 +0,0 @@
import json
import subprocess
from pathlib import Path
from typing import Any, Optional, Type, Union

from ..errors import ClanError
from ..nix import nix_eval

script_dir = Path(__file__).parent


type_map: dict[str, type] = {
    "array": list,
    "boolean": bool,
    "integer": int,
    "number": float,
    "string": str,
}


def schema_from_module_file(
    file: Union[str, Path] = f"{script_dir}/jsonschema/example-schema.json",
) -> dict[str, Any]:
    absolute_path = Path(file).absolute()
    # define a nix expression that loads the given module file using lib.evalModules
    nix_expr = f"""
    let
        lib = import <nixpkgs/lib>;
        slib = import {script_dir}/jsonschema {{inherit lib;}};
    in
        slib.parseModule {absolute_path}
    """
    # run the nix expression and parse the output as json
    cmd = nix_eval(["--expr", nix_expr])
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, check=True)
    return json.loads(proc.stdout)


def subtype_from_schema(schema: dict[str, Any]) -> Type:
    if schema["type"] == "object":
        if "additionalProperties" in schema:
            sub_type = subtype_from_schema(schema["additionalProperties"])
            return dict[str, sub_type]  # type: ignore
        elif "properties" in schema:
            raise ClanError("Nested dicts are not supported")
        else:
            raise ClanError("Unknown object type")
    elif schema["type"] == "array":
        if "items" not in schema:
            raise ClanError("Untyped arrays are not supported")
        sub_type = subtype_from_schema(schema["items"])
        return list[sub_type]  # type: ignore
    else:
        return type_map[schema["type"]]


def type_from_schema_path(
    schema: dict[str, Any],
    path: list[str],
    full_path: Optional[list[str]] = None,
) -> Type:
    if full_path is None:
        full_path = path
    if len(path) == 0:
        return subtype_from_schema(schema)
    elif schema["type"] == "object":
        if "properties" in schema:
            subtype = type_from_schema_path(schema["properties"][path[0]], path[1:])
            return subtype
        elif "additionalProperties" in schema:
            subtype = type_from_schema_path(schema["additionalProperties"], path[1:])
            return subtype
        else:
            raise ClanError(f"Unknown type for path {path}")
    else:
        raise ClanError(f"Unknown type for path {path}")


def options_types_from_schema(schema: dict[str, Any]) -> dict[str, Type]:
    result: dict[str, Type] = {}
    for name, value in schema.get("properties", {}).items():
        assert isinstance(value, dict)
        type_ = value["type"]
        if type_ == "object":
            # handle additionalProperties
            if "additionalProperties" in value:
                sub_type = value["additionalProperties"].get("type")
                if sub_type not in type_map:
                    raise ClanError(
                        f"Unsupported object type {sub_type} (field {name})"
                    )
                result[f"{name}.<name>"] = type_map[sub_type]
                continue
            # handle properties
            sub_result = options_types_from_schema(value)
            for sub_name, sub_type in sub_result.items():
                result[f"{name}.{sub_name}"] = sub_type
            continue
        elif type_ == "array":
            if "items" not in value:
                raise ClanError(f"Untyped arrays are not supported (field: {name})")
            sub_type = value["items"].get("type")
            if sub_type not in type_map:
                raise ClanError(f"Unsupported list type {sub_type} (field {name})")
            sub_type_: type = type_map[sub_type]
            result[name] = list[sub_type_]  # type: ignore
            continue
        result[name] = type_map[type_]
    return result
@@ -1,20 +0,0 @@
#!/usr/bin/env python3
import argparse

from .create import register_create_parser
from .list import register_list_parser


# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    create_parser = subparser.add_parser("create", help="Create a clan flake")
    register_create_parser(create_parser)

    list_parser = subparser.add_parser("list", help="List clan flakes")
    register_list_parser(list_parser)
@@ -1,79 +0,0 @@
#!/usr/bin/env python3
import argparse
from pathlib import Path
from typing import Dict

from pydantic import AnyUrl
from pydantic.tools import parse_obj_as

from ..async_cmd import CmdOut, run, runforcli
from ..dirs import clan_flakes_dir
from ..errors import ClanError
from ..nix import nix_command, nix_shell

DEFAULT_URL: AnyUrl = parse_obj_as(
    AnyUrl,
    "git+https://git.clan.lol/clan/clan-core?ref=Qubasa-main#new-clan",  # TODO: Change me back to main branch
)


async def create_flake(directory: Path, url: AnyUrl) -> Dict[str, CmdOut]:
    if not directory.exists():
        directory.mkdir()
    else:
        raise ClanError(f"Flake at '{directory}' already exists")
    response = {}
    command = nix_command(
        [
            "flake",
            "init",
            "-t",
            url,
        ]
    )
    out = await run(command, cwd=directory)
    response["flake init"] = out

    command = nix_shell(["git"], ["git", "init"])
    out = await run(command, cwd=directory)
    response["git init"] = out

    command = nix_shell(["git"], ["git", "add", "."])
    out = await run(command, cwd=directory)
    response["git add"] = out

    command = nix_shell(["git"], ["git", "config", "user.name", "clan-tool"])
    out = await run(command, cwd=directory)
    response["git config"] = out

    command = nix_shell(["git"], ["git", "config", "user.email", "clan@example.com"])
    out = await run(command, cwd=directory)
    response["git config"] = out

    command = nix_shell(["git"], ["git", "commit", "-a", "-m", "Initial commit"])
    out = await run(command, cwd=directory)
    response["git commit"] = out

    return response


def create_flake_command(args: argparse.Namespace) -> None:
    flake_dir = clan_flakes_dir() / args.name
    runforcli(create_flake, flake_dir, args.url)


# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "name",
        type=str,
        help="name for the flake",
    )
    parser.add_argument(
        "--url",
        type=str,
        help="url for the flake",
        default=DEFAULT_URL,
    )
    # parser.add_argument("name", type=str, help="name of the flake")
    parser.set_defaults(func=create_flake_command)
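Illustrative only, not part of this commit: create_flake is a coroutine, so a caller outside runforcli needs its own event loop; the target path below is a placeholder.

    import asyncio
    from pathlib import Path

    # initializes the template flake and a fresh git repository at the given path
    outputs = asyncio.run(create_flake(Path("/tmp/my-clan"), DEFAULT_URL))
    for step, out in outputs.items():
        print(step, out)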
@@ -1,27 +0,0 @@
import argparse
import logging
import os

from ..dirs import clan_flakes_dir

log = logging.getLogger(__name__)


def list_flakes() -> list[str]:
    path = clan_flakes_dir()
    log.debug(f"Listing machines in {path}")
    if not path.exists():
        return []
    objs: list[str] = []
    for f in os.listdir(path):
        objs.append(f)
    return objs


def list_command(args: argparse.Namespace) -> None:
    for flake in list_flakes():
        print(flake)


def register_list_parser(parser: argparse.ArgumentParser) -> None:
    parser.set_defaults(func=list_command)
@@ -1,35 +0,0 @@
|
||||
# !/usr/bin/env python3
|
||||
import argparse
|
||||
import subprocess
|
||||
import urllib
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def join(args: argparse.Namespace) -> None:
|
||||
# start webui in background
|
||||
uri = args.flake_uri.removeprefix("clan://")
|
||||
subprocess.run(
|
||||
["clan", "--debug", "webui", f"/join?flake={urllib.parse.quote_plus(uri)}"],
|
||||
# stdout=sys.stdout,
|
||||
# stderr=sys.stderr,
|
||||
)
|
||||
print(f"joined clan {args.flake_uri}")
|
||||
|
||||
|
||||
# takes a (sub)parser and configures it
|
||||
def register_parser(
|
||||
parser: Optional[argparse.ArgumentParser],
|
||||
) -> None:
|
||||
if parser is None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="join a remote clan",
|
||||
)
|
||||
|
||||
# inject callback function to process the input later
|
||||
parser.set_defaults(func=join)
|
||||
|
||||
parser.add_argument(
|
||||
"flake_uri",
|
||||
help="flake uri to join",
|
||||
type=str,
|
||||
)
|
||||
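Illustrative only, not part of this commit: the join command expects a flake URI with a clan:// prefix; a standalone invocation might look like this (the URI is a placeholder).

    import argparse

    parser = argparse.ArgumentParser()
    register_parser(parser)
    args = parser.parse_args(["clan://git+https://example.com/my-clan"])
    args.func(args)  # strips the prefix and opens the webui join page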
@@ -1,33 +0,0 @@
#!/usr/bin/env python3
import argparse

from .create import register_create_parser
from .delete import register_delete_parser
from .install import register_install_parser
from .list import register_list_parser
from .update import register_update_parser


# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )

    update_parser = subparser.add_parser("update", help="Update a machine")
    register_update_parser(update_parser)

    create_parser = subparser.add_parser("create", help="Create a machine")
    register_create_parser(create_parser)

    delete_parser = subparser.add_parser("delete", help="Delete a machine")
    register_delete_parser(delete_parser)

    list_parser = subparser.add_parser("list", help="List machines")
    register_list_parser(list_parser)

    install_parser = subparser.add_parser("install", help="Install a machine")
    register_install_parser(install_parser)
@@ -1,54 +0,0 @@
import argparse
import logging
from typing import Dict

from ..async_cmd import CmdOut, run, runforcli
from ..dirs import specific_flake_dir, specific_machine_dir
from ..errors import ClanError
from ..nix import nix_shell
from ..types import FlakeName

log = logging.getLogger(__name__)


async def create_machine(flake_name: FlakeName, machine_name: str) -> Dict[str, CmdOut]:
    folder = specific_machine_dir(flake_name, machine_name)
    if folder.exists():
        raise ClanError(f"Machine '{machine_name}' already exists")
    folder.mkdir(parents=True, exist_ok=True)

    # create empty settings.json file inside the folder
    with open(folder / "settings.json", "w") as f:
        f.write("{}")
    response = {}
    out = await run(nix_shell(["git"], ["git", "add", str(folder)]), cwd=folder)
    response["git add"] = out

    out = await run(
        nix_shell(
            ["git"],
            ["git", "commit", "-m", f"Added machine {machine_name}", str(folder)],
        ),
        cwd=folder,
    )
    response["git commit"] = out

    return response


def create_command(args: argparse.Namespace) -> None:
    try:
        flake_dir = specific_flake_dir(args.flake)
        runforcli(create_machine, flake_dir, args.machine)
    except ClanError as e:
        print(e)


def register_create_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=create_command)
@@ -1,23 +0,0 @@
import argparse
import shutil

from ..dirs import specific_machine_dir
from ..errors import ClanError


def delete_command(args: argparse.Namespace) -> None:
    folder = specific_machine_dir(args.flake, args.host)
    if folder.exists():
        shutil.rmtree(folder)
    else:
        raise ClanError(f"Machine {args.host} does not exist")


def register_delete_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument("host", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=delete_command)
@@ -1,10 +0,0 @@
from ..dirs import specific_machine_dir
from ..types import FlakeName


def machine_has_fact(flake_name: FlakeName, machine: str, fact: str) -> bool:
    return (specific_machine_dir(flake_name, machine) / "facts" / fact).exists()


def machine_get_fact(flake_name: FlakeName, machine: str, fact: str) -> str:
    return (specific_machine_dir(flake_name, machine) / "facts" / fact).read_text()
@@ -1,65 +0,0 @@
import argparse
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory

from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell
from ..secrets.generate import generate_secrets


def install_nixos(machine: Machine) -> None:
    h = machine.host
    target_host = f"{h.user or 'root'}@{h.host}"

    flake_attr = h.meta.get("flake_attr", "")

    generate_secrets(machine)

    with TemporaryDirectory() as tmpdir_:
        tmpdir = Path(tmpdir_)
        machine.upload_secrets(tmpdir / machine.secrets_upload_directory)

        subprocess.run(
            nix_shell(
                ["nixos-anywhere"],
                [
                    "nixos-anywhere",
                    "-f",
                    f"{machine.flake_dir}#{flake_attr}",
                    "-t",
                    "--no-reboot",
                    "--extra-files",
                    str(tmpdir),
                    target_host,
                ],
            ),
            check=True,
        )


def install_command(args: argparse.Namespace) -> None:
    machine = Machine(args.machine, flake_dir=specific_flake_dir(args.flake))
    machine.deployment_address = args.target_host

    install_nixos(machine)


def register_install_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "machine",
        type=str,
        help="machine to install",
    )
    parser.add_argument(
        "target_host",
        type=str,
        help="ssh address to install to in the form of user@host:2222",
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to install machine from",
    )
    parser.set_defaults(func=install_command)
@@ -1,35 +0,0 @@
import argparse
import logging
import os

from ..dirs import machines_dir
from ..types import FlakeName
from .types import validate_hostname

log = logging.getLogger(__name__)


def list_machines(flake_name: FlakeName) -> list[str]:
    path = machines_dir(flake_name)
    log.debug(f"Listing machines in {path}")
    if not path.exists():
        return []
    objs: list[str] = []
    for f in os.listdir(path):
        if validate_hostname(f):
            objs.append(f)
    return objs


def list_command(args: argparse.Namespace) -> None:
    for machine in list_machines(args.flake):
        print(machine)


def register_list_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=list_command)
@@ -1,112 +0,0 @@
import json
import os
import subprocess
import sys
from pathlib import Path
from typing import Optional

from ..nix import nix_build, nix_config, nix_eval
from ..ssh import Host, parse_deployment_address


def build_machine_data(machine_name: str, clan_dir: Path) -> dict:
    config = nix_config()
    system = config["system"]

    outpath = subprocess.run(
        nix_build(
            [
                f'path:{clan_dir}#clanInternals.machines."{system}"."{machine_name}".config.system.clan.deployment.file'
            ]
        ),
        stdout=subprocess.PIPE,
        check=True,
        text=True,
    ).stdout.strip()
    return json.loads(Path(outpath).read_text())


class Machine:
    def __init__(
        self,
        name: str,
        flake_dir: Path,
        machine_data: Optional[dict] = None,
    ) -> None:
        """
        Creates a Machine
        @name: the name of the machine
        @clan_dir: the directory of the clan, optional, if not set it will be determined from the current working directory
        @machine_json: can be optionally used to skip evaluation of the machine, location of the json file with machine data
        """
        self.name = name
        self.flake_dir = flake_dir

        if machine_data is None:
            self.machine_data = build_machine_data(name, self.flake_dir)
        else:
            self.machine_data = machine_data

        self.deployment_address = self.machine_data["deploymentAddress"]
        self.upload_secrets = self.machine_data["uploadSecrets"]
        self.generate_secrets = self.machine_data["generateSecrets"]
        self.secrets_upload_directory = self.machine_data["secretsUploadDirectory"]

    @property
    def host(self) -> Host:
        return parse_deployment_address(
            self.name, self.deployment_address, meta={"machine": self}
        )

    def run_upload_secrets(self, secrets_dir: Path) -> bool:
        """
        Upload the secrets to the provided directory
        @secrets_dir: the directory to store the secrets in
        """
        env = os.environ.copy()
        env["CLAN_DIR"] = str(self.flake_dir)
        env["PYTHONPATH"] = str(
            ":".join(sys.path)
        )  # TODO do this in the clanCore module
        env["SECRETS_DIR"] = str(secrets_dir)
        print(f"uploading secrets... {self.upload_secrets}")
        proc = subprocess.run(
            [self.upload_secrets],
            env=env,
            stdout=subprocess.PIPE,
            text=True,
        )

        if proc.returncode == 23:
            print("no secrets to upload")
            return False
        elif proc.returncode != 0:
            print("failed generate secrets directory")
            exit(1)
        return True

    def eval_nix(self, attr: str) -> str:
        """
        eval a nix attribute of the machine
        @attr: the attribute to get
        """
        output = subprocess.run(
            nix_eval([f"path:{self.flake_dir}#{attr}"]),
            stdout=subprocess.PIPE,
            check=True,
            text=True,
        ).stdout.strip()
        return output

    def build_nix(self, attr: str) -> Path:
        """
        build a nix attribute of the machine
        @attr: the attribute to get
        """
        outpath = subprocess.run(
            nix_build([f"path:{self.flake_dir}#{attr}"]),
            stdout=subprocess.PIPE,
            check=True,
            text=True,
        ).stdout.strip()
        return Path(outpath)
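Illustrative only, not part of this commit: a sketch of constructing a Machine directly; the machine name and flake path are placeholders, and construction triggers a nix build of the deployment data unless machine_data is passed in.

    from pathlib import Path

    machine = Machine(name="server-01", flake_dir=Path("/path/to/clan"))
    print(machine.deployment_address)
    print(machine.eval_nix("nixosConfigurations.server-01.config.networking.hostName"))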
@@ -1,22 +0,0 @@
import argparse
import re

VALID_HOSTNAME = re.compile(r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", re.IGNORECASE)


def validate_hostname(hostname: str) -> bool:
    if len(hostname) > 63:
        return False
    return VALID_HOSTNAME.match(hostname) is not None


def machine_name_type(arg_value: str) -> str:
    if len(arg_value) > 63:
        raise argparse.ArgumentTypeError(
            "Machine name must be less than 63 characters long"
        )
    if not VALID_HOSTNAME.match(arg_value):
        raise argparse.ArgumentTypeError(
            "Invalid character in machine name. Allowed characters are a-z, 0-9, ., -, and _. Must not start with a number"
        )
    return arg_value
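Illustrative only, not part of this commit: expected behaviour of the hostname validator above.

    validate_hostname("server-01")  # True
    validate_hostname("-bad-name")  # False: must start and end with an alphanumeric character
    validate_hostname("a" * 64)     # False: longer than 63 characters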
@@ -1,159 +0,0 @@
import argparse
import json
import os
import subprocess
from pathlib import Path

from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_build, nix_command, nix_config
from ..secrets.generate import generate_secrets
from ..secrets.upload import upload_secrets
from ..ssh import Host, HostGroup, HostKeyCheck, parse_deployment_address


def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
    """
    Deploy to all hosts in parallel
    """

    def deploy(h: Host) -> None:
        target = f"{h.user or 'root'}@{h.host}"
        ssh_arg = f"-p {h.port}" if h.port else ""
        env = os.environ.copy()
        env["NIX_SSHOPTS"] = ssh_arg
        res = h.run_local(
            nix_command(["flake", "archive", "--to", f"ssh://{target}", "--json"]),
            check=True,
            stdout=subprocess.PIPE,
            extra_env=env,
        )
        data = json.loads(res.stdout)
        path = data["path"]

        if h.host_key_check != HostKeyCheck.STRICT:
            ssh_arg += " -o StrictHostKeyChecking=no"
        if h.host_key_check == HostKeyCheck.NONE:
            ssh_arg += " -o UserKnownHostsFile=/dev/null"

        ssh_arg += " -i " + h.key if h.key else ""

        flake_attr = h.meta.get("flake_attr", "")

        generate_secrets(h.meta["machine"])
        upload_secrets(h.meta["machine"])

        target_host = h.meta.get("target_host")
        if target_host:
            target_user = h.meta.get("target_user")
            if target_user:
                target_host = f"{target_user}@{target_host}"
        extra_args = h.meta.get("extra_args", [])
        cmd = (
            ["nixos-rebuild", "switch"]
            + extra_args
            + [
                "--fast",
                "--option",
                "keep-going",
                "true",
                "--option",
                "accept-flake-config",
                "true",
                "--build-host",
                "",
                "--flake",
                f"{path}#{flake_attr}",
            ]
        )
        if target_host:
            cmd.extend(["--target-host", target_host])
        ret = h.run(cmd, check=False)
        # re-retry switch if the first time fails
        if ret.returncode != 0:
            ret = h.run(cmd)

    hosts.run_function(deploy)


# function to speedup eval if we want to evauluate all machines
def get_all_machines(clan_dir: Path) -> HostGroup:
    config = nix_config()
    system = config["system"]
    machines_json = subprocess.run(
        nix_build([f'{clan_dir}#clanInternals.all-machines-json."{system}"']),
        stdout=subprocess.PIPE,
        check=True,
        text=True,
    ).stdout

    machines = json.loads(Path(machines_json).read_text())

    hosts = []
    for name, machine_data in machines.items():
        # very hacky. would be better to do a MachinesGroup instead
        host = parse_deployment_address(
            name,
            machine_data["deploymentAddress"],
            meta={
                "machine": Machine(
                    name=name, flake_dir=clan_dir, machine_data=machine_data
                )
            },
        )
        hosts.append(host)
    return HostGroup(hosts)


def get_selected_machines(machine_names: list[str], flake_dir: Path) -> HostGroup:
    hosts = []
    for name in machine_names:
        machine = Machine(name=name, flake_dir=flake_dir)
        hosts.append(machine.host)
    return HostGroup(hosts)


# FIXME: we want some kind of inventory here.
def update(args: argparse.Namespace) -> None:
    flake_dir = specific_flake_dir(args.flake)
    if len(args.machines) == 1 and args.target_host is not None:
        machine = Machine(name=args.machines[0], flake_dir=flake_dir)
        machine.deployment_address = args.target_host
        host = parse_deployment_address(
            args.machines[0],
            args.target_host,
            meta={"machine": machine},
        )
        machines = HostGroup([host])

    elif args.target_host is not None:
        print("target host can only be specified for a single machine")
        exit(1)
    else:
        if len(args.machines) == 0:
            machines = get_all_machines(flake_dir)
        else:
            machines = get_selected_machines(args.machines, flake_dir)

    deploy_nixos(machines, flake_dir)


def register_update_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "machines",
        type=str,
        help="machine to update. if empty, update all machines",
        nargs="*",
        default=[],
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to update machine for",
    )
    parser.add_argument(
        "--target-host",
        type=str,
        help="address of the machine to update, in the format of user@host:1234",
    )
    parser.set_defaults(func=update)
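Illustrative only, not part of this commit: with the positional arguments above, machine names come first and the flake name last, and --target-host is only valid for a single machine; the names and address are placeholders.

    import argparse

    parser = argparse.ArgumentParser()
    register_update_parser(parser)
    args = parser.parse_args(["server-01", "myclan", "--target-host", "root@192.0.2.10"])
    # args.machines == ["server-01"], args.flake == "myclan"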
@@ -1,46 +0,0 @@
#!/usr/bin/env python3
import argparse

from .generate import register_generate_parser
from .groups import register_groups_parser
from .import_sops import register_import_sops_parser
from .key import register_key_parser
from .machines import register_machines_parser
from .secrets import register_secrets_parser
from .upload import register_upload_parser
from .users import register_users_parser


# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )

    groups_parser = subparser.add_parser("groups", help="manage groups")
    register_groups_parser(groups_parser)

    users_parser = subparser.add_parser("users", help="manage users")
    register_users_parser(users_parser)

    machines_parser = subparser.add_parser("machines", help="manage machines")
    register_machines_parser(machines_parser)

    import_sops_parser = subparser.add_parser("import-sops", help="import a sops file")
    register_import_sops_parser(import_sops_parser)

    parser_generate = subparser.add_parser(
        "generate", help="generate secrets for machines if they don't exist yet"
    )
    register_generate_parser(parser_generate)

    parser_upload = subparser.add_parser("upload", help="upload secrets for machines")
    register_upload_parser(parser_upload)

    parser_key = subparser.add_parser("key", help="create and show age keys")
    register_key_parser(parser_key)

    register_secrets_parser(subparser)
@@ -1,44 +0,0 @@
import os
import shutil
from pathlib import Path
from typing import Callable

from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..types import FlakeName


def get_sops_folder(flake_name: FlakeName) -> Path:
    return specific_flake_dir(flake_name) / "sops"


def gen_sops_subfolder(subdir: str) -> Callable[[FlakeName], Path]:
    def folder(flake_name: FlakeName) -> Path:
        return specific_flake_dir(flake_name) / "sops" / subdir

    return folder


sops_secrets_folder = gen_sops_subfolder("secrets")
sops_users_folder = gen_sops_subfolder("users")
sops_machines_folder = gen_sops_subfolder("machines")
sops_groups_folder = gen_sops_subfolder("groups")


def list_objects(path: Path, is_valid: Callable[[str], bool]) -> list[str]:
    objs: list[str] = []
    if not path.exists():
        return objs
    for f in os.listdir(path):
        if is_valid(f):
            objs.append(f)
    return objs


def remove_object(path: Path, name: str) -> None:
    try:
        shutil.rmtree(path / name)
    except FileNotFoundError:
        raise ClanError(f"{name} not found in {path}")
    if not os.listdir(path):
        os.rmdir(path)
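Illustrative only, not part of this commit: the generated helpers above resolve to subdirectories of the flake's sops folder (the flake name is a placeholder).

    sops_secrets_folder("myclan")   # -> <flake dir>/sops/secrets
    sops_machines_folder("myclan")  # -> <flake dir>/sops/machines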
@@ -1,47 +0,0 @@
import argparse
import logging
import os
import subprocess
import sys

from clan_cli.errors import ClanError

from ..dirs import specific_flake_dir
from ..machines.machines import Machine

log = logging.getLogger(__name__)


def generate_secrets(machine: Machine) -> None:
    env = os.environ.copy()
    env["CLAN_DIR"] = str(machine.flake_dir)
    env["PYTHONPATH"] = ":".join(sys.path)  # TODO do this in the clanCore module

    print(f"generating secrets... {machine.generate_secrets}")
    proc = subprocess.run(
        [machine.generate_secrets],
        env=env,
    )

    if proc.returncode != 0:
        raise ClanError("failed to generate secrets")
    else:
        print("successfully generated secrets")


def generate_command(args: argparse.Namespace) -> None:
    machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
    generate_secrets(machine)


def register_generate_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "machine",
        help="The machine to generate secrets for",
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=generate_command)
@@ -1,307 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from ..errors import ClanError
|
||||
from ..machines.types import machine_name_type, validate_hostname
|
||||
from ..types import FlakeName
|
||||
from . import secrets
|
||||
from .folders import (
|
||||
sops_groups_folder,
|
||||
sops_machines_folder,
|
||||
sops_secrets_folder,
|
||||
sops_users_folder,
|
||||
)
|
||||
from .sops import update_keys
|
||||
from .types import (
|
||||
VALID_USER_NAME,
|
||||
group_name_type,
|
||||
secret_name_type,
|
||||
user_name_type,
|
||||
)
|
||||
|
||||
|
||||
def machines_folder(flake_name: FlakeName, group: str) -> Path:
|
||||
return sops_groups_folder(flake_name) / group / "machines"
|
||||
|
||||
|
||||
def users_folder(flake_name: FlakeName, group: str) -> Path:
|
||||
return sops_groups_folder(flake_name) / group / "users"
|
||||
|
||||
|
||||
class Group:
|
||||
def __init__(
|
||||
self, flake_name: FlakeName, name: str, machines: list[str], users: list[str]
|
||||
) -> None:
|
||||
self.name = name
|
||||
self.machines = machines
|
||||
self.users = users
|
||||
self.flake_name = flake_name
|
||||
|
||||
|
||||
def list_groups(flake_name: FlakeName) -> list[Group]:
|
||||
groups: list[Group] = []
|
||||
folder = sops_groups_folder(flake_name)
|
||||
if not folder.exists():
|
||||
return groups
|
||||
|
||||
for name in os.listdir(folder):
|
||||
group_folder = folder / name
|
||||
if not group_folder.is_dir():
|
||||
continue
|
||||
machines_path = machines_folder(flake_name, name)
|
||||
machines = []
|
||||
if machines_path.is_dir():
|
||||
for f in machines_path.iterdir():
|
||||
if validate_hostname(f.name):
|
||||
machines.append(f.name)
|
||||
users_path = users_folder(flake_name, name)
|
||||
users = []
|
||||
if users_path.is_dir():
|
||||
for f in users_path.iterdir():
|
||||
if VALID_USER_NAME.match(f.name):
|
||||
users.append(f.name)
|
||||
groups.append(Group(flake_name, name, machines, users))
|
||||
return groups
|
||||
|
||||
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
for group in list_groups(args.flake):
|
||||
print(group.name)
|
||||
if group.machines:
|
||||
print("machines:")
|
||||
for machine in group.machines:
|
||||
print(f" {machine}")
|
||||
if group.users:
|
||||
print("users:")
|
||||
for user in group.users:
|
||||
print(f" {user}")
|
||||
print()
|
||||
|
||||
|
||||
def list_directory(directory: Path) -> str:
|
||||
if not directory.exists():
|
||||
return f"{directory} does not exist"
|
||||
msg = f"\n{directory} contains:"
|
||||
for f in directory.iterdir():
|
||||
msg += f"\n {f.name}"
|
||||
return msg
|
||||
|
||||
|
||||
def update_group_keys(flake_name: FlakeName, group: str) -> None:
|
||||
for secret_ in secrets.list_secrets(flake_name):
|
||||
secret = sops_secrets_folder(flake_name) / secret_
|
||||
if (secret / "groups" / group).is_symlink():
|
||||
update_keys(
|
||||
secret,
|
||||
list(sorted(secrets.collect_keys_for_path(secret))),
|
||||
)
|
||||
|
||||
|
||||
def add_member(
|
||||
flake_name: FlakeName, group_folder: Path, source_folder: Path, name: str
|
||||
) -> None:
|
||||
source = source_folder / name
|
||||
if not source.exists():
|
||||
msg = f"{name} does not exist in {source_folder}: "
|
||||
msg += list_directory(source_folder)
|
||||
raise ClanError(msg)
|
||||
group_folder.mkdir(parents=True, exist_ok=True)
|
||||
user_target = group_folder / name
|
||||
if user_target.exists():
|
||||
if not user_target.is_symlink():
|
||||
raise ClanError(
|
||||
f"Cannot add user {name}. {user_target} exists but is not a symlink"
|
||||
)
|
||||
os.remove(user_target)
|
||||
user_target.symlink_to(os.path.relpath(source, user_target.parent))
|
||||
update_group_keys(flake_name, group_folder.parent.name)
|
||||
|
||||
|
||||
def remove_member(flake_name: FlakeName, group_folder: Path, name: str) -> None:
|
||||
target = group_folder / name
|
||||
if not target.exists():
|
||||
msg = f"{name} does not exist in group in {group_folder}: "
|
||||
msg += list_directory(group_folder)
|
||||
raise ClanError(msg)
|
||||
os.remove(target)
|
||||
|
||||
if len(os.listdir(group_folder)) > 0:
|
||||
update_group_keys(flake_name, group_folder.parent.name)
|
||||
|
||||
if len(os.listdir(group_folder)) == 0:
|
||||
os.rmdir(group_folder)
|
||||
|
||||
if len(os.listdir(group_folder.parent)) == 0:
|
||||
os.rmdir(group_folder.parent)
|
||||
|
||||
|
||||
def add_user(flake_name: FlakeName, group: str, name: str) -> None:
|
||||
add_member(
|
||||
flake_name, users_folder(flake_name, group), sops_users_folder(flake_name), name
|
||||
)
|
||||
|
||||
|
||||
def add_user_command(args: argparse.Namespace) -> None:
|
||||
add_user(args.flake, args.group, args.user)
|
||||
|
||||
|
||||
def remove_user(flake_name: FlakeName, group: str, name: str) -> None:
|
||||
remove_member(flake_name, users_folder(flake_name, group), name)
|
||||
|
||||
|
||||
def remove_user_command(args: argparse.Namespace) -> None:
|
||||
remove_user(args.flake, args.group, args.user)
|
||||
|
||||
|
||||
def add_machine(flake_name: FlakeName, group: str, name: str) -> None:
|
||||
add_member(
|
||||
flake_name,
|
||||
machines_folder(flake_name, group),
|
||||
sops_machines_folder(flake_name),
|
||||
name,
|
||||
)
|
||||
|
||||
|
||||
def add_machine_command(args: argparse.Namespace) -> None:
|
||||
add_machine(args.flake, args.group, args.machine)
|
||||
|
||||
|
||||
def remove_machine(flake_name: FlakeName, group: str, name: str) -> None:
|
||||
remove_member(flake_name, machines_folder(flake_name, group), name)
|
||||
|
||||
|
||||
def remove_machine_command(args: argparse.Namespace) -> None:
|
||||
remove_machine(args.flake, args.group, args.machine)
|
||||
|
||||
|
||||
def add_group_argument(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument("group", help="the name of the secret", type=group_name_type)
|
||||
|
||||
|
||||
def add_secret(flake_name: FlakeName, group: str, name: str) -> None:
|
||||
secrets.allow_member(
|
||||
secrets.groups_folder(flake_name, name), sops_groups_folder(flake_name), group
|
||||
)
|
||||
|
||||
|
||||
def add_secret_command(args: argparse.Namespace) -> None:
    add_secret(args.flake, args.group, args.secret)


def remove_secret(flake_name: FlakeName, group: str, name: str) -> None:
    secrets.disallow_member(secrets.groups_folder(flake_name, name), group)


def remove_secret_command(args: argparse.Namespace) -> None:
    remove_secret(args.flake, args.group, args.secret)


def register_groups_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )

    # List groups
    list_parser = subparser.add_parser("list", help="list groups")
    list_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    list_parser.set_defaults(func=list_command)

    # Add machine
    add_machine_parser = subparser.add_parser(
        "add-machine", help="add a machine to group"
    )
    add_group_argument(add_machine_parser)
    add_machine_parser.add_argument(
        "machine", help="the name of the machine to add", type=machine_name_type
    )
    add_machine_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_machine_parser.set_defaults(func=add_machine_command)

    # Remove machine
    remove_machine_parser = subparser.add_parser(
        "remove-machine", help="remove a machine from group"
    )
    add_group_argument(remove_machine_parser)
    remove_machine_parser.add_argument(
        "machine", help="the name of the machine to remove", type=machine_name_type
    )
    remove_machine_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_machine_parser.set_defaults(func=remove_machine_command)

    # Add user
    add_user_parser = subparser.add_parser("add-user", help="add a user to group")
    add_group_argument(add_user_parser)
    add_user_parser.add_argument(
        "user", help="the name of the user to add", type=user_name_type
    )
    add_user_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_user_parser.set_defaults(func=add_user_command)

    # Remove user
    remove_user_parser = subparser.add_parser(
        "remove-user", help="remove a user from group"
    )
    add_group_argument(remove_user_parser)
    remove_user_parser.add_argument(
        "user", help="the name of the user to remove", type=user_name_type
    )
    remove_user_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_user_parser.set_defaults(func=remove_user_command)

    # Add secret
    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a group to access a secret"
    )
    add_secret_parser.add_argument(
        "group", help="the name of the group", type=group_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    add_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_secret_parser.set_defaults(func=add_secret_command)

    # Remove secret
    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a group's access to a secret"
    )
    remove_secret_parser.add_argument(
        "group", help="the name of the group", type=group_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    remove_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)
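All of these subcommands dispatch through set_defaults(func=...), so a registered parser can be exercised directly. A minimal sketch, assuming the module is importable as clan_cli.secrets.groups and that a flake named "myflake" exists (both illustrative):

    import argparse

    from clan_cli.secrets import groups  # assumed import path

    parser = argparse.ArgumentParser(prog="clan-groups")
    groups.register_groups_parser(parser)
    args = parser.parse_args(["add-user", "admins", "alice", "myflake"])
    args.func(args)  # dispatches to add_user_command(args)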
@@ -1,94 +0,0 @@
import argparse
import json
import subprocess
import sys
from pathlib import Path

from ..errors import ClanError
from ..nix import nix_shell
from .secrets import encrypt_secret, sops_secrets_folder


def import_sops(args: argparse.Namespace) -> None:
    file = Path(args.sops_file)
    file_type = file.suffix

    try:
        file.read_text()
    except OSError as e:
        raise ClanError(f"Could not read file {file}: {e}") from e
    if file_type == ".yaml":
        cmd = ["sops"]
        if args.input_type:
            cmd += ["--input-type", args.input_type]
        cmd += ["--output-type", "json", "--decrypt", args.sops_file]
        cmd = nix_shell(["sops"], cmd)
        try:
            res = subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE)
        except subprocess.CalledProcessError as e:
            raise ClanError(f"Could not import sops file {file}: {e}") from e
        secrets = json.loads(res.stdout)
        for k, v in secrets.items():
            k = args.prefix + k
            if not isinstance(v, str):
                print(
                    f"WARNING: {k} is not a string but {type(v)}, skipping",
                    file=sys.stderr,
                )
                continue
            if (sops_secrets_folder(args.flake) / k / "secret").exists():
                print(
                    f"WARNING: {k} already exists, skipping",
                    file=sys.stderr,
                )
                continue
            encrypt_secret(
                args.flake,
                sops_secrets_folder(args.flake) / k,
                v,
                add_groups=args.group,
                add_machines=args.machine,
                add_users=args.user,
            )


def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "--input-type",
        type=str,
        default=None,
        help="the input type of the sops file (yaml, json, ...). If not specified, it will be guessed from the file extension",
    )
    parser.add_argument(
        "--group",
        type=str,
        action="append",
        default=[],
        help="the group to import the secrets to",
    )
    parser.add_argument(
        "--machine",
        type=str,
        action="append",
        default=[],
        help="the machine to import the secrets to",
    )
    parser.add_argument(
        "--user",
        type=str,
        action="append",
        default=[],
        help="the user to import the secrets to",
    )
    parser.add_argument(
        "--prefix",
        type=str,
        default="",
        help="the prefix to use for the secret names",
    )
    parser.add_argument(
        "sops_file",
        type=str,
        help="the sops file to import (- for stdin)",
    )
    parser.set_defaults(func=import_sops)
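import_sops decrypts a sops file to JSON and creates one clan secret per top-level string entry, skipping non-string values and names that already exist. A minimal programmatic sketch, assuming a flake named "myflake" and a sops-encrypted secrets.yaml with flat string values (both illustrative):

    import argparse

    ns = argparse.Namespace(
        sops_file="secrets.yaml",  # illustrative path
        input_type=None,           # let sops infer it from the extension
        prefix="myhost-",
        group=[],
        machine=["myhost"],
        user=[],
        flake="myflake",           # read as args.flake by import_sops
    )
    import_sops(ns)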
@@ -1,48 +0,0 @@
import argparse

from .. import tty
from ..errors import ClanError
from .sops import default_sops_key_path, generate_private_key, get_public_key


def generate_key() -> str:
    path = default_sops_key_path()
    if path.exists():
        raise ClanError(f"Key already exists at {path}")
    priv_key, pub_key = generate_private_key()
    path.write_text(priv_key)
    return pub_key


def show_key() -> str:
    return get_public_key(default_sops_key_path().read_text())


def generate_command(args: argparse.Namespace) -> None:
    pub_key = generate_key()
    tty.info(
        f"Generated age private key at '{default_sops_key_path()}' for your user. Please back it up in a secure location or you will lose access to your secrets."
    )
    tty.info(
        f"Also add your age public key to the repository with 'clan secrets users add youruser {pub_key}' (replace youruser with your user name)"
    )


def show_command(args: argparse.Namespace) -> None:
    print(show_key())


def register_key_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )

    parser_generate = subparser.add_parser("generate", help="generate age key")
    parser_generate.set_defaults(func=generate_command)

    parser_show = subparser.add_parser("show", help="show age public key")
    parser_show.set_defaults(func=show_command)
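generate_key refuses to overwrite an existing key file, so a first-run flow is: generate once, then only read the public key back. A sketch of using these helpers directly, without going through the CLI:

    try:
        pub_key = generate_key()  # writes the age private key to default_sops_key_path()
    except ClanError:
        pub_key = show_key()      # key already present; just derive its public part
    print(pub_key)                # age1... value to register with `clan secrets users add`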
@@ -1,170 +0,0 @@
import argparse

from ..machines.types import machine_name_type, validate_hostname
from ..types import FlakeName
from . import secrets
from .folders import list_objects, remove_object, sops_machines_folder
from .sops import read_key, write_key
from .types import public_or_private_age_key_type, secret_name_type


def add_machine(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
    write_key(sops_machines_folder(flake_name) / name, key, force)


def remove_machine(flake_name: FlakeName, name: str) -> None:
    remove_object(sops_machines_folder(flake_name), name)


def get_machine(flake_name: FlakeName, name: str) -> str:
    return read_key(sops_machines_folder(flake_name) / name)


def has_machine(flake_name: FlakeName, name: str) -> bool:
    return (sops_machines_folder(flake_name) / name / "key.json").exists()


def list_machines(flake_name: FlakeName) -> list[str]:
    path = sops_machines_folder(flake_name)

    def validate(name: str) -> bool:
        return validate_hostname(name) and has_machine(flake_name, name)

    return list_objects(path, validate)


def add_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
    secrets.allow_member(
        secrets.machines_folder(flake_name, secret),
        sops_machines_folder(flake_name),
        machine,
    )


def remove_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
    secrets.disallow_member(secrets.machines_folder(flake_name, secret), machine)


def list_command(args: argparse.Namespace) -> None:
    lst = list_machines(args.flake)
    if len(lst) > 0:
        print("\n".join(lst))


def add_command(args: argparse.Namespace) -> None:
    add_machine(args.flake, args.machine, args.key, args.force)


def get_command(args: argparse.Namespace) -> None:
    print(get_machine(args.flake, args.machine))


def remove_command(args: argparse.Namespace) -> None:
    remove_machine(args.flake, args.machine)


def add_secret_command(args: argparse.Namespace) -> None:
    add_secret(args.flake, args.machine, args.secret)


def remove_secret_command(args: argparse.Namespace) -> None:
    remove_secret(args.flake, args.machine, args.secret)


def register_machines_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    # List machines
    list_parser = subparser.add_parser("list", help="list machines")
    list_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    list_parser.set_defaults(func=list_command)

    # Add machine
    add_parser = subparser.add_parser("add", help="add a machine")
    add_parser.add_argument(
        "-f",
        "--force",
        help="overwrite existing machine",
        action="store_true",
        default=False,
    )
    add_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    add_parser.add_argument(
        "key",
        help="public key or private key of the machine",
        type=public_or_private_age_key_type,
    )
    add_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_parser.set_defaults(func=add_command)

    # Get machine key
    get_parser = subparser.add_parser("get", help="get a machine public key")
    get_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    get_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    get_parser.set_defaults(func=get_command)

    # Remove machine
    remove_parser = subparser.add_parser("remove", help="remove a machine")
    remove_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    remove_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_parser.set_defaults(func=remove_command)

    # Add secret
    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a machine to access a secret"
    )
    add_secret_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    add_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_secret_parser.set_defaults(func=add_secret_command)

    # Remove secret
    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a machine's access to a secret"
    )
    remove_secret_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    remove_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)
@@ -1,326 +0,0 @@
import argparse
import getpass
import os
import shutil
import sys
from pathlib import Path
from typing import IO

from .. import tty
from ..errors import ClanError
from ..types import FlakeName
from .folders import (
    list_objects,
    sops_groups_folder,
    sops_machines_folder,
    sops_secrets_folder,
    sops_users_folder,
)
from .sops import decrypt_file, encrypt_file, ensure_sops_key, read_key, update_keys
from .types import VALID_SECRET_NAME, secret_name_type


def collect_keys_for_type(folder: Path) -> set[str]:
    if not folder.exists():
        return set()
    keys = set()
    for p in folder.iterdir():
        if not p.is_symlink():
            continue
        try:
            target = p.resolve()
        except FileNotFoundError:
            tty.warn(f"Ignoring broken symlink {p}")
            continue
        kind = target.parent.name
        if folder.name != kind:
            tty.warn(f"Expected {p} to point to {folder} but points to {target.parent}")
            continue
        keys.add(read_key(target))
    return keys


def collect_keys_for_path(path: Path) -> set[str]:
    keys = set([])
    keys.update(collect_keys_for_type(path / "machines"))
    keys.update(collect_keys_for_type(path / "users"))
    groups = path / "groups"
    if not groups.is_dir():
        return keys
    for group in groups.iterdir():
        keys.update(collect_keys_for_type(group / "machines"))
        keys.update(collect_keys_for_type(group / "users"))
    return keys


def encrypt_secret(
    flake_name: FlakeName,
    secret: Path,
    value: IO[str] | str | None,
    add_users: list[str] = [],
    add_machines: list[str] = [],
    add_groups: list[str] = [],
) -> None:
    key = ensure_sops_key(flake_name)
    keys = set([])

    for user in add_users:
        allow_member(
            users_folder(flake_name, secret.name),
            sops_users_folder(flake_name),
            user,
            False,
        )

    for machine in add_machines:
        allow_member(
            machines_folder(flake_name, secret.name),
            sops_machines_folder(flake_name),
            machine,
            False,
        )

    for group in add_groups:
        allow_member(
            groups_folder(flake_name, secret.name),
            sops_groups_folder(flake_name),
            group,
            False,
        )

    keys = collect_keys_for_path(secret)

    if key.pubkey not in keys:
        keys.add(key.pubkey)
        allow_member(
            users_folder(flake_name, secret.name),
            sops_users_folder(flake_name),
            key.username,
            False,
        )

    encrypt_file(secret / "secret", value, list(sorted(keys)))


def remove_secret(flake_name: FlakeName, secret: str) -> None:
    path = sops_secrets_folder(flake_name) / secret
    if not path.exists():
        raise ClanError(f"Secret '{secret}' does not exist")
    shutil.rmtree(path)


def remove_command(args: argparse.Namespace) -> None:
    remove_secret(args.flake, args.secret)


def add_secret_argument(parser: argparse.ArgumentParser) -> None:
    parser.add_argument("secret", help="the name of the secret", type=secret_name_type)


def machines_folder(flake_name: FlakeName, group: str) -> Path:
    return sops_secrets_folder(flake_name) / group / "machines"


def users_folder(flake_name: FlakeName, group: str) -> Path:
    return sops_secrets_folder(flake_name) / group / "users"


def groups_folder(flake_name: FlakeName, group: str) -> Path:
    return sops_secrets_folder(flake_name) / group / "groups"


def list_directory(directory: Path) -> str:
    if not directory.exists():
        return f"{directory} does not exist"
    msg = f"\n{directory} contains:"
    for f in directory.iterdir():
        msg += f"\n  {f.name}"
    return msg


def allow_member(
    group_folder: Path, source_folder: Path, name: str, do_update_keys: bool = True
) -> None:
    source = source_folder / name
    if not source.exists():
        msg = f"{name} does not exist in {source_folder}: "
        msg += list_directory(source_folder)
        raise ClanError(msg)
    group_folder.mkdir(parents=True, exist_ok=True)
    user_target = group_folder / name
    if user_target.exists():
        if not user_target.is_symlink():
            raise ClanError(
                f"Cannot add user {name}. {user_target} exists but is not a symlink"
            )
        os.remove(user_target)

    user_target.symlink_to(os.path.relpath(source, user_target.parent))
    if do_update_keys:
        update_keys(
            group_folder.parent,
            list(sorted(collect_keys_for_path(group_folder.parent))),
        )


def disallow_member(group_folder: Path, name: str) -> None:
    target = group_folder / name
    if not target.exists():
        msg = f"{name} does not exist in group in {group_folder}: "
        msg += list_directory(group_folder)
        raise ClanError(msg)

    keys = collect_keys_for_path(group_folder.parent)

    if len(keys) < 2:
        raise ClanError(
            f"Cannot remove {name} from {group_folder.parent.name}. No keys left. Use 'clan secrets remove {name}' to remove the secret."
        )
    os.remove(target)

    if len(os.listdir(group_folder)) == 0:
        os.rmdir(group_folder)

    if len(os.listdir(group_folder.parent)) == 0:
        os.rmdir(group_folder.parent)

    update_keys(
        target.parent.parent, list(sorted(collect_keys_for_path(group_folder.parent)))
    )


def has_secret(flake_name: FlakeName, secret: str) -> bool:
    return (sops_secrets_folder(flake_name) / secret / "secret").exists()


def list_secrets(flake_name: FlakeName) -> list[str]:
    path = sops_secrets_folder(flake_name)

    def validate(name: str) -> bool:
        return VALID_SECRET_NAME.match(name) is not None and has_secret(
            flake_name, name
        )

    return list_objects(path, validate)


def list_command(args: argparse.Namespace) -> None:
    lst = list_secrets(args.flake)
    if len(lst) > 0:
        print("\n".join(lst))


def decrypt_secret(flake_name: FlakeName, secret: str) -> str:
    ensure_sops_key(flake_name)
    secret_path = sops_secrets_folder(flake_name) / secret / "secret"
    if not secret_path.exists():
        raise ClanError(f"Secret '{secret}' does not exist")
    return decrypt_file(secret_path)


def get_command(args: argparse.Namespace) -> None:
    print(decrypt_secret(args.flake, args.secret), end="")


def set_command(args: argparse.Namespace) -> None:
    env_value = os.environ.get("SOPS_NIX_SECRET")
    secret_value: str | IO[str] | None = sys.stdin
    if args.edit:
        secret_value = None
    elif env_value:
        secret_value = env_value
    elif tty.is_interactive():
        secret_value = getpass.getpass(prompt="Paste your secret: ")
    encrypt_secret(
        args.flake,
        sops_secrets_folder(args.flake) / args.secret,
        secret_value,
        args.user,
        args.machine,
        args.group,
    )


def rename_command(args: argparse.Namespace) -> None:
    old_path = sops_secrets_folder(args.flake) / args.secret
    new_path = sops_secrets_folder(args.flake) / args.new_name
    if not old_path.exists():
        raise ClanError(f"Secret '{args.secret}' does not exist")
    if new_path.exists():
        raise ClanError(f"Secret '{args.new_name}' already exists")
    os.rename(old_path, new_path)


def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
    parser_list = subparser.add_parser("list", help="list secrets")
    parser_list.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_list.set_defaults(func=list_command)

    parser_get = subparser.add_parser("get", help="get a secret")
    add_secret_argument(parser_get)
    parser_get.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_get.set_defaults(func=get_command)

    parser_set = subparser.add_parser("set", help="set a secret")
    add_secret_argument(parser_set)
    parser_set.add_argument(
        "--group",
        type=str,
        action="append",
        default=[],
        help="the group that should have access to the secret (can be repeated)",
    )
    parser_set.add_argument(
        "--machine",
        type=str,
        action="append",
        default=[],
        help="the machine that should have access to the secret (can be repeated)",
    )
    parser_set.add_argument(
        "--user",
        type=str,
        action="append",
        default=[],
        help="the user that should have access to the secret (can be repeated)",
    )
    parser_set.add_argument(
        "-e",
        "--edit",
        action="store_true",
        default=False,
        help="edit the secret with $EDITOR instead of pasting it",
    )
    parser_set.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_set.set_defaults(func=set_command)

    parser_rename = subparser.add_parser("rename", help="rename a secret")
    add_secret_argument(parser_rename)
    parser_rename.add_argument("new_name", type=str, help="the new name of the secret")
    parser_rename.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_rename.set_defaults(func=rename_command)

    parser_remove = subparser.add_parser("remove", help="remove a secret")
    add_secret_argument(parser_remove)
    parser_remove.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_remove.set_defaults(func=remove_command)
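encrypt_secret resolves its recipients from the symlink structure that allow_member maintains underneath each secret. The layout it walks looks roughly like this; the exact root directories come from folders.py, which is not part of this hunk, so the paths below are indicative only:

    <secrets root>/<secret name>/
        secret            # sops-encrypted payload written by encrypt_file
        users/<user>      # symlink to that user's key directory (read via read_key)
        machines/<host>   # symlink to that machine's key directory
        groups/<group>    # symlink to a group; its users/ and machines/ keys are collected too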
@@ -1,219 +0,0 @@
import json
import os
import shutil
import subprocess
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Iterator

from ..dirs import user_config_dir
from ..errors import ClanError
from ..nix import nix_shell
from ..types import FlakeName
from .folders import sops_machines_folder, sops_users_folder


class SopsKey:
    def __init__(self, pubkey: str, username: str) -> None:
        self.pubkey = pubkey
        self.username = username


def get_public_key(privkey: str) -> str:
    cmd = nix_shell(["age"], ["age-keygen", "-y"])
    try:
        res = subprocess.run(
            cmd, input=privkey, check=True, stdout=subprocess.PIPE, text=True
        )
    except subprocess.CalledProcessError as e:
        raise ClanError(
            "Failed to get public key for age private key. Is the key malformed?"
        ) from e
    return res.stdout.strip()


def generate_private_key() -> tuple[str, str]:
    cmd = nix_shell(["age"], ["age-keygen"])
    try:
        proc = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, text=True)
        res = proc.stdout.strip()
        pubkey = None
        private_key = None
        for line in res.splitlines():
            if line.startswith("# public key:"):
                pubkey = line.split(":")[1].strip()
            if not line.startswith("#"):
                private_key = line
        if not pubkey:
            raise ClanError("Could not find public key in age-keygen output")
        if not private_key:
            raise ClanError("Could not find private key in age-keygen output")
        return private_key, pubkey
    except subprocess.CalledProcessError as e:
        raise ClanError("Failed to generate private sops key") from e


def get_user_name(flake_name: FlakeName, user: str) -> str:
    """Ask the user for their name until a unique one is provided."""
    while True:
        name = input(
            f"Your key is not yet added to the repository. Enter your user name for which your sops key will be stored in the repository [default: {user}]: "
        )
        if name:
            user = name
        if not (sops_users_folder(flake_name) / user).exists():
            return user
        print(f"{sops_users_folder(flake_name) / user} already exists")


def ensure_user_or_machine(flake_name: FlakeName, pub_key: str) -> SopsKey:
    key = SopsKey(pub_key, username="")
    folders = [sops_users_folder(flake_name), sops_machines_folder(flake_name)]
    for folder in folders:
        if folder.exists():
            for user in folder.iterdir():
                if not (user / "key.json").exists():
                    continue

                if read_key(user) == pub_key:
                    key.username = user.name
                    return key

    raise ClanError(
        f"Your sops key is not yet added to the repository. Please add it with 'clan secrets users add youruser {pub_key}' (replace youruser with your user name)"
    )


def default_sops_key_path() -> Path:
    raw_path = os.environ.get("SOPS_AGE_KEY_FILE")
    if raw_path:
        return Path(raw_path)
    else:
        return user_config_dir() / "sops" / "age" / "keys.txt"


def ensure_sops_key(flake_name: FlakeName) -> SopsKey:
    key = os.environ.get("SOPS_AGE_KEY")
    if key:
        return ensure_user_or_machine(flake_name, get_public_key(key))
    path = default_sops_key_path()
    if path.exists():
        return ensure_user_or_machine(flake_name, get_public_key(path.read_text()))
    else:
        raise ClanError(
            "No sops key found. Please generate one with 'clan secrets key generate'."
        )


@contextmanager
def sops_manifest(keys: list[str]) -> Iterator[Path]:
    with NamedTemporaryFile(delete=False, mode="w") as manifest:
        json.dump(
            dict(creation_rules=[dict(key_groups=[dict(age=keys)])]), manifest, indent=2
        )
        manifest.flush()
        yield Path(manifest.name)


def update_keys(secret_path: Path, keys: list[str]) -> None:
    with sops_manifest(keys) as manifest:
        cmd = nix_shell(
            ["sops"],
            [
                "sops",
                "--config",
                str(manifest),
                "updatekeys",
                "--yes",
                str(secret_path / "secret"),
            ],
        )
        res = subprocess.run(cmd)
        if res.returncode != 0:
            raise ClanError(
                f"Failed to update keys for {secret_path}: sops exited with {res.returncode}"
            )


def encrypt_file(
    secret_path: Path, content: IO[str] | str | None, keys: list[str]
) -> None:
    folder = secret_path.parent
    folder.mkdir(parents=True, exist_ok=True)

    with sops_manifest(keys) as manifest:
        if not content:
            args = ["sops", "--config", str(manifest)]
            args.extend([str(secret_path)])
            cmd = nix_shell(["sops"], args)
            p = subprocess.run(cmd)
            # returns 200 if the file is changed
            if p.returncode != 0 and p.returncode != 200:
                raise ClanError(
                    f"Failed to encrypt {secret_path}: sops exited with {p.returncode}"
                )
            return

        # hopefully /tmp is written to an in-memory file to avoid leaking secrets
        with NamedTemporaryFile(delete=False) as f:
            try:
                with open(f.name, "w") as fd:
                    if isinstance(content, str):
                        fd.write(content)
                    else:
                        shutil.copyfileobj(content, fd)
                # we pass an empty manifest to pick up existing configuration of the user
                args = ["sops", "--config", str(manifest)]
                args.extend(["-i", "--encrypt", str(f.name)])
                cmd = nix_shell(["sops"], args)
                subprocess.run(cmd, check=True)
                # atomic copy of the encrypted file
                with NamedTemporaryFile(dir=folder, delete=False) as f2:
                    shutil.copyfile(f.name, f2.name)
                    os.rename(f2.name, secret_path)
            finally:
                try:
                    os.remove(f.name)
                except OSError:
                    pass


def decrypt_file(secret_path: Path) -> str:
    with sops_manifest([]) as manifest:
        cmd = nix_shell(
            ["sops"], ["sops", "--config", str(manifest), "--decrypt", str(secret_path)]
        )
        res = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)
        if res.returncode != 0:
            raise ClanError(
                f"Failed to decrypt {secret_path}: sops exited with {res.returncode}"
            )
        return res.stdout


def write_key(path: Path, publickey: str, overwrite: bool) -> None:
    path.mkdir(parents=True, exist_ok=True)
    try:
        flags = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
        if not overwrite:
            flags |= os.O_EXCL
        fd = os.open(path / "key.json", flags)
    except FileExistsError:
        raise ClanError(f"{path.name} already exists in {path}")
    with os.fdopen(fd, "w") as f:
        json.dump({"publickey": publickey, "type": "age"}, f, indent=2)


def read_key(path: Path) -> str:
    with open(path / "key.json") as f:
        try:
            key = json.load(f)
        except json.JSONDecodeError as e:
            raise ClanError(f"Failed to decode {path.name}: {e}")
    if key["type"] != "age":
        raise ClanError(
            f"{path.name} is not an age key but {key['type']}. This is not supported"
        )
    publickey = key.get("publickey")
    if not publickey:
        raise ClanError(f"{path.name} does not contain a public key")
    return publickey
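sops_manifest materialises the recipient list as a temporary creation-rules config that every sops call is pointed at via --config. For two recipients (keys shortened here), the generated JSON is:

    {
      "creation_rules": [
        {
          "key_groups": [
            {
              "age": [
                "age1alice...",
                "age1host..."
              ]
            }
          ]
        }
      ]
    }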
@@ -1,131 +0,0 @@
import logging
import os
import shlex
import shutil
import subprocess
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any

from clan_cli.nix import nix_shell

from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..types import FlakeName
from .folders import sops_secrets_folder
from .machines import add_machine, has_machine
from .secrets import decrypt_secret, encrypt_secret, has_secret
from .sops import generate_private_key

log = logging.getLogger(__name__)


def generate_host_key(flake_name: FlakeName, machine_name: str) -> None:
    if has_machine(flake_name, machine_name):
        return
    priv_key, pub_key = generate_private_key()
    encrypt_secret(
        flake_name,
        sops_secrets_folder(flake_name) / f"{machine_name}-age.key",
        priv_key,
    )
    add_machine(flake_name, machine_name, pub_key, False)


def generate_secrets_group(
    flake_name: FlakeName,
    secret_group: str,
    machine_name: str,
    tempdir: Path,
    secret_options: dict[str, Any],
) -> None:
    clan_dir = specific_flake_dir(flake_name)
    secrets = secret_options["secrets"]
    needs_regeneration = any(
        not has_secret(flake_name, f"{machine_name}-{secret['name']}")
        for secret in secrets.values()
    )
    generator = secret_options["generator"]
    subdir = tempdir / secret_group
    if needs_regeneration:
        facts_dir = subdir / "facts"
        facts_dir.mkdir(parents=True)
        secrets_dir = subdir / "secrets"
        secrets_dir.mkdir(parents=True)

        text = f"""\
set -euo pipefail
export facts={shlex.quote(str(facts_dir))}
export secrets={shlex.quote(str(secrets_dir))}
{generator}
"""
        try:
            cmd = nix_shell(["bash"], ["bash", "-c", text])
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError:
            msg = "failed to run the following command:\n"
            msg += text
            raise ClanError(msg)
        for secret in secrets.values():
            secret_file = secrets_dir / secret["name"]
            if not secret_file.is_file():
                msg = f"did not generate a file for '{secret['name']}' when running the following command:\n"
                msg += text
                raise ClanError(msg)
            encrypt_secret(
                flake_name,
                sops_secrets_folder(flake_name) / f"{machine_name}-{secret['name']}",
                secret_file.read_text(),
                add_machines=[machine_name],
            )
        for fact in secret_options["facts"].values():
            fact_file = facts_dir / fact["name"]
            if not fact_file.is_file():
                msg = f"did not generate a file for '{fact['name']}' when running the following command:\n"
                msg += text
                raise ClanError(msg)
            fact_path = clan_dir.joinpath(fact["path"])
            fact_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(fact_file, fact_path)


# this is called by the sops.nix clan core module
def generate_secrets_from_nix(
    flake_name: FlakeName,
    machine_name: str,
    secret_submodules: dict[str, Any],
) -> None:
    generate_host_key(flake_name, machine_name)
    errors = {}
    log.debug("Generating secrets for machine %s and flake %s", machine_name, flake_name)
    with TemporaryDirectory() as d:
        # if any of the secrets are missing, we regenerate all connected facts/secrets
        for secret_group, secret_options in secret_submodules.items():
            try:
                generate_secrets_group(
                    flake_name, secret_group, machine_name, Path(d), secret_options
                )
            except ClanError as e:
                errors[secret_group] = e
    for secret_group, error in errors.items():
        print(f"failed to generate secrets for {machine_name}/{secret_group}:")
        print(error, file=sys.stderr)
    if len(errors) > 0:
        sys.exit(1)


# this is called by the sops.nix clan core module
def upload_age_key_from_nix(
    flake_name: FlakeName,
    machine_name: str,
) -> None:
    log.debug("Uploading secrets for machine %s and flake %s", machine_name, flake_name)
    secret_name = f"{machine_name}-age.key"
    if not has_secret(
        flake_name, secret_name
    ):  # skip uploading the secret, not managed by us
        return
    secret = decrypt_secret(flake_name, secret_name)

    secrets_dir = Path(os.environ["SECRETS_DIR"])
    (secrets_dir / "key.txt").write_text(secret)
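generate_secrets_group runs the configured generator in a bash nix-shell with $facts and $secrets pointing at per-group temporary directories, then expects one file per declared secret and fact. A generator is therefore a plain shell snippet along these lines; the file names are illustrative and must match the secrets/facts declared in the module options:

    head -c 32 /dev/urandom | base64 > "$secrets/borg-encryption-key"
    echo "borg repo initialised" > "$facts/borg-status"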
@@ -1,52 +0,0 @@
import argparse
import os
import re
from pathlib import Path
from typing import Callable

from ..errors import ClanError
from .sops import get_public_key

VALID_SECRET_NAME = re.compile(r"^[a-zA-Z0-9._-]+$")
VALID_USER_NAME = re.compile(r"^[a-z_]([a-z0-9_-]{0,31})?$")


def secret_name_type(arg_value: str) -> str:
    if not VALID_SECRET_NAME.match(arg_value):
        raise argparse.ArgumentTypeError(
            "Invalid character in secret name. Allowed characters are a-z, A-Z, 0-9, ., -, and _"
        )
    return arg_value


def public_or_private_age_key_type(arg_value: str) -> str:
    if os.path.isfile(arg_value):
        arg_value = Path(arg_value).read_text().strip()
    if arg_value.startswith("age1"):
        return arg_value.strip()
    if arg_value.startswith("AGE-SECRET-KEY-"):
        return get_public_key(arg_value)
    raise ClanError(
        f"Please provide an age key starting with age1 or AGE-SECRET-KEY-, got: '{arg_value}'"
    )


def group_or_user_name_type(what: str) -> Callable[[str], str]:
    def name_type(arg_value: str) -> str:
        if len(arg_value) > 32:
            raise argparse.ArgumentTypeError(
                f"{what.capitalize()} name must be at most 32 characters long"
            )
        if not VALID_USER_NAME.match(arg_value):
            raise argparse.ArgumentTypeError(
                f"Invalid character in {what} name. Allowed characters are a-z, 0-9, -, and _. Must start with a letter or _"
            )
        return arg_value

    return name_type


user_name_type = group_or_user_name_type("user")
group_name_type = group_or_user_name_type("group")
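These validators are ordinary argparse type callables, so they can be exercised directly. A quick sketch of accepted and rejected shapes (the age keys below are placeholders, not real keys):

    secret_name_type("wifi.password")         # ok: a-z, A-Z, 0-9, ., -, _
    user_name_type("alice")                   # ok
    group_name_type("Admins")                 # raises ArgumentTypeError (uppercase not allowed)
    public_or_private_age_key_type("age1...")             # returned unchanged
    public_or_private_age_key_type("AGE-SECRET-KEY-...")  # public key derived via age-keygen -y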
@@ -1,55 +0,0 @@
import argparse
import logging
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory

from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell

log = logging.getLogger(__name__)


def upload_secrets(machine: Machine) -> None:
    with TemporaryDirectory() as tempdir_:
        tempdir = Path(tempdir_)
        should_upload = machine.run_upload_secrets(tempdir)

        if should_upload:
            host = machine.host

            ssh_cmd = host.ssh_cmd()
            subprocess.run(
                nix_shell(
                    ["rsync"],
                    [
                        "rsync",
                        "-e",
                        " ".join(["ssh"] + ssh_cmd[2:]),
                        "-az",
                        "--delete",
                        f"{str(tempdir)}/",
                        f"{host.user}@{host.host}:{machine.secrets_upload_directory}/",
                    ],
                ),
                check=True,
            )


def upload_command(args: argparse.Namespace) -> None:
    machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
    upload_secrets(machine)


def register_upload_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument(
        "machine",
        help="The machine to upload secrets to",
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=upload_command)
@@ -1,155 +0,0 @@
import argparse

from ..types import FlakeName
from . import secrets
from .folders import list_objects, remove_object, sops_users_folder
from .sops import read_key, write_key
from .types import (
    VALID_USER_NAME,
    public_or_private_age_key_type,
    secret_name_type,
    user_name_type,
)


def add_user(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
    write_key(sops_users_folder(flake_name) / name, key, force)


def remove_user(flake_name: FlakeName, name: str) -> None:
    remove_object(sops_users_folder(flake_name), name)


def get_user(flake_name: FlakeName, name: str) -> str:
    return read_key(sops_users_folder(flake_name) / name)


def list_users(flake_name: FlakeName) -> list[str]:
    path = sops_users_folder(flake_name)

    def validate(name: str) -> bool:
        return (
            VALID_USER_NAME.match(name) is not None
            and (path / name / "key.json").exists()
        )

    return list_objects(path, validate)


def add_secret(flake_name: FlakeName, user: str, secret: str) -> None:
    secrets.allow_member(
        secrets.users_folder(flake_name, secret), sops_users_folder(flake_name), user
    )


def remove_secret(flake_name: FlakeName, user: str, secret: str) -> None:
    secrets.disallow_member(secrets.users_folder(flake_name, secret), user)


def list_command(args: argparse.Namespace) -> None:
    lst = list_users(args.flake)
    if len(lst) > 0:
        print("\n".join(lst))


def add_command(args: argparse.Namespace) -> None:
    add_user(args.flake, args.user, args.key, args.force)


def get_command(args: argparse.Namespace) -> None:
    print(get_user(args.flake, args.user))


def remove_command(args: argparse.Namespace) -> None:
    remove_user(args.flake, args.user)


def add_secret_command(args: argparse.Namespace) -> None:
    add_secret(args.flake, args.user, args.secret)


def remove_secret_command(args: argparse.Namespace) -> None:
    remove_secret(args.flake, args.user, args.secret)


def register_users_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    list_parser = subparser.add_parser("list", help="list users")
    list_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    list_parser.set_defaults(func=list_command)

    add_parser = subparser.add_parser("add", help="add a user")
    add_parser.add_argument(
        "-f", "--force", help="overwrite existing user", action="store_true"
    )
    add_parser.add_argument("user", help="the name of the user", type=user_name_type)
    add_parser.add_argument(
        "key",
        help="public key or private key of the user",
        type=public_or_private_age_key_type,
    )
    add_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_parser.set_defaults(func=add_command)

    get_parser = subparser.add_parser("get", help="get a user public key")
    get_parser.add_argument("user", help="the name of the user", type=user_name_type)
    get_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    get_parser.set_defaults(func=get_command)

    remove_parser = subparser.add_parser("remove", help="remove a user")
    remove_parser.add_argument("user", help="the name of the user", type=user_name_type)
    remove_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_parser.set_defaults(func=remove_command)

    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a user to access a secret"
    )
    add_secret_parser.add_argument(
        "user", help="the name of the user", type=user_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    add_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_secret_parser.set_defaults(func=add_secret_command)

    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a user's access to a secret"
    )
    remove_secret_parser.add_argument(
        "user", help="the name of the user", type=user_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    remove_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)
@@ -1,863 +0,0 @@
|
||||
# Adapted from https://github.com/numtide/deploykit
|
||||
|
||||
import fcntl
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import select
|
||||
import shlex
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from contextlib import ExitStack, contextmanager
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from shlex import quote
|
||||
from threading import Thread
|
||||
from typing import (
|
||||
IO,
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generic,
|
||||
Iterator,
|
||||
List,
|
||||
Literal,
|
||||
Optional,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
# https://no-color.org
|
||||
DISABLE_COLOR = not sys.stderr.isatty() or os.environ.get("NO_COLOR", "") != ""
|
||||
|
||||
|
||||
def ansi_color(color: int) -> str:
|
||||
return f"\x1b[{color}m"
|
||||
|
||||
|
||||
class CommandFormatter(logging.Formatter):
|
||||
"""
|
||||
print errors in red and warnings in yellow
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(
|
||||
"%(prefix_color)s[%(command_prefix)s]%(color_reset)s %(color)s%(message)s%(color_reset)s"
|
||||
)
|
||||
self.hostnames: List[str] = []
|
||||
self.hostname_color_offset = 1 # first host shouldn't get agressive red
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
colorcode = 0
|
||||
if record.levelno == logging.ERROR:
|
||||
colorcode = 31 # red
|
||||
if record.levelno == logging.WARN:
|
||||
colorcode = 33 # yellow
|
||||
|
||||
color, prefix_color, color_reset = "", "", ""
|
||||
if not DISABLE_COLOR:
|
||||
command_prefix = getattr(record, "command_prefix", "")
|
||||
color = ansi_color(colorcode)
|
||||
prefix_color = ansi_color(self.hostname_colorcode(command_prefix))
|
||||
color_reset = "\x1b[0m"
|
||||
|
||||
setattr(record, "color", color)
|
||||
setattr(record, "prefix_color", prefix_color)
|
||||
setattr(record, "color_reset", color_reset)
|
||||
|
||||
return super().format(record)
|
||||
|
||||
def hostname_colorcode(self, hostname: str) -> int:
|
||||
try:
|
||||
index = self.hostnames.index(hostname)
|
||||
except ValueError:
|
||||
self.hostnames += [hostname]
|
||||
index = self.hostnames.index(hostname)
|
||||
return 31 + (index + self.hostname_color_offset) % 7
|
||||
|
||||
|
||||
def setup_loggers() -> Tuple[logging.Logger, logging.Logger]:
|
||||
# If we use the default logger here (logging.error etc) or a logger called
|
||||
# "deploykit", then cmdlog messages are also posted on the default logger.
|
||||
# To avoid this message duplication, we set up a main and command logger
|
||||
# and use a "deploykit" main logger.
|
||||
kitlog = logging.getLogger("deploykit.main")
|
||||
kitlog.setLevel(logging.INFO)
|
||||
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(logging.INFO)
|
||||
ch.setFormatter(logging.Formatter())
|
||||
|
||||
kitlog.addHandler(ch)
|
||||
|
||||
# use specific logger for command outputs
|
||||
cmdlog = logging.getLogger("deploykit.command")
|
||||
cmdlog.setLevel(logging.INFO)
|
||||
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(logging.INFO)
|
||||
ch.setFormatter(CommandFormatter())
|
||||
|
||||
cmdlog.addHandler(ch)
|
||||
return (kitlog, cmdlog)
|
||||
|
||||
|
||||
# loggers for: general deploykit, command output
|
||||
kitlog, cmdlog = setup_loggers()
|
||||
|
||||
info = kitlog.info
|
||||
warn = kitlog.warning
|
||||
error = kitlog.error
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _pipe() -> Iterator[Tuple[IO[str], IO[str]]]:
|
||||
(pipe_r, pipe_w) = os.pipe()
|
||||
read_end = os.fdopen(pipe_r, "r")
|
||||
write_end = os.fdopen(pipe_w, "w")
|
||||
|
||||
try:
|
||||
fl = fcntl.fcntl(read_end, fcntl.F_GETFL)
|
||||
fcntl.fcntl(read_end, fcntl.F_SETFL, fl | os.O_NONBLOCK)
|
||||
|
||||
yield (read_end, write_end)
|
||||
finally:
|
||||
read_end.close()
|
||||
write_end.close()
|
||||
|
||||
|
||||
FILE = Union[None, int]
|
||||
|
||||
# Seconds until a message is printed when _run produces no output.
|
||||
NO_OUTPUT_TIMEOUT = 20
|
||||
|
||||
|
||||
class HostKeyCheck(Enum):
|
||||
# Strictly check ssh host keys, prompt for unknown ones
|
||||
STRICT = 0
|
||||
# Trust on ssh keys on first use
|
||||
TOFU = 1
|
||||
# Do not check ssh host keys
|
||||
NONE = 2
|
||||
|
||||
|
||||
class Host:
|
||||
def __init__(
|
||||
self,
|
||||
host: str,
|
||||
user: Optional[str] = None,
|
||||
port: Optional[int] = None,
|
||||
key: Optional[str] = None,
|
||||
forward_agent: bool = False,
|
||||
command_prefix: Optional[str] = None,
|
||||
host_key_check: HostKeyCheck = HostKeyCheck.STRICT,
|
||||
meta: Dict[str, Any] = {},
|
||||
verbose_ssh: bool = False,
|
||||
ssh_options: dict[str, str] = {},
|
||||
) -> None:
|
||||
"""
|
||||
Creates a Host
|
||||
@host the hostname to connect to via ssh
|
||||
@port the port to connect to via ssh
|
||||
@forward_agent: wheter to forward ssh agent
|
||||
@command_prefix: string to prefix each line of the command output with, defaults to host
|
||||
@host_key_check: wether to check ssh host keys
|
||||
@verbose_ssh: Enables verbose logging on ssh connections
|
||||
@meta: meta attributes associated with the host. Those can be accessed in custom functions passed to `run_function`
|
||||
"""
|
||||
self.host = host
|
||||
self.user = user
|
||||
self.port = port
|
||||
self.key = key
|
||||
if command_prefix:
|
||||
self.command_prefix = command_prefix
|
||||
else:
|
||||
self.command_prefix = host
|
||||
self.forward_agent = forward_agent
|
||||
self.host_key_check = host_key_check
|
||||
self.meta = meta
|
||||
self.verbose_ssh = verbose_ssh
|
||||
self.ssh_options = ssh_options
|
||||
|
||||
def _prefix_output(
|
||||
self,
|
||||
displayed_cmd: str,
|
||||
print_std_fd: Optional[IO[str]],
|
||||
print_err_fd: Optional[IO[str]],
|
||||
stdout: Optional[IO[str]],
|
||||
stderr: Optional[IO[str]],
|
||||
timeout: float = math.inf,
|
||||
) -> Tuple[str, str]:
|
||||
rlist = []
|
||||
if print_std_fd is not None:
|
||||
rlist.append(print_std_fd)
|
||||
if print_err_fd is not None:
|
||||
rlist.append(print_err_fd)
|
||||
if stdout is not None:
|
||||
rlist.append(stdout)
|
||||
|
||||
if stderr is not None:
|
||||
rlist.append(stderr)
|
||||
|
||||
print_std_buf = ""
|
||||
print_err_buf = ""
|
||||
stdout_buf = ""
|
||||
stderr_buf = ""
|
||||
|
||||
start = time.time()
|
||||
last_output = time.time()
|
||||
while len(rlist) != 0:
|
||||
r, _, _ = select.select(rlist, [], [], min(timeout, NO_OUTPUT_TIMEOUT))
|
||||
|
||||
def print_from(
|
||||
print_fd: IO[str], print_buf: str, is_err: bool = False
|
||||
) -> Tuple[float, str]:
|
||||
read = os.read(print_fd.fileno(), 4096)
|
||||
if len(read) == 0:
|
||||
rlist.remove(print_fd)
|
||||
print_buf += read.decode("utf-8")
|
||||
if (read == b"" and len(print_buf) != 0) or "\n" in print_buf:
|
||||
# print and empty the print_buf, if the stream is draining,
|
||||
# but there is still something in the buffer or on newline.
|
||||
lines = print_buf.rstrip("\n").split("\n")
|
||||
for line in lines:
|
||||
if not is_err:
|
||||
cmdlog.info(
|
||||
line, extra=dict(command_prefix=self.command_prefix)
|
||||
)
|
||||
pass
|
||||
else:
|
||||
cmdlog.error(
|
||||
line, extra=dict(command_prefix=self.command_prefix)
|
||||
)
|
||||
print_buf = ""
|
||||
last_output = time.time()
|
||||
return (last_output, print_buf)
|
||||
|
||||
if print_std_fd in r and print_std_fd is not None:
|
||||
(last_output, print_std_buf) = print_from(
|
||||
print_std_fd, print_std_buf, is_err=False
|
||||
)
|
||||
if print_err_fd in r and print_err_fd is not None:
|
||||
(last_output, print_err_buf) = print_from(
|
||||
print_err_fd, print_err_buf, is_err=True
|
||||
)
|
||||
|
||||
now = time.time()
|
||||
elapsed = now - start
|
||||
if now - last_output > NO_OUTPUT_TIMEOUT:
|
||||
elapsed_msg = time.strftime("%H:%M:%S", time.gmtime(elapsed))
|
||||
cmdlog.warn(
|
||||
f"still waiting for '{displayed_cmd}' to finish... ({elapsed_msg} elapsed)",
|
||||
extra=dict(command_prefix=self.command_prefix),
|
||||
)
|
||||
|
||||
def handle_fd(fd: Optional[IO[Any]]) -> str:
|
||||
if fd and fd in r:
|
||||
read = os.read(fd.fileno(), 4096)
|
||||
if len(read) == 0:
|
||||
rlist.remove(fd)
|
||||
else:
|
||||
return read.decode("utf-8")
|
||||
return ""
|
||||
|
||||
stdout_buf += handle_fd(stdout)
|
||||
stderr_buf += handle_fd(stderr)
|
||||
|
||||
if now - last_output >= timeout:
|
||||
break
|
||||
return stdout_buf, stderr_buf
|
||||
|
||||
def _run(
|
||||
self,
|
||||
cmd: List[str],
|
||||
displayed_cmd: str,
|
||||
shell: bool,
|
||||
stdout: FILE = None,
|
||||
stderr: FILE = None,
|
||||
extra_env: Dict[str, str] = {},
|
||||
cwd: Union[None, str, Path] = None,
|
||||
check: bool = True,
|
||||
timeout: float = math.inf,
|
||||
) -> subprocess.CompletedProcess[str]:
|
||||
with ExitStack() as stack:
|
||||
read_std_fd, write_std_fd = (None, None)
|
||||
read_err_fd, write_err_fd = (None, None)
|
||||
|
||||
if stdout is None or stderr is None:
|
||||
read_std_fd, write_std_fd = stack.enter_context(_pipe())
|
||||
read_err_fd, write_err_fd = stack.enter_context(_pipe())
|
||||
|
||||
if stdout is None:
|
||||
stdout_read = None
|
||||
stdout_write = write_std_fd
|
||||
elif stdout == subprocess.PIPE:
|
||||
stdout_read, stdout_write = stack.enter_context(_pipe())
|
||||
else:
|
||||
raise Exception(f"unsupported value for stdout parameter: {stdout}")
|
||||
|
||||
if stderr is None:
|
||||
stderr_read = None
|
||||
stderr_write = write_err_fd
|
||||
elif stderr == subprocess.PIPE:
|
||||
stderr_read, stderr_write = stack.enter_context(_pipe())
|
||||
else:
|
||||
raise Exception(f"unsupported value for stderr parameter: {stderr}")
|
||||
|
||||
env = os.environ.copy()
|
||||
env.update(extra_env)
|
||||
|
||||
with subprocess.Popen(
|
||||
cmd,
|
||||
text=True,
|
||||
shell=shell,
|
||||
stdout=stdout_write,
|
||||
stderr=stderr_write,
|
||||
env=env,
|
||||
cwd=cwd,
|
||||
) as p:
|
||||
if write_std_fd is not None:
|
||||
write_std_fd.close()
|
||||
if write_err_fd is not None:
|
||||
write_err_fd.close()
|
||||
if stdout == subprocess.PIPE:
|
||||
assert stdout_write is not None
|
||||
stdout_write.close()
|
||||
if stderr == subprocess.PIPE:
|
||||
assert stderr_write is not None
|
||||
stderr_write.close()
|
||||
|
||||
start = time.time()
|
||||
stdout_data, stderr_data = self._prefix_output(
|
||||
displayed_cmd,
|
||||
read_std_fd,
|
||||
read_err_fd,
|
||||
stdout_read,
|
||||
stderr_read,
|
||||
timeout,
|
||||
)
|
||||
try:
|
||||
ret = p.wait(timeout=max(0, timeout - (time.time() - start)))
|
||||
except subprocess.TimeoutExpired:
|
||||
p.kill()
|
||||
raise
|
||||
if ret != 0:
|
||||
if check:
|
||||
raise subprocess.CalledProcessError(
|
||||
ret, cmd=cmd, output=stdout_data, stderr=stderr_data
|
||||
)
|
||||
else:
|
||||
cmdlog.warning(
|
||||
f"[Command failed: {ret}] {displayed_cmd}",
|
||||
extra=dict(command_prefix=self.command_prefix),
|
||||
)
|
||||
return subprocess.CompletedProcess(
|
||||
cmd, ret, stdout=stdout_data, stderr=stderr_data
|
||||
)
|
||||
raise RuntimeError("unreachable")
|
||||
|
||||
def run_local(
|
||||
self,
|
||||
cmd: Union[str, List[str]],
|
||||
stdout: FILE = None,
|
||||
stderr: FILE = None,
|
||||
extra_env: Dict[str, str] = {},
|
||||
cwd: Union[None, str, Path] = None,
|
||||
check: bool = True,
|
||||
timeout: float = math.inf,
|
||||
) -> subprocess.CompletedProcess[str]:
|
||||
"""
|
||||
Command to run locally for the host
|
||||
|
||||
@cmd the commmand to run
|
||||
@stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
|
||||
@stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
|
||||
@extra_env environment variables to override whe running the command
|
||||
@cwd current working directory to run the process in
|
||||
@timeout: Timeout in seconds for the command to complete
|
||||
|
||||
@return subprocess.CompletedProcess result of the command
|
||||
"""
|
||||
shell = False
|
||||
if isinstance(cmd, str):
|
||||
cmd = [cmd]
|
||||
shell = True
|
||||
displayed_cmd = " ".join(cmd)
|
||||
cmdlog.info(
|
||||
f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
|
||||
)
|
||||
return self._run(
|
||||
cmd,
|
||||
displayed_cmd,
|
||||
shell=shell,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
extra_env=extra_env,
|
||||
cwd=cwd,
|
||||
check=check,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
    def run(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        become_root: bool = False,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> subprocess.CompletedProcess[str]:
        """
        Command to run on the host via ssh

        @cmd the command to run
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @become_root if the ssh_user is not root then sudo is prepended
        @extra_env environment variables to override when running the command
        @cwd current working directory to run the process in
        @verbose_ssh: Enables verbose logging on ssh connections
        @timeout: Timeout in seconds for the command to complete

        @return subprocess.CompletedProcess result of the ssh command
        """
        sudo = ""
        if become_root and self.user != "root":
            sudo = "sudo -- "
        vars = []
        for k, v in extra_env.items():
            vars.append(f"{shlex.quote(k)}={shlex.quote(v)}")

        displayed_cmd = ""
        export_cmd = ""
        if vars:
            export_cmd = f"export {' '.join(vars)}; "
            displayed_cmd += export_cmd
        if isinstance(cmd, list):
            displayed_cmd += " ".join(cmd)
        else:
            displayed_cmd += cmd
        cmdlog.info(
            f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
        )

        bash_cmd = export_cmd
        bash_args = []
        if isinstance(cmd, list):
            bash_cmd += 'exec "$@"'
            bash_args += cmd
        else:
            bash_cmd += cmd
        # FIXME: we assume bash is present on the remote host; this should be documented
        ssh_cmd = self.ssh_cmd(verbose_ssh=verbose_ssh) + [
            "--",
            f"{sudo}bash -c {quote(bash_cmd)} -- {' '.join(map(quote, bash_args))}",
        ]
        return self._run(
            ssh_cmd,
            displayed_cmd,
            shell=False,
            stdout=stdout,
            stderr=stderr,
            cwd=cwd,
            check=check,
            timeout=timeout,
        )

    def ssh_cmd(
        self,
        verbose_ssh: bool = False,
    ) -> List:
        if self.user is not None:
            ssh_target = f"{self.user}@{self.host}"
        else:
            ssh_target = self.host

        ssh_opts = ["-A"] if self.forward_agent else []

        for k, v in self.ssh_options.items():
            ssh_opts.extend(["-o", f"{k}={shlex.quote(v)}"])

        if self.port:
            ssh_opts.extend(["-p", str(self.port)])
        if self.key:
            ssh_opts.extend(["-i", self.key])

        if self.host_key_check != HostKeyCheck.STRICT:
            ssh_opts.extend(["-o", "StrictHostKeyChecking=no"])
        if self.host_key_check == HostKeyCheck.NONE:
            ssh_opts.extend(["-o", "UserKnownHostsFile=/dev/null"])
        if verbose_ssh or self.verbose_ssh:
            ssh_opts.extend(["-v"])

        return ["ssh", ssh_target] + ssh_opts


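As a rough usage sketch (not part of the original module), a Host built with the fields referenced above could run a remote command like this; the hostname, user, and command are made up:

# Illustrative only: assumes the Host constructor accepts the attributes used above
# (host, user, port, key, forward_agent, host_key_check, ssh_options, command_prefix).
host = Host("server.example.org", user="admin")
proc = host.run(
    "systemctl is-system-running",
    become_root=True,               # prepends "sudo -- " because user != "root"
    extra_env={"LC_ALL": "C"},      # exported on the remote side before the command
    stdout=subprocess.PIPE,
)
print(proc.stdout)
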
T = TypeVar("T")


class HostResult(Generic[T]):
    def __init__(self, host: Host, result: Union[T, Exception]) -> None:
        self.host = host
        self._result = result

    @property
    def error(self) -> Optional[Exception]:
        """
        Returns an error if the command failed
        """
        if isinstance(self._result, Exception):
            return self._result
        return None

    @property
    def result(self) -> T:
        """
        Unwrap the result
        """
        if isinstance(self._result, Exception):
            raise self._result
        return self._result


Results = List[HostResult[subprocess.CompletedProcess[str]]]


def _worker(
    func: Callable[[Host], T],
    host: Host,
    results: List[HostResult[T]],
    idx: int,
) -> None:
    try:
        results[idx] = HostResult(host, func(host))
    except Exception as e:
        kitlog.exception(e)
        results[idx] = HostResult(host, e)


class HostGroup:
    def __init__(self, hosts: List[Host]) -> None:
        self.hosts = hosts

    def _run_local(
        self,
        cmd: Union[str, List[str]],
        host: Host,
        results: Results,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> None:
        try:
            proc = host.run_local(
                cmd,
                stdout=stdout,
                stderr=stderr,
                extra_env=extra_env,
                cwd=cwd,
                check=check,
                timeout=timeout,
            )
            results.append(HostResult(host, proc))
        except Exception as e:
            kitlog.exception(e)
            results.append(HostResult(host, e))

    def _run_remote(
        self,
        cmd: Union[str, List[str]],
        host: Host,
        results: Results,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> None:
        try:
            proc = host.run(
                cmd,
                stdout=stdout,
                stderr=stderr,
                extra_env=extra_env,
                cwd=cwd,
                check=check,
                verbose_ssh=verbose_ssh,
                timeout=timeout,
            )
            results.append(HostResult(host, proc))
        except Exception as e:
            kitlog.exception(e)
            results.append(HostResult(host, e))

    def _reraise_errors(self, results: List[HostResult[Any]]) -> None:
        errors = 0
        for result in results:
            e = result.error
            if e:
                cmdlog.error(
                    f"failed with: {e}",
                    extra=dict(command_prefix=result.host.command_prefix),
                )
                errors += 1
        if errors > 0:
            raise Exception(
                f"{errors} hosts failed with an error. Check the logs above"
            )

    def _run(
        self,
        cmd: Union[str, List[str]],
        local: bool = False,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> Results:
        results: Results = []
        threads = []
        for host in self.hosts:
            fn = self._run_local if local else self._run_remote
            thread = Thread(
                target=fn,
                kwargs=dict(
                    results=results,
                    cmd=cmd,
                    host=host,
                    stdout=stdout,
                    stderr=stderr,
                    extra_env=extra_env,
                    cwd=cwd,
                    check=check,
                    verbose_ssh=verbose_ssh,
                    timeout=timeout,
                ),
            )
            thread.start()
            threads.append(thread)

        for thread in threads:
            thread.join()

        if check:
            self._reraise_errors(results)

        return results

    def run(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> Results:
        """
        Command to run on the remote hosts via ssh
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @cwd current working directory to run the process in
        @verbose_ssh: Enables verbose logging on ssh connections
        @timeout: Timeout in seconds for the command to complete

        @return a list of HostResults, one for each host in the group
        """
        return self._run(
            cmd,
            stdout=stdout,
            stderr=stderr,
            extra_env=extra_env,
            cwd=cwd,
            check=check,
            verbose_ssh=verbose_ssh,
            timeout=timeout,
        )

    def run_local(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        timeout: float = math.inf,
    ) -> Results:
        """
        Command to run locally for each host in the group in parallel
        @cmd the command to run
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @cwd current working directory to run the process in
        @extra_env environment variables to override when running the command
        @timeout: Timeout in seconds for the command to complete

        @return a list of HostResults, one for each host in the group
        """
        return self._run(
            cmd,
            local=True,
            stdout=stdout,
            stderr=stderr,
            extra_env=extra_env,
            cwd=cwd,
            check=check,
            timeout=timeout,
        )

    def run_function(
        self, func: Callable[[Host], T], check: bool = True
    ) -> List[HostResult[T]]:
        """
        Function to run for each host in the group in parallel

        @func the function to call
        """
        threads = []
        results: List[HostResult[T]] = [
            HostResult(h, Exception(f"No result set for thread {i}"))
            for (i, h) in enumerate(self.hosts)
        ]
        for i, host in enumerate(self.hosts):
            thread = Thread(
                target=_worker,
                args=(func, host, results, i),
            )
            threads.append(thread)

        for thread in threads:
            thread.start()

        for thread in threads:
            thread.join()
        if check:
            self._reraise_errors(results)
        return results

    def filter(self, pred: Callable[[Host], bool]) -> "HostGroup":
        """Return a new HostGroup containing only the hosts matching the predicate"""
        return HostGroup(list(filter(pred, self.hosts)))


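A brief sketch of how a HostGroup might be used; the hostnames are placeholders and the Host constructor is assumed to be the one referenced above:

group = HostGroup([Host("node1.example.org", user="root"), Host("node2.example.org", user="root")])
# run() fans the command out to all hosts in parallel and returns one HostResult per host.
for res in group.run("uptime", stdout=subprocess.PIPE):
    print(res.host.host, res.result.stdout.strip())
# run_function() covers cases where a plain shell command is not enough.
group.run_function(lambda host: host.run("nixos-version"))
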
def parse_deployment_address(
    machine_name: str, host: str, meta: dict[str, Any] = {}
) -> Host:
    parts = host.split("@")
    user: Optional[str] = None
    if len(parts) > 1:
        user = parts[0]
        hostname = parts[1]
    else:
        hostname = parts[0]
    maybe_options = hostname.split("?")
    options: Dict[str, str] = {}
    if len(maybe_options) > 1:
        hostname = maybe_options[0]
        for option in maybe_options[1].split("&"):
            k, v = option.split("=")
            options[k] = v
    maybe_port = hostname.split(":")
    port = None
    if len(maybe_port) > 1:
        hostname = maybe_port[0]
        port = int(maybe_port[1])
    meta = meta.copy()
    meta["flake_attr"] = machine_name
    return Host(
        hostname,
        user=user,
        port=port,
        command_prefix=machine_name,
        meta=meta,
        ssh_options=options,
    )


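For illustration, an address of the form user@host:port?sshOption=value decomposes as follows, assuming Host stores its constructor arguments under the same attribute names; the machine name, host, and option are made up:

host = parse_deployment_address(
    "web01",
    "deploy@web01.example.org:2222?IdentityFile=/etc/ssh/deploy_key",
)
# host.user == "deploy", host.host == "web01.example.org", host.port == 2222
# host.ssh_options == {"IdentityFile": "/etc/ssh/deploy_key"}
# host.meta["flake_attr"] == "web01"
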
@overload
def run(
    cmd: Union[List[str], str],
    text: Literal[True] = ...,
    stdout: FILE = ...,
    stderr: FILE = ...,
    extra_env: Dict[str, str] = ...,
    cwd: Union[None, str, Path] = ...,
    check: bool = ...,
) -> subprocess.CompletedProcess[str]:
    ...


@overload
def run(
    cmd: Union[List[str], str],
    text: Literal[False],
    stdout: FILE = ...,
    stderr: FILE = ...,
    extra_env: Dict[str, str] = ...,
    cwd: Union[None, str, Path] = ...,
    check: bool = ...,
) -> subprocess.CompletedProcess[bytes]:
    ...


def run(
    cmd: Union[List[str], str],
    text: bool = True,
    stdout: FILE = None,
    stderr: FILE = None,
    extra_env: Dict[str, str] = {},
    cwd: Union[None, str, Path] = None,
    check: bool = True,
) -> subprocess.CompletedProcess[Any]:
    """
    Run command locally

    @cmd if this parameter is a string the command is interpreted as a shell command,
         otherwise if it is a list, then the first list element is the command
         and the remaining list elements are passed as arguments to the
         command.
    @text when true, file objects for stdout and stderr are opened in text mode.
    @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
    @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
    @extra_env environment variables to override when running the command
    @cwd current working directory to run the process in
    @check If check is true, and the process exits with a non-zero exit code, a
           CalledProcessError exception will be raised. Attributes of that exception
           hold the arguments, the exit code, and stdout and stderr if they were
           captured.
    """
    if isinstance(cmd, list):
        info("$ " + " ".join(cmd))
    else:
        info(f"$ {cmd}")
    env = os.environ.copy()
    env.update(extra_env)

    return subprocess.run(
        cmd,
        stdout=stdout,
        stderr=stderr,
        env=env,
        cwd=cwd,
        check=check,
        shell=not isinstance(cmd, list),
        text=text,
    )
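A small usage sketch of this local run helper; the commands are examples only:

run("echo $HOME", stdout=subprocess.PIPE)              # a string is run through the shell
run(["ls", "-l", "/tmp"], extra_env={"LC_ALL": "C"})   # a list is executed directly
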
@@ -1,80 +0,0 @@
import argparse
import json
import subprocess
from typing import Optional

from ..nix import nix_shell


def ssh(
    host: str,
    user: str = "root",
    password: Optional[str] = None,
    ssh_args: list[str] = [],
) -> None:
    packages = ["tor", "openssh"]
    password_args = []
    if password:
        packages.append("sshpass")
        password_args = [
            "sshpass",
            "-p",
            password,
        ]
    _ssh_args = ssh_args + [
        "ssh",
        "-o",
        "UserKnownHostsFile=/dev/null",
        "-o",
        "StrictHostKeyChecking=no",
        f"{user}@{host}",
    ]
    cmd = nix_shell(packages, ["torify"] + password_args + _ssh_args)
    subprocess.run(cmd)


def qrcode_scan(picture_file: str) -> str:
    return (
        subprocess.run(
            nix_shell(
                ["zbar"],
                [
                    "zbarimg",
                    "--quiet",
                    "--raw",
                    picture_file,
                ],
            ),
            stdout=subprocess.PIPE,
            check=True,
        )
        .stdout.decode()
        .strip()
    )


def main(args: argparse.Namespace) -> None:
    if args.json:
        with open(args.json) as file:
            ssh_data = json.load(file)
        ssh(host=ssh_data["address"], password=ssh_data["password"])
    elif args.png:
        ssh_data = json.loads(qrcode_scan(args.png))
        ssh(host=ssh_data["address"], password=ssh_data["password"])


def register_parser(parser: argparse.ArgumentParser) -> None:
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "-j",
        "--json",
        help="specify the json file for ssh data (generated by starting the clan installer)",
    )
    group.add_argument(
        "-P",
        "--png",
        help="specify a png file containing a qr code with the ssh data (generated by starting the clan installer)",
    )
    # TODO pass all args we don't parse into ssh_args; currently it fails if an arg starts with -
    parser.add_argument("ssh_args", nargs="*", default=[])
    parser.set_defaults(func=main)
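For context, the removed ssh helper could also be driven directly from Python instead of through the argparse entry point; the file name below is a made-up example:

# Roughly what `--json <file>` does: read address and password, then ssh over tor.
with open("deployment.json") as f:
    data = json.load(f)
ssh(host=data["address"], password=data["password"])
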
@@ -1,21 +0,0 @@
import argparse

from .create import register_create_parser
from .inspect import register_inspect_parser


def register_parser(parser: argparse.ArgumentParser) -> None:
    subparser = parser.add_subparsers(
        title="command",
        description="command to execute",
        help="the command to execute",
        required=True,
    )

    inspect_parser = subparser.add_parser(
        "inspect", help="inspect the vm configuration"
    )
    register_inspect_parser(inspect_parser)

    create_parser = subparser.add_parser("create", help="create a VM from a machine")
    register_create_parser(create_parser)
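A minimal sketch of how these registrations wire up when attached to a standalone parser; the machine and flake names are placeholders, and actually running it requires a real flake:

parser = argparse.ArgumentParser(prog="vms")
register_parser(parser)
# Positional order is machine first, then flake (see the create/inspect parsers below).
args = parser.parse_args(["inspect", "my-machine", "my-flake"])
args.func(args)
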
@@ -1,181 +0,0 @@
import argparse
import asyncio
import json
import os
import shlex
import sys
from pathlib import Path
from typing import Iterator, Dict
from uuid import UUID

from ..dirs import clan_flakes_dir, specific_flake_dir
from ..nix import nix_build, nix_config, nix_eval, nix_shell
from ..task_manager import BaseTask, Command, create_task
from ..types import validate_path
from .inspect import VmConfig, inspect_vm
from ..errors import ClanError
from ..debug import repro_env_break


class BuildVmTask(BaseTask):
    def __init__(self, uuid: UUID, vm: VmConfig) -> None:
        super().__init__(uuid, num_cmds=7)
        self.vm = vm

    def get_vm_create_info(self, cmds: Iterator[Command]) -> dict:
        config = nix_config()
        system = config["system"]

        clan_dir = self.vm.flake_url
        machine = self.vm.flake_attr
        cmd = next(cmds)
        cmd.run(
            nix_build(
                [
                    f'{clan_dir}#clanInternals.machines."{system}"."{machine}".config.system.clan.vm.create'
                ]
            )
        )
        vm_json = "".join(cmd.stdout).strip()
        self.log.debug(f"VM JSON path: {vm_json}")
        with open(vm_json) as f:
            return json.load(f)

    def get_clan_name(self, cmds: Iterator[Command]) -> str:
        clan_dir = self.vm.flake_url
        cmd = next(cmds)
        cmd.run(nix_eval([f"{clan_dir}#clanInternals.clanName"]))
        clan_name = cmd.stdout[0].strip().strip('"')
        return clan_name

    def run(self) -> None:
        cmds = self.commands()

        machine = self.vm.flake_attr
        self.log.debug(f"Creating VM for {machine}")

        # TODO: We should get this from the vm argument
        vm_config = self.get_vm_create_info(cmds)
        clan_name = self.get_clan_name(cmds)

        self.log.debug(f"Building VM for clan name: {clan_name}")

        flake_dir = clan_flakes_dir() / clan_name
        validate_path(clan_flakes_dir(), flake_dir)
        flake_dir.mkdir(exist_ok=True)

        xchg_dir = flake_dir / "xchg"
        xchg_dir.mkdir()
        secrets_dir = flake_dir / "secrets"
        secrets_dir.mkdir()
        disk_img = f"{flake_dir}/disk.img"

        env = os.environ.copy()
        env["CLAN_DIR"] = str(self.vm.flake_url)

        env["PYTHONPATH"] = str(
            ":".join(sys.path)
        )  # TODO do this in the clanCore module
        env["SECRETS_DIR"] = str(secrets_dir)

        cmd = next(cmds)
        repro_env_break(work_dir=flake_dir, env=env, cmd=[vm_config["generateSecrets"], clan_name])
        if Path(self.vm.flake_url).is_dir():
            cmd.run(
                [vm_config["generateSecrets"], clan_name],
                env=env,
            )
        else:
            self.log.warning("won't generate secrets for non local clan")

        cmd = next(cmds)
        cmd.run(
            [vm_config["uploadSecrets"]],
            env=env,
        )

        cmd = next(cmds)
        cmd.run(
            nix_shell(
                ["qemu"],
                [
                    "qemu-img",
                    "create",
                    "-f",
                    "raw",
                    disk_img,
                    "1024M",
                ],
            )
        )

        cmd = next(cmds)
        cmd.run(
            nix_shell(
                ["e2fsprogs"],
                [
                    "mkfs.ext4",
                    "-L",
                    "nixos",
                    disk_img,
                ],
            )
        )

        cmd = next(cmds)
        cmdline = [
            (Path(vm_config["toplevel"]) / "kernel-params").read_text(),
            f'init={vm_config["toplevel"]}/init',
            f'regInfo={vm_config["regInfo"]}/registration',
            "console=ttyS0,115200n8",
            "console=tty0",
        ]
        qemu_command = [
            # fmt: off
            "qemu-kvm",
            "-name", machine,
            "-m", f'{vm_config["memorySize"]}M',
            "-smp", str(vm_config["cores"]),
            "-device", "virtio-rng-pci",
            "-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0",
            "-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store",
            "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared",
            "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg",
            "-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets",
            "-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report',
            "-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
            "-device", "virtio-keyboard",
            "-usb",
            "-device", "usb-tablet,bus=usb-bus.0",
            "-kernel", f'{vm_config["toplevel"]}/kernel',
            "-initrd", vm_config["initrd"],
            "-append", " ".join(cmdline),
            # fmt: on
        ]
        if not self.vm.graphics:
            qemu_command.append("-nographic")
        print("$ " + shlex.join(qemu_command))
        cmd.run(nix_shell(["qemu"], qemu_command))


def create_vm(vm: VmConfig) -> BuildVmTask:
    return create_task(BuildVmTask, vm)


def create_command(args: argparse.Namespace) -> None:
    clan_dir = specific_flake_dir(args.flake)
    vm = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))

    task = create_vm(vm)
    for line in task.log_lines():
        print(line, end="")


def register_create_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create the machine for",
    )
    parser.set_defaults(func=create_command)
@@ -1,50 +0,0 @@
import argparse
import asyncio
import json
from pathlib import Path

from pydantic import AnyUrl, BaseModel

from ..async_cmd import run
from ..dirs import specific_flake_dir
from ..nix import nix_config, nix_eval


class VmConfig(BaseModel):
    flake_url: AnyUrl | Path
    flake_attr: str

    cores: int
    memory_size: int
    graphics: bool


async def inspect_vm(flake_url: AnyUrl | Path, flake_attr: str) -> VmConfig:
    config = nix_config()
    system = config["system"]
    cmd = nix_eval(
        [
            f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.system.clan.vm.config'
        ]
    )
    out = await run(cmd)
    data = json.loads(out.stdout)
    return VmConfig(flake_url=flake_url, flake_attr=flake_attr, **data)


def inspect_command(args: argparse.Namespace) -> None:
    clan_dir = specific_flake_dir(args.flake)
    res = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))
    print("Cores:", res.cores)
    print("Memory size:", res.memory_size)
    print("Graphics:", res.graphics)


def register_inspect_parser(parser: argparse.ArgumentParser) -> None:
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake the machine belongs to",
    )
    parser.set_defaults(func=inspect_command)
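A short sketch of calling inspect_vm directly; the flake path and machine name are placeholders:

# Evaluates <flake>#clanInternals.machines.<system>.<machine>.config.system.clan.vm.config
vm = asyncio.run(inspect_vm(flake_url=Path("/path/to/clan-flake"), flake_attr="my-machine"))
print(vm.cores, vm.memory_size, vm.graphics)
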
@@ -8,7 +8,7 @@ from fastapi.staticfiles import StaticFiles
from ..errors import ClanError
from .assets import asset_path
from .error_handlers import clan_error_handler
from .routers import flake, health, machines, root, vms
from .routers import health, root

origins = [
    "http://localhost:3000",
@@ -26,14 +26,12 @@ def setup_app() -> FastAPI:
        allow_methods=["*"],
        allow_headers=["*"],
    )
    app.include_router(flake.router)

    app.include_router(health.router)
    app.include_router(machines.router)
    app.include_router(vms.router)


    # Needs to be registered last because of the wildcard route
    app.include_router(root.router)

    app.add_exception_handler(ClanError, clan_error_handler)

    app.mount("/static", StaticFiles(directory=asset_path()), name="static")

@@ -1,90 +0,0 @@
import json
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Annotated

from fastapi import APIRouter, Body, HTTPException, status
from pydantic import AnyUrl

from clan_cli.webui.api_inputs import (
    FlakeCreateInput,
)
from clan_cli.webui.api_outputs import (
    FlakeAction,
    FlakeAttrResponse,
    FlakeCreateResponse,
    FlakeResponse,
)

from ...async_cmd import run
from ...flakes import create
from ...nix import nix_command, nix_flake_show

router = APIRouter()


# TODO: Check for directory traversal
async def get_attrs(url: AnyUrl | Path) -> list[str]:
    cmd = nix_flake_show(url)
    out = await run(cmd)

    data: dict[str, dict] = {}
    try:
        data = json.loads(out.stdout)
    except JSONDecodeError:
        raise HTTPException(status_code=422, detail="Could not load flake.")

    nixos_configs = data.get("nixosConfigurations", {})
    flake_attrs = list(nixos_configs.keys())

    if not flake_attrs:
        raise HTTPException(
            status_code=422, detail="No entry or no attribute: nixosConfigurations"
        )
    return flake_attrs


# TODO: Check for directory traversal
@router.get("/api/flake/attrs")
async def inspect_flake_attrs(url: AnyUrl | Path) -> FlakeAttrResponse:
    return FlakeAttrResponse(flake_attrs=await get_attrs(url))


# TODO: Check for directory traversal
@router.get("/api/flake")
async def inspect_flake(
    url: AnyUrl | Path,
) -> FlakeResponse:
    actions = []
    # Extract the flake from the given URL
    # We do this by running 'nix flake prefetch {url} --json'
    cmd = nix_command(["flake", "prefetch", str(url), "--json", "--refresh"])
    out = await run(cmd)
    data: dict[str, str] = json.loads(out.stdout)

    if data.get("storePath") is None:
        raise HTTPException(status_code=500, detail="Could not load flake")

    content: str
    with open(Path(data.get("storePath", "")) / Path("flake.nix")) as f:
        content = f.read()

    # TODO: Figure out a way to detect when it is insecure to inspect or create a VM
    actions.append(FlakeAction(id="vms/inspect", uri="api/vms/inspect"))
    actions.append(FlakeAction(id="vms/create", uri="api/vms/create"))

    return FlakeResponse(content=content, actions=actions)


@router.post("/api/flake/create", status_code=status.HTTP_201_CREATED)
async def create_flake(
    args: Annotated[FlakeCreateInput, Body()],
) -> FlakeCreateResponse:
    if args.dest.exists():
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Flake already exists",
        )

    cmd_out = await create.create_flake(args.dest, args.url)
    return FlakeCreateResponse(cmd_out=cmd_out)
@@ -1,69 +0,0 @@
# Logging setup
import logging
from typing import Annotated

from fastapi import APIRouter, Body

from ...config.machine import (
    config_for_machine,
    schema_for_machine,
    set_config_for_machine,
)
from ...machines.create import create_machine as _create_machine
from ...machines.list import list_machines as _list_machines
from ...types import FlakeName
from ..api_outputs import (
    ConfigResponse,
    Machine,
    MachineCreate,
    MachineResponse,
    MachinesResponse,
    SchemaResponse,
    Status,
)

log = logging.getLogger(__name__)
router = APIRouter()


@router.get("/api/{flake_name}/machines")
async def list_machines(flake_name: FlakeName) -> MachinesResponse:
    machines = []
    for m in _list_machines(flake_name):
        machines.append(Machine(name=m, status=Status.UNKNOWN))
    return MachinesResponse(machines=machines)


@router.post("/api/{flake_name}/machines", status_code=201)
async def create_machine(
    flake_name: FlakeName, machine: Annotated[MachineCreate, Body()]
) -> MachineResponse:
    out = await _create_machine(flake_name, machine.name)
    log.debug(out)
    return MachineResponse(machine=Machine(name=machine.name, status=Status.UNKNOWN))


@router.get("/api/machines/{name}")
async def get_machine(name: str) -> MachineResponse:
    log.error("TODO")
    return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN))


@router.get("/api/{flake_name}/machines/{name}/config")
async def get_machine_config(flake_name: FlakeName, name: str) -> ConfigResponse:
    config = config_for_machine(flake_name, name)
    return ConfigResponse(config=config)


@router.put("/api/{flake_name}/machines/{name}/config")
async def set_machine_config(
    flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> ConfigResponse:
    set_config_for_machine(flake_name, name, config)
    return ConfigResponse(config=config)


@router.get("/api/{flake_name}/machines/{name}/schema")
async def get_machine_schema(flake_name: FlakeName, name: str) -> SchemaResponse:
    schema = schema_for_machine(flake_name, name)
    return SchemaResponse(schema=schema)
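For illustration, these machine endpoints could be exercised like this once the webui is running; the port, flake name, and machine name are placeholders, and requests is just one convenient HTTP client:

import requests

base = "http://localhost:2979"  # placeholder; use whatever port the webui binds to
print(requests.get(f"{base}/api/myflake/machines").json())
requests.post(f"{base}/api/myflake/machines", json={"name": "machine1"})
print(requests.get(f"{base}/api/myflake/machines/machine1/config").json())
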
@@ -1,67 +0,0 @@
import logging
from pathlib import Path
from typing import Annotated, Iterator
from uuid import UUID

from fastapi import APIRouter, Body, status
from fastapi.exceptions import HTTPException
from fastapi.responses import StreamingResponse
from pydantic import AnyUrl

from clan_cli.webui.routers.flake import get_attrs

from ...task_manager import get_task
from ...vms import create, inspect
from ..api_outputs import (
    VmConfig,
    VmCreateResponse,
    VmInspectResponse,
    VmStatusResponse,
)

log = logging.getLogger(__name__)
router = APIRouter()


# TODO: Check for directory traversal
@router.post("/api/vms/inspect")
async def inspect_vm(
    flake_url: Annotated[AnyUrl | Path, Body()], flake_attr: Annotated[str, Body()]
) -> VmInspectResponse:
    config = await inspect.inspect_vm(flake_url, flake_attr)
    return VmInspectResponse(config=config)


@router.get("/api/vms/{uuid}/status")
async def get_vm_status(uuid: UUID) -> VmStatusResponse:
    task = get_task(uuid)
    log.debug(msg=f"error: {task.error}, task.status: {task.status}")
    error = str(task.error) if task.error is not None else None
    return VmStatusResponse(status=task.status, error=error)


@router.get("/api/vms/{uuid}/logs")
async def get_vm_logs(uuid: UUID) -> StreamingResponse:
    # Generator function that yields log lines as they are available
    def stream_logs() -> Iterator[str]:
        task = get_task(uuid)

        yield from task.log_lines()

    return StreamingResponse(
        content=stream_logs(),
        media_type="text/plain",
    )


# TODO: Check for directory traversal
@router.post("/api/vms/create")
async def create_vm(vm: Annotated[VmConfig, Body()]) -> VmCreateResponse:
    flake_attrs = await get_attrs(vm.flake_url)
    if vm.flake_attr not in flake_attrs:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Provided attribute '{vm.flake_attr}' does not exist.",
        )
    task = create.create_vm(vm)
    return VmCreateResponse(uuid=str(task.uuid))