Compare commits

...

42 Commits

Author SHA1 Message Date
fa30099991 update ui-assets.nix
All checks were successful
checks / test (push) Has been skipped
assets1 / test (push) Has been skipped
checks-impure / test (push) Has been skipped
2023-11-08 21:02:45 +00:00
b4c657501e Merge pull request 'README: Fixed missing direnv install step' (#13) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 25s
checks / test (push) Successful in 1m32s
assets1 / test (push) Successful in 49s
Reviewed-on: #13
2023-11-08 21:47:57 +01:00
c4388733e5 Fix forgetting to delete api folder in UI 2023-11-08 21:47:57 +01:00
105b6f0b35 Merge pull request 'Improved README' (#12) from Luis-main into main
All checks were successful
assets1 / test (push) Successful in 22s
checks-impure / test (push) Successful in 24s
checks / test (push) Successful in 1m49s
Reviewed-on: #12
2023-11-01 16:11:31 +01:00
1f895a0668 README: Fixed missing direnv install step 2023-11-01 16:11:31 +01:00
adcca39dc9 Merge pull request 'Improved README' (#11) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 26s
checks / test (push) Successful in 1m38s
assets1 / test (push) Successful in 24s
Reviewed-on: #11
2023-10-31 13:32:24 +01:00
637921e722 nix fmt
All checks were successful
checks-impure / test (pull_request) Successful in 26s
checks / test (pull_request) Successful in 1m31s
2023-10-31 13:24:10 +01:00
44f12945b8 Improved README 2023-10-31 13:24:10 +01:00
c0a5743502 update ui-assets.nix
All checks were successful
checks-impure / test (push) Has been skipped
checks / test (push) Has been skipped
assets1 / test (push) Has been skipped
2023-10-30 16:41:11 +00:00
48bc94a5de Merge pull request 'nix fmt' (#9) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 24s
checks / test (push) Successful in 2m54s
assets1 / test (push) Successful in 48s
Reviewed-on: #9
2023-10-30 17:32:34 +01:00
1621b22c1c Fixed missing gnused in ui-assets.sh 2023-10-30 17:32:34 +01:00
1e9817dea2 Merge pull request 'nix fmt' (#8) from Luis-main into main
Some checks failed
checks-impure / test (push) Successful in 25s
checks / test (push) Successful in 2m43s
assets1 / test (push) Failing after 48s
Reviewed-on: #8
2023-10-30 17:26:29 +01:00
494067899e Generate ui assets package_name on the fly 2023-10-30 17:26:29 +01:00
b95194890d nix fmt 2023-10-30 17:26:29 +01:00
8c1c050ba3 Merge pull request 'Improved README' (#7) from Luis-main into main
Some checks failed
checks-impure / test (push) Successful in 26s
checks / test (push) Failing after 3m19s
assets1 / test (push) Successful in 22s
Reviewed-on: #7
2023-10-30 17:14:37 +01:00
1eff969fbf Improved README
Some checks failed
checks / test (pull_request) Failing after 1m29s
checks-impure / test (pull_request) Successful in 26s
2023-10-30 17:08:41 +01:00
55f252af92 update ui-assets.nix
All checks were successful
checks / test (push) Has been skipped
assets1 / test (push) Has been skipped
checks-impure / test (push) Has been skipped
2023-10-30 16:01:42 +00:00
81553a3bc6 Merge pull request 'Improved README and ui-asset workflow' (#6) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 25s
checks / test (push) Successful in 1m28s
assets1 / test (push) Successful in 49s
Reviewed-on: #6
2023-10-30 16:59:11 +01:00
84c5b0477e Improved README and ui-asset workflow
All checks were successful
checks-impure / test (pull_request) Successful in 25s
checks / test (pull_request) Successful in 1m24s
2023-10-30 16:51:39 +01:00
5273eee89f Merge pull request 'Added Getting Started to README' (#5) from Luis-main into main
Some checks failed
checks / test (push) Failing after 3m15s
assets1 / test (push) Successful in 22s
checks-impure / test (push) Successful in 26s
Reviewed-on: Luis/consulting-website#5
2023-10-30 16:00:49 +01:00
f714682948 update ui-assets.nix
All checks were successful
checks-impure / test (push) Has been skipped
checks / test (push) Has been skipped
assets1 / test (push) Has been skipped
2023-10-30 14:59:42 +00:00
51754676bc Merge pull request 'Added Getting Started to README' (#4) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 25s
checks / test (push) Successful in 1m50s
assets1 / test (push) Successful in 51s
Reviewed-on: Luis/consulting-website#4
2023-10-30 15:56:46 +01:00
627fd5e76d Added Getting Started to README
Some checks failed
checks-impure / test (pull_request) Successful in 24s
checks / test (pull_request) Failing after 1m40s
2023-10-30 13:37:03 +01:00
7a54c87fde Added Getting Started to README
All checks were successful
checks-impure / test (pull_request) Successful in 26s
checks / test (pull_request) Successful in 1m52s
2023-10-30 13:26:09 +01:00
ui-asset-bot
217f465dc7 update ui-assets.nix
All checks were successful
checks-impure / test (push) Has been skipped
checks / test (push) Has been skipped
assets1 / test (push) Has been skipped
2023-10-27 22:52:40 +00:00
81cf1e2f81 Merge pull request 'Added correct owner to update-ui-assets.sh' (#3) from Luis-main into main
All checks were successful
checks-impure / test (push) Successful in 25s
checks / test (push) Successful in 1m59s
assets1 / test (push) Successful in 48s
Reviewed-on: Luis/consulting-website#3
2023-10-28 00:48:34 +02:00
27c9146ef6 Merge branch 'main' into Luis-main
All checks were successful
checks-impure / test (pull_request) Successful in 25s
checks / test (pull_request) Successful in 2m2s
2023-10-28 00:47:10 +02:00
16d7947701 Added correct owner to update-ui-assets.sh
All checks were successful
checks-impure / test (pull_request) Successful in 24s
checks / test (pull_request) Successful in 2m2s
2023-10-28 00:29:27 +02:00
778130d00d Merge pull request 'Fully working ui and cli' (#1) from Luis-main into main
Some checks failed
checks-impure / test (push) Successful in 24s
checks / test (push) Successful in 1m24s
assets1 / test (push) Failing after 51s
Reviewed-on: Luis/consulting-website#1
2023-10-23 22:37:35 +02:00
d053d4fba4 Fixing broken CI
All checks were successful
checks-impure / test (pull_request) Successful in 35s
checks / test (pull_request) Successful in 6m23s
2023-10-23 03:23:57 +02:00
a659800cb8 Fixing broken CI
Some checks failed
checks-impure / test (pull_request) Successful in 41s
checks / test (pull_request) Failing after 7m10s
2023-10-23 03:17:57 +02:00
1f70b42401 Fixing broken CI
Some checks failed
checks-impure / test (pull_request) Failing after 40s
checks / test (pull_request) Successful in 4m31s
2023-10-23 03:08:27 +02:00
112f281fd9 Fixing broken CI 2023-10-23 02:50:45 +02:00
9238225556 Fixing merge-after-ci
Some checks failed
checks-impure / test (pull_request) Failing after 3m11s
checks / test (pull_request) Failing after 24m9s
2023-10-23 02:12:42 +02:00
f1b66d7996 Nix fmt doesn't complain anymore 2023-10-23 01:30:47 +02:00
7a354875c9 Fully working ui and cli 2023-10-23 01:23:06 +02:00
805efb7ec7 Working base cli webui 2023-10-23 01:18:58 +02:00
e5c0bc7fd4 Fixed AnyURL problem 2023-10-22 22:38:04 +02:00
d78a3bc684 Fixed linting problem 2023-10-22 22:20:23 +02:00
aaeccfbec5 Fixed linting problem 2023-10-22 22:17:10 +02:00
e69cc4940d After fixing problem 2023-10-22 21:24:02 +02:00
c7c47b6527 Befor fixing linting problem 2023-10-22 21:03:06 +02:00
177 changed files with 3531 additions and 13109 deletions

2
.envrc
View File

@@ -3,3 +3,5 @@ if ! has nix_direnv_version || ! nix_direnv_version 2.3.0; then
fi
use flake

View File

@@ -15,7 +15,7 @@ jobs:
id: changed-files
uses: tj-actions/changed-files@v32
with:
fetch-depth: 2
fetch-depth: 0
- name: Check if UI files are in the list of modified files
run: |
@@ -35,8 +35,8 @@ jobs:
export PATH=$PATH:$DEPS
# Setup git config
git config --global user.email "ui-asset-bot@clan.lol"
git config --global user.name "ui-asset-bot"
git config --global user.email "$BOT_EMAIL"
git config --global user.name "$BOT_NAME"
################################################
# #
@@ -66,3 +66,5 @@ jobs:
env:
MODIFIED_FILES: ${{ steps.changed-files.outputs.modified_files }}
GITEA_TOKEN: ${{ secrets.BOT_ACCESS_TOKEN }}
BOT_NAME: "ui-asset-bot"
BOT_EMAIL: "ui-asset-bot@gchq.icu"

214
README.md
View File

@@ -1,9 +1,211 @@
# clan.lol core
# Website Template
This is the monorepo of the clan.lol project
In here are all the packages we use, all the nixosModules we use/expose, the CLI and tests for everything.
Welcome to our website template repository! This template is designed to help you and your team build high-quality websites efficiently. We've carefully chosen the technologies to make development smooth and enjoyable. Here's what you can expect from this template:
## cLAN config tool
**Frontend**: Our frontend is powered by [React NextJS](https://nextjs.org/), a popular and versatile framework for building web applications.
- The quickstart guide can be found here: [here](/clan/clan-core/src/branch/main/docs/quickstart.md)
- Find the docs [here](/clan/clan-core/src/branch/main/docs/clan-config.md)
**Backend**: For the backend, we use Python along with the [FastAPI framework](https://fastapi.tiangolo.com/). To ensure seamless communication between the frontend and backend, we generate an `openapi.json` file from the Python code, which defines the REST API. This file is then used with [Orval](https://orval.dev/) to generate TypeScript bindings for the REST API. We're committed to code correctness, so we use [mypy](https://mypy-lang.org/) to ensure that our Python code is statically typed correctly. For backend testing, we rely on [pytest](https://docs.pytest.org/en/7.4.x/).
**Continuous Integration (CI)**: We've set up a CI bot that rigorously checks your code using the quality assurance (QA) tools mentioned above. If any errors are detected, it will block pull requests until they're resolved.
**Dependency Management**: We use the [Nix package manager](https://nixos.org/) to manage dependencies and ensure reproducibility, making your development process more robust.
## Supported Operating Systems
- Linux
- macOS
# Getting Started with the Development Environment
Let's get your development environment up and running:
1. **Install Nix Package Manager**:
- You can install the Nix package manager by either [downloading the Nix installer](https://github.com/DeterminateSystems/nix-installer/releases) or running this command:
```bash
curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install
```
2. **Install direnv**:
- Download the direnv package from [here](https://direnv.net/docs/installation.html) or run the following command:
```bash
curl -sfL https://direnv.net/install.sh | bash
```
3. **Add direnv to your shell**:
- Direnv needs to [hook into your shell](https://direnv.net/docs/hook.html) to work.
You can do this by executing following command:
```bash
echo 'eval "$(direnv hook zsh)"' >> ~/.zshrc && echo 'eval "$(direnv hook bash)"' >> ~/.bashrc && eval "$SHELL"
```
4. **Clone the Repository and Navigate**:
- Clone this repository and navigate to it.
5. **Allow .envrc**:
- When you enter the directory, you'll receive an error message like this:
```bash
direnv: error .envrc is blocked. Run `direnv allow` to approve its content
```
- Execute `direnv allow` to automatically execute the shell script `.envrc` when entering the directory.
6. **Build the Backend**:
- Go to the `pkgs/clan-cli` directory and execute:
```bash
direnv allow
```
- Wait for the backend to build.
7. **Start the Backend Server**:
- To start the backend server, execute:
```bash
clan webui --reload --no-open --log-level debug
```
- The server will automatically restart if any Python files change.
8. **Build the Frontend**:
- In a different shell, navigate to the `pkgs/ui` directory and execute:
```bash
direnv allow
```
- Wait for the frontend to build.
9. **Start the Frontend**:
- To start the frontend, execute:
```bash
npm run dev
```
- Access the website by going to [http://localhost:3000](http://localhost:3000).
# Setting Up Your Git Workflow
Let's set up your Git workflow to collaborate effectively:
1. **Register Your Gitea Account Locally**:
- Execute the following command to add your Gitea account locally:
```bash
tea login add
```
- Fill out the prompt as follows:
- URL of Gitea instance: `https://gitea.gchq.icu`
- Name of new Login [gitea.gchq.icu]: `gitea.gchq.icu:7171`
- Do you have an access token? No
- Username: YourUsername
- Password: YourPassword
- Set Optional settings: No
2. **Git Workflow**:
1. Add your changes to Git using `git add <file1> <file2>`.
2. Run `nix fmt` to lint your files.
3. Commit your changes with a descriptive message: `git commit -a -m "My descriptive commit message"`.
4. Make sure your branch has the latest changes from upstream by executing:
```bash
git fetch && git rebase origin/main --autostash
```
5. Use `git status` to check for merge conflicts.
6. If conflicts exist, resolve them. Here's a tutorial for resolving conflicts in [VSCode](https://code.visualstudio.com/docs/sourcecontrol/overview#_merge-conflicts).
7. After resolving conflicts, execute `git merge --continue` and repeat step 5 until there are no conflicts.
3. **Create a Pull Request**:
- To automatically open a pull request that gets merged if all tests pass, execute:
```bash
merge-after-ci
```
4. **Review Your Pull Request**:
- Visit https://gitea.gchq.icu and go to the project page. Check under "Pull Requests" for any issues with your pull request.
5. **Push Your Changes**:
- If there are issues, fix them and redo step 2. Afterward, execute:
```bash
git push origin HEAD:YourUsername-main
```
- This will directly push to your open pull request.
# Debugging
When working on the backend of your project, debugging is an essential part of the development process. Here are some methods for debugging and testing the backend of your application:
## Test Backend Locally in Devshell with Breakpoints
To test the backend locally in a development environment and set breakpoints for debugging, follow these steps:
1. Run the following command to execute your tests and allow for debugging with breakpoints:
```bash
pytest -n0 -s --maxfail=1
```
You can place `breakpoint()` in your Python code where you want to trigger a breakpoint for debugging.
## Test Backend Locally in a Nix Sandbox
To run your backend tests in a Nix sandbox, you have two options depending on whether your test functions have been marked as impure or not:
### Running Tests Marked as Impure
If your test functions need to execute `nix build` and have been marked as impure because you can't execute `nix build` inside a Nix sandbox, use the following command:
```bash
nix run .#impure-checks
```
This command will run the impure test functions.
### Running Pure Tests
For test functions that have not been marked as impure and don't require executing `nix build`, you can use the following command:
```bash
nix build .#checks.x86_64-linux.clan-pytest --rebuild
```
This command will run all pure test functions.
### Inspecting the Nix Sandbox
If you need to inspect the Nix sandbox while running tests, follow these steps:
1. Insert an endless sleep into your test code where you want to pause the execution. For example:
```python
import time
time.sleep(3600) # Sleep for one hour
```
2. Use `cntr` and `psgrep` to attach to the Nix sandbox. This allows you to interactively debug your code while it's paused. For example:
```bash
cntr exec -w your_sandbox_name
psgrep -a -x your_python_process_name
```
These debugging and testing methods will help you identify and fix issues in your backend code efficiently, ensuring the reliability and robustness of your application.
# Using this Template
To make the most of this template:
1. Set up a new Gitea account named `ui-asset-bot`. Generate an access token with all access permissions and set it under `settings/actions/secrets` as a secret called `BOT_ACCESS_TOKEN`.
- Also, edit the file `.gitea/workflows/ui_assets.yaml` and change the `BOT_EMAIL` variable to match the email you set for that account. Gitea matches commits to accounts by their email address, so this step is essential.
2. Create a second Gitea account named `merge-bot`. Edit the file `pkgs/merge-after-ci/default.nix` if the name should be different. Under "Branches," set the main branch to be protected and add `merge-bot` to the whitelisted users for pushing. Set the unprotected file pattern to `**/ui-assets.nix`.
- Enable the status check for "build / test (pull_request)."
3. Add both `merge-bot` and `ui-asset-bot` as collaborators.
- Set the option to "Delete pull request branch after merge by default."
- Also, set the default merge style to "Rebase then create merge commit."
With this template, you're well-equipped to build and collaborate on high-quality websites efficiently. Happy coding!.

View File

@@ -2,28 +2,16 @@
imports = [
./impure/flake-module.nix
];
perSystem = { pkgs, lib, self', ... }: {
perSystem = { lib, self', ... }: {
checks =
let
nixosTestArgs = {
# reference to nixpkgs for the current system
inherit pkgs;
# this gives us a reference to our flake but also all flake inputs
inherit self;
};
nixosTests = lib.optionalAttrs (pkgs.stdenv.isLinux) {
# import our test
secrets = import ./secrets nixosTestArgs;
};
schemaTests = pkgs.callPackages ./schemas.nix {
inherit self;
};
flakeOutputs = lib.mapAttrs' (name: config: lib.nameValuePair "nixos-${name}" config.config.system.build.toplevel) self.nixosConfigurations
// lib.mapAttrs' (n: lib.nameValuePair "package-${n}") self'.packages
// lib.mapAttrs' (n: lib.nameValuePair "devShell-${n}") self'.devShells
// lib.mapAttrs' (name: config: lib.nameValuePair "home-manager-${name}" config.activation-script) (self'.legacyPackages.homeConfigurations or { });
in
nixosTests // schemaTests // flakeOutputs;
flakeOutputs;
};
}

View File

@@ -1,54 +0,0 @@
{ self, lib, inputs, ... }:
let
inherit (builtins)
mapAttrs
toJSON
toFile
;
inherit (lib)
mapAttrs'
;
clanLib = self.lib;
clanModules = self.clanModules;
in
{
perSystem = { pkgs, ... }:
let
baseModule = {
imports =
(import (inputs.nixpkgs + "/nixos/modules/module-list.nix"))
++ [{
nixpkgs.hostPlatform = pkgs.system;
}];
};
optionsFromModule = module:
let
evaled = lib.evalModules {
modules = [ module baseModule ];
};
in
evaled.options.clan.networking;
clanModuleSchemas =
mapAttrs
(_: module: clanLib.jsonschema.parseOptions (optionsFromModule module))
clanModules;
mkTest = name: schema: pkgs.runCommand "schema-${name}" { } ''
${pkgs.check-jsonschema}/bin/check-jsonschema \
--check-metaschema ${toFile "schema-${name}" (toJSON schema)}
touch $out
'';
in
{
checks = mapAttrs'
(name: schema: {
name = "schema-${name}";
value = mkTest name schema;
})
clanModuleSchemas;
};
}

View File

@@ -1,34 +0,0 @@
{ self, runCommand, check-jsonschema, pkgs, lib, ... }:
let
clanModules.clanCore = self.nixosModules.clanCore;
baseModule = {
imports =
(import (pkgs.path + "/nixos/modules/module-list.nix"))
++ [{
nixpkgs.hostPlatform = "x86_64-linux";
}];
};
optionsFromModule = module:
let
evaled = lib.evalModules {
modules = [ module baseModule ];
};
in
evaled.options.clan;
clanModuleSchemas = lib.mapAttrs (_: module: self.lib.jsonschema.parseOptions (optionsFromModule module)) clanModules;
mkTest = name: schema: runCommand "schema-${name}" { } ''
${check-jsonschema}/bin/check-jsonschema \
--check-metaschema ${builtins.toFile "schema-${name}" (builtins.toJSON schema)}
touch $out
'';
in
lib.mapAttrs'
(name: schema: {
name = "schema-${name}";
value = mkTest name schema;
})
clanModuleSchemas

View File

@@ -1,6 +0,0 @@
#!/usr/bin/env bash
set -eux -o pipefail
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
export SOPS_AGE_KEY_FILE="${SCRIPT_DIR}/key.age"
nix run .# -- secrets "$@"

View File

@@ -1,21 +0,0 @@
(import ../lib/test-base.nix) {
name = "secrets";
nodes.machine = { self, config, ... }: {
imports = [
(self.nixosModules.clanCore)
];
environment.etc."secret".source = config.sops.secrets.secret.path;
environment.etc."group-secret".source = config.sops.secrets.group-secret.path;
sops.age.keyFile = ./key.age;
clanCore.clanDir = "${./.}";
clanCore.machineName = "machine";
networking.hostName = "machine";
};
testScript = ''
machine.succeed("cat /etc/secret >&2")
machine.succeed("cat /etc/group-secret >&2")
'';
}

View File

@@ -1 +0,0 @@
AGE-SECRET-KEY-1UCXEUJH6JXF8LFKWFHDM4N9AQE2CCGQZGXLUNV4TKR5KY0KC8FDQ2TY4NX

View File

@@ -1 +0,0 @@
../../../machines/machine

View File

@@ -1,4 +0,0 @@
{
"publickey": "age15x8u838dwqflr3t6csf4tlghxm4tx77y379ncqxav7y2n8qp7yzqgrwt00",
"type": "age"
}

View File

@@ -1 +0,0 @@
../../../groups/group

View File

@@ -1,20 +0,0 @@
{
"data": "ENC[AES256_GCM,data:FgF3,iv:QBbnqZ6405qmwGKhbolPr9iobngXt8rtfUwCBOnmwRA=,tag:7gqI1zLVnTkZ0xrNn/LEkA==,type:str]",
"sops": {
"kms": null,
"gcp_kms": null,
"azure_kv": null,
"hc_vault": null,
"age": [
{
"recipient": "age15x8u838dwqflr3t6csf4tlghxm4tx77y379ncqxav7y2n8qp7yzqgrwt00",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSArMHcxKzhUZzNHQmQrb28x\nRC9UMlZMeDN3S1l1eHdUWmV4VUVReHhhQ0RnCjAyUXVlY1FmclVmL2lEdFZuTmll\nVENpa3AwbjlDck5zdGdHUTRnNEdEOUkKLS0tIER3ZlNMSVFnRElkRDcxajZnVmFl\nZThyYzcvYUUvaWJYUmlwQ3dsSDdjSjgK+tj34yBzrsIjm6V+T9wTgz5FdNGOR7I/\nVB4fh8meW0vi/PCK/rajC8NbqmK8qq/lwsF/JwfZKDSdG0FOJUB1AA==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2023-09-03T12:44:56Z",
"mac": "ENC[AES256_GCM,data:d5a0WfE5ZRLKF1NZkBfOl+cVI8ZZHd2rC+qX/giALjyrzk09rLxBeY4lO827GFfMmVy/oC7ceH9pjv2O7ibUiQtcbGIQVBg/WP+dVn8fRMWtF0jpv9BhYTutkVk3kiddqPGhp3mpwvls2ot5jtCRczTPk3JSxN3B1JSJCmj9GfQ=,iv:YmlkTYFNUaFRWozO8+OpEVKaSQmh+N9zpatwUNMPNyw=,tag:mEGQ4tdo82qlhKWalQuufg==,type:str]",
"pgp": null,
"unencrypted_suffix": "_unencrypted",
"version": "3.7.3"
}
}

View File

@@ -1 +0,0 @@
../../../machines/machine

View File

@@ -1,20 +0,0 @@
{
"data": "ENC[AES256_GCM,data:bhxF,iv:iNs+IfSU/7EwssZ0GVTF2raxJkVlddfQEPGIBeUYAy8=,tag:JMOKTMW3/ic3UTj9eT9YFQ==,type:str]",
"sops": {
"kms": null,
"gcp_kms": null,
"azure_kv": null,
"hc_vault": null,
"age": [
{
"recipient": "age15x8u838dwqflr3t6csf4tlghxm4tx77y379ncqxav7y2n8qp7yzqgrwt00",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxS0g4TEt4S09LQnFKdCtk\nZTlUQWhNUHZmcmZqdGtuZkhhTkMzZDVaWWdNCi9vNnZQeklNaFBBU2x0ditlUDR0\nNGJlRmFFb09WSUFGdEh5TGViTWtacFEKLS0tIE1OMWdQMHhGeFBwSlVEamtHUkcy\ndzI1VHRkZ1o4SStpekVNZmpQSnRkeUkKYmPS9sR6U0NHxd55DjRk29LNFINysOl6\nEM2MTrntLxOHFWZ1QgNx34l4rYIIXx97ONvR0SRpxN0ECL9VonQeZg==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2023-08-23T09:11:08Z",
"mac": "ENC[AES256_GCM,data:8z819mP4FJXE/ExWM1+/dhaXIXzCglhBuZwE6ikl/jNLUAnv3jYL9c9vPrPFl2by3wXSNzqB4AOiTKDQoxDx2SBQKxeWaUnOajD6hbzskoLqCCBfVx7qOHrk/BULcBvMSxBca4RnzXXoMFTwKs2A1fXqAPvSQd1X4gX6Xm9VXWM=,iv:3YxZX+gaEcRKDN0Kuf9y1oWL+sT/J5B/5CtCf4iur9Y=,tag:0dwyjpvjCqbm9vIrz6WSWQ==,type:str]",
"pgp": null,
"unencrypted_suffix": "_unencrypted",
"version": "3.7.3"
}
}

View File

@@ -1 +0,0 @@
../../../users/admin

View File

@@ -1,4 +0,0 @@
{
"publickey": "age15x8u838dwqflr3t6csf4tlghxm4tx77y379ncqxav7y2n8qp7yzqgrwt00",
"type": "age"
}

View File

@@ -1,7 +1,9 @@
#!/usr/bin/env bash
# Because we depend on nixpkgs sources, uploading to builders takes a long time
source_up
if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file flake-module.nix
nix_direnv_watch_file default.nix

View File

@@ -56,15 +56,15 @@ Add this `launch.json` to your .vscode directory to have working breakpoints in
## Run locally single-threaded for debugging
By default tests run in parallel using pytest-parallel.
pytest-parallel however breaks `breakpoint()`. To disable it, use this:
By default tests run in parallel using pytest-xdist.
pytest-xdist however breaks `breakpoint()`. To disable it, use this:
```console
pytest --workers "" -s
pytest -n0 -s
```
You can also run a single test like this:
```console
pytest --workers "" -s tests/test_secrets_cli.py::test_users
pytest -n0 -s tests/test_secrets_cli.py::test_users
```

View File

@@ -1,10 +1,13 @@
import argparse
import logging
import sys
from types import ModuleType
from typing import Optional
from . import config, flakes, join, machines, secrets, vms, webui
from .ssh import cli as ssh_cli
from . import webui
from .custom_logger import register
log = logging.getLogger(__name__)
argcomplete: Optional[ModuleType] = None
try:
@@ -24,33 +27,12 @@ def create_parser(prog: Optional[str] = None) -> argparse.ArgumentParser:
subparsers = parser.add_subparsers()
parser_flake = subparsers.add_parser(
"flakes", help="create a clan flake inside the current directory"
)
flakes.register_parser(parser_flake)
parser_join = subparsers.add_parser("join", help="join a remote clan")
join.register_parser(parser_join)
parser_config = subparsers.add_parser("config", help="set nixos configuration")
config.register_parser(parser_config)
parser_ssh = subparsers.add_parser("ssh", help="ssh to a remote machine")
ssh_cli.register_parser(parser_ssh)
parser_secrets = subparsers.add_parser("secrets", help="manage secrets")
secrets.register_parser(parser_secrets)
parser_machine = subparsers.add_parser(
"machines", help="Manage machines and their configuration"
)
machines.register_parser(parser_machine)
parser_webui = subparsers.add_parser("webui", help="start webui")
webui.register_parser(parser_webui)
parser_vms = subparsers.add_parser("vms", help="manage virtual machines")
vms.register_parser(parser_vms)
# if args.debug:
register(logging.DEBUG)
log.debug("Debug log activated")
if argcomplete:
argcomplete.autocomplete(parser)

View File

@@ -1,370 +0,0 @@
# !/usr/bin/env python3
import argparse
import json
import os
import re
import shlex
import subprocess
import sys
from pathlib import Path
from typing import Any, Optional, Tuple, get_origin
from clan_cli.dirs import machine_settings_file, specific_flake_dir
from clan_cli.errors import ClanError
from clan_cli.flakes.types import FlakeName
from clan_cli.git import commit_file
from clan_cli.nix import nix_eval
script_dir = Path(__file__).parent
# nixos option type description to python type
def map_type(type: str) -> Any:
if type == "boolean":
return bool
elif type in [
"integer",
"signed integer",
"16 bit unsigned integer; between 0 and 65535 (both inclusive)",
]:
return int
elif type == "string":
return str
# lib.type.passwdEntry
elif type == "string, not containing newlines or colons":
return str
elif type.startswith("null or "):
subtype = type.removeprefix("null or ")
return Optional[map_type(subtype)]
elif type.startswith("attribute set of"):
subtype = type.removeprefix("attribute set of ")
return dict[str, map_type(subtype)] # type: ignore
elif type.startswith("list of"):
subtype = type.removeprefix("list of ")
return list[map_type(subtype)] # type: ignore
else:
raise ClanError(f"Unknown type {type}")
# merge two dicts recursively
def merge(a: dict, b: dict, path: list[str] = []) -> dict:
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
elif isinstance(a[key], list) and isinstance(b[key], list):
a[key].extend(b[key])
elif a[key] != b[key]:
a[key] = b[key]
else:
a[key] = b[key]
return a
# A container inheriting from list, but overriding __contains__ to return True
# for all values.
# This is used to allow any value for the "choices" field of argparse
class AllContainer(list):
def __contains__(self, item: Any) -> bool:
return True
# value is always a list, as the arg parser cannot know the type upfront
# and therefore always allows multiple arguments.
def cast(value: Any, type: Any, opt_description: str) -> Any:
try:
# handle bools
if isinstance(type, bool):
if value[0] in ["true", "True", "yes", "y", "1"]:
return True
elif value[0] in ["false", "False", "no", "n", "0"]:
return False
else:
raise ClanError(f"Invalid value {value} for boolean")
# handle lists
elif get_origin(type) == list:
subtype = type.__args__[0]
return [cast([x], subtype, opt_description) for x in value]
# handle dicts
elif get_origin(type) == dict:
if not isinstance(value, dict):
raise ClanError(
f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
)
subtype = type.__args__[1]
return {k: cast(v, subtype, opt_description) for k, v in value.items()}
elif str(type) == "typing.Optional[str]":
if value[0] in ["null", "None"]:
return None
return value[0]
else:
if len(value) > 1:
raise ClanError(f"Too many values for {opt_description}")
return type(value[0])
except ValueError:
raise ClanError(
f"Invalid type for option {opt_description} (expected {type.__name__})"
)
def options_for_machine(
flake_name: FlakeName, machine_name: str, show_trace: bool = False
) -> dict:
clan_dir = specific_flake_dir(flake_name)
flags = []
if show_trace:
flags.append("--show-trace")
flags.append(
f"{clan_dir}#nixosConfigurations.{machine_name}.config.clanCore.optionsNix"
)
cmd = nix_eval(flags=flags)
proc = subprocess.run(
cmd,
stdout=subprocess.PIPE,
text=True,
)
if proc.returncode != 0:
raise ClanError(
f"Failed to read options for machine {machine_name}:\n{shlex.join(cmd)}\nexit with {proc.returncode}"
)
return json.loads(proc.stdout)
def read_machine_option_value(
flake_name: FlakeName, machine_name: str, option: str, show_trace: bool = False
) -> str:
clan_dir = specific_flake_dir(flake_name)
# use nix eval to read from .#nixosConfigurations.default.config.{option}
# this will give us the evaluated config with the options attribute
cmd = nix_eval(
flags=[
"--show-trace",
f"{clan_dir}#nixosConfigurations.{machine_name}.config.{option}",
],
)
proc = subprocess.run(cmd, stdout=subprocess.PIPE, text=True)
if proc.returncode != 0:
raise ClanError(
f"Failed to read option {option}:\n{shlex.join(cmd)}\nexit with {proc.returncode}"
)
value = json.loads(proc.stdout)
# print the value so that the output can be copied and fed as an input.
# for example a list should be displayed as space separated values surrounded by quotes.
if isinstance(value, list):
out = " ".join([json.dumps(x) for x in value])
elif isinstance(value, dict):
out = json.dumps(value, indent=2)
else:
out = json.dumps(value, indent=2)
return out
def get_or_set_option(args: argparse.Namespace) -> None:
if args.value == []:
print(read_machine_option_value(args.machine, args.option, args.show_trace))
else:
# load options
if args.options_file is None:
options = options_for_machine(
args.flake, machine_name=args.machine, show_trace=args.show_trace
)
else:
with open(args.options_file) as f:
options = json.load(f)
# compute settings json file location
if args.settings_file is None:
settings_file = machine_settings_file(args.flake, args.machine)
else:
settings_file = args.settings_file
# set the option with the given value
set_option(
flake_name=args.flake,
option=args.option,
value=args.value,
options=options,
settings_file=settings_file,
option_description=args.option,
show_trace=args.show_trace,
)
if not args.quiet:
new_value = read_machine_option_value(args.flake, args.machine, args.option)
print(f"New Value for {args.option}:")
print(new_value)
def find_option(
option: str, value: Any, options: dict, option_description: Optional[str] = None
) -> Tuple[str, Any]:
"""
The option path specified by the user doesn't have to match exactly to an
entry in the options.json file. Examples
Example 1:
$ clan config services.openssh.settings.SomeSetting 42
This is a freeform option that does not appear in the options.json
The actual option is `services.openssh.settings`
And the value must be wrapped: {"SomeSettings": 42}
Example 2:
$ clan config users.users.my-user.name my-name
The actual option is `users.users.<name>.name`
"""
# option description is used for error messages
if option_description is None:
option_description = option
option_path = option.split(".")
# fuzzy search the option paths, so when
# specified option path: "foo.bar.baz.bum"
# available option path: "foo.<name>.baz.<name>"
# we can still find the option
first = option_path[0]
regex = rf"({first}|<name>)"
for elem in option_path[1:]:
regex += rf"\.({elem}|<name>)"
for opt in options.keys():
if re.match(regex, opt):
return opt, value
# if the regex search did not find the option, start stripping the last
# element of the option path and find matching parent option
# (see examples above for why this is needed)
if len(option_path) == 1:
raise ClanError(f"Option {option_description} not found")
option_path_parent = option_path[:-1]
attr_prefix = option_path[-1]
return find_option(
option=".".join(option_path_parent),
value={attr_prefix: value},
options=options,
option_description=option_description,
)
def set_option(
    flake_name: FlakeName,
    option: str,
    value: Any,
    options: dict,
    settings_file: Path,
    option_description: str = "",
    show_trace: bool = False,
) -> None:
    """Cast and write a single option value into the machine's settings file.

    :param option: dotted option path as typed by the user
    :param value: raw value; cast to the option's declared type before writing
    :param options: options.json mapping used for path resolution and typing
    :param settings_file: JSON file to merge the new value into
    :param option_description: original user path, used in the commit message
    :param show_trace: accepted for interface compatibility; not used here
    :raises ClanError: via find_option when the option does not exist
    """
    option_path_orig = option.split(".")
    # returns for example:
    #   option: "users.users.<name>.name"
    #   value: "my-name"
    option, value = find_option(
        option=option,
        value=value,
        options=options,
        option_description=option_description,
    )
    option_path = option.split(".")
    # keep the concrete path elements the user typed (not the <name> wildcards)
    option_path_store = option_path_orig[: len(option_path)]
    target_type = map_type(options[option]["type"])
    casted = cast(value, target_type, option)

    # construct a nested dict from the option path and set the value
    # (bug fix: a dead assignment of the un-cast value, immediately
    # overwritten by `casted`, has been removed)
    result: dict[str, Any] = {}
    current = result
    for part in option_path_store[:-1]:
        current[part] = {}
        current = current[part]
    current[option_path_store[-1]] = casted

    # check if there is an existing config file
    if settings_file.exists():
        with open(settings_file) as f:
            current_config = json.load(f)
    else:
        current_config = {}
    # merge and save the new config file
    new_config = merge(current_config, result)
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    with open(settings_file, "w") as f:
        json.dump(new_config, f, indent=2)
        print(file=f)  # add newline at the end of the file to make git happy
    # only auto-commit when the settings file lives inside the managed flake
    if settings_file.resolve().is_relative_to(specific_flake_dir(flake_name)):
        commit_file(settings_file, commit_message=f"Set option {option_description}")
# takes a (sub)parser and configures it
def register_parser(
    parser: Optional[argparse.ArgumentParser],
) -> None:
    """Configure the `config` (sub)parser: flake/option/value positionals plus flags.

    Creates a standalone parser when none is given (standalone CLI usage).
    """
    if parser is None:
        parser = argparse.ArgumentParser(
            description="Set or show NixOS options",
        )

    # inject callback function to process the input later
    parser.set_defaults(func=get_or_set_option)
    # NOTE: positional arguments must stay registered in this order
    # (flake, option, value) — it defines the CLI syntax
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to set machine options for",
    )
    parser.add_argument(
        "--machine",
        "-m",
        help="Machine to configure",
        type=str,
        default="default",
    )
    parser.add_argument(
        "--show-trace",
        help="Show nix trace on evaluation error",
        action="store_true",
    )
    parser.add_argument(
        "--options-file",
        help="JSON file with options",
        type=Path,
    )
    parser.add_argument(
        "--settings-file",
        help="JSON file with settings",
        type=Path,
    )
    parser.add_argument(
        "--quiet",
        help="Do not print the value",
        action="store_true",
    )
    parser.add_argument(
        "option",
        help="Option to read or set (e.g. foo.bar)",
        type=str,
    )
    parser.add_argument(
        "value",
        # zero or more tokens: an empty list means "read the current value"
        nargs="*",
        help="option value to set (if omitted, the current value is printed)",
    )
def main(argv: Optional[list[str]] = None) -> None:
    """CLI entry point: parse the arguments and dispatch the selected command.

    :param argv: full argument vector including program name; defaults to sys.argv
    """
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser()
    register_parser(parser)
    args = parser.parse_args(argv[1:])
    # bug fix: the parsed namespace was previously discarded, so the callback
    # installed via parser.set_defaults(func=...) never ran
    if hasattr(args, "func"):
        args.func(args)


if __name__ == "__main__":
    main()

View File

@@ -1 +0,0 @@
../../../../lib/jsonschema

View File

@@ -1,84 +0,0 @@
import json
import subprocess
import sys
from pathlib import Path
from fastapi import HTTPException
from clan_cli.dirs import (
machine_settings_file,
nixpkgs_source,
specific_flake_dir,
specific_machine_dir,
)
from clan_cli.git import commit_file, find_git_repo_root
from clan_cli.nix import nix_eval
from ..flakes.types import FlakeName
def config_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
    """Return the machine's settings, or {} if no settings file exists yet.

    :raises HTTPException: 404 if the machine directory does not exist
    """
    # read the config from a json file located at {flake}/machines/{machine_name}/settings.json
    if not specific_machine_dir(flake_name, machine_name).exists():
        raise HTTPException(
            status_code=404,
            # bug fix: stray trailing backtick removed from the error detail
            detail=f"Machine {machine_name} not found. Create the machine first",
        )
    settings_path = machine_settings_file(flake_name, machine_name)
    if not settings_path.exists():
        return {}
    with open(settings_path) as f:
        return json.load(f)
def set_config_for_machine(
    flake_name: FlakeName, machine_name: str, config: dict
) -> None:
    """Write `config` as the machine's settings.json and commit it if in a repo.

    :raises HTTPException: 404 if the machine directory does not exist
    """
    # write the config to a json file located at {flake}/machines/{machine_name}/settings.json
    if not specific_machine_dir(flake_name, machine_name).exists():
        raise HTTPException(
            status_code=404,
            # bug fix: stray trailing backtick removed from the error detail
            detail=f"Machine {machine_name} not found. Create the machine first",
        )
    settings_path = machine_settings_file(flake_name, machine_name)
    settings_path.parent.mkdir(parents=True, exist_ok=True)
    with open(settings_path, "w") as f:
        json.dump(config, f)
        print(file=f)  # trailing newline, consistent with set_option (git-friendly)
    repo_dir = find_git_repo_root()
    if repo_dir is not None:
        commit_file(settings_path, repo_dir)
def schema_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
    """Evaluate the JSON schema of the machine's clan options via `nix eval`.

    :return: the parsed jsonschema for `flake.nixosConfigurations.<machine>.options.clan`
    :raises Exception: when the nix evaluation fails (stderr is echoed first)
    """
    flake = specific_flake_dir(flake_name)
    # use nix eval to lib.evalModules .#nixosModules.machine-{machine_name}
    proc = subprocess.run(
        nix_eval(
            flags=[
                "--impure",
                "--show-trace",
                "--expr",
                f"""
                let
                  flake = builtins.getFlake (toString {flake});
                  lib = import {nixpkgs_source()}/lib;
                  options = flake.nixosConfigurations.{machine_name}.options;
                  clanOptions = options.clan;
                  jsonschemaLib = import {Path(__file__).parent / "jsonschema"} {{ inherit lib; }};
                  jsonschema = jsonschemaLib.parseOptions clanOptions;
                in
                  jsonschema
                """,
            ],
        ),
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        # surface nix's stderr before raising so the trace is not lost
        print(proc.stderr, file=sys.stderr)
        raise Exception(
            f"Failed to read schema for machine {machine_name}:\n{proc.stderr}"
        )
    return json.loads(proc.stdout)

View File

@@ -1,109 +0,0 @@
import json
import subprocess
from pathlib import Path
from typing import Any, Optional, Type, Union
from ..errors import ClanError
from ..nix import nix_eval
# directory containing this module; used to locate the bundled jsonschema nix lib
script_dir = Path(__file__).parent

# mapping from JSON-schema scalar/array type names to Python types;
# "object" is handled structurally elsewhere and is deliberately absent
type_map: dict[str, type] = {
    "array": list,
    "boolean": bool,
    "integer": int,
    "number": float,
    "string": str,
}
def schema_from_module_file(
    file: Union[str, Path] = f"{script_dir}/jsonschema/example-schema.json",
) -> dict[str, Any]:
    """Evaluate a NixOS module file with lib.evalModules and return its JSON schema.

    :param file: path to the nix module file to parse
    :return: the parsed JSON schema produced by the bundled jsonschema nix lib
    :raises subprocess.CalledProcessError: if the nix evaluation fails
    """
    absolute_path = Path(file).absolute()
    # define a nix expression that loads the given module file using lib.evalModules
    nix_expr = f"""
        let
            lib = import <nixpkgs/lib>;
            slib = import {script_dir}/jsonschema {{inherit lib;}};
        in
            slib.parseModule {absolute_path}
    """
    # run the nix expression and parse the output as json
    cmd = nix_eval(["--expr", nix_expr])
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, check=True)
    return json.loads(proc.stdout)
def subtype_from_schema(schema: dict[str, Any]) -> Type:
    """Translate a JSON-schema fragment into the corresponding Python type.

    :raises ClanError: for nested fixed-property objects, untyped arrays,
        and objects with neither properties nor additionalProperties
    """
    schema_kind = schema["type"]
    if schema_kind == "object":
        if "additionalProperties" in schema:
            # map-like object: every value shares a single schema
            value_type = subtype_from_schema(schema["additionalProperties"])
            return dict[str, value_type]  # type: ignore
        if "properties" in schema:
            raise ClanError("Nested dicts are not supported")
        raise ClanError("Unknown object type")
    if schema_kind == "array":
        if "items" not in schema:
            raise ClanError("Untyped arrays are not supported")
        element_type = subtype_from_schema(schema["items"])
        return list[element_type]  # type: ignore
    # scalar: look it up in the module-level type table
    return type_map[schema_kind]
def type_from_schema_path(
    schema: dict[str, Any],
    path: list[str],
    full_path: Optional[list[str]] = None,
) -> Type:
    """Walk `path` into a JSON schema and return the Python type at its end.

    :param full_path: the original, complete path — kept for error messages.
        (bug fix: it was never propagated through the recursion before, so
        the parameter was dead and errors only showed the remaining suffix)
    :raises ClanError: when the schema cannot be descended along `path`
    """
    if full_path is None:
        full_path = path
    if len(path) == 0:
        return subtype_from_schema(schema)
    elif schema["type"] == "object":
        if "properties" in schema:
            return type_from_schema_path(
                schema["properties"][path[0]], path[1:], full_path
            )
        elif "additionalProperties" in schema:
            return type_from_schema_path(
                schema["additionalProperties"], path[1:], full_path
            )
        else:
            raise ClanError(f"Unknown type for path {full_path}")
    else:
        raise ClanError(f"Unknown type for path {full_path}")
def options_types_from_schema(schema: dict[str, Any]) -> dict[str, Type]:
    """Flatten a JSON schema into a mapping of dotted option paths to Python types.

    Map-like objects become "<field>.<name>" wildcard entries; nested
    fixed-property objects are recursed into and dot-prefixed.

    :raises ClanError: for unsupported object/array element types
    """
    flattened: dict[str, Type] = {}
    for field, subschema in schema.get("properties", {}).items():
        assert isinstance(subschema, dict)
        kind = subschema["type"]
        if kind == "object":
            if "additionalProperties" in subschema:
                # handle additionalProperties: map-like, record a wildcard key
                elem = subschema["additionalProperties"].get("type")
                if elem not in type_map:
                    raise ClanError(
                        f"Unsupported object type {elem} (field {field})"
                    )
                flattened[f"{field}.<name>"] = type_map[elem]
            else:
                # handle properties: recurse and prefix with the field name
                for inner_name, inner_type in options_types_from_schema(
                    subschema
                ).items():
                    flattened[f"{field}.{inner_name}"] = inner_type
        elif kind == "array":
            if "items" not in subschema:
                raise ClanError(f"Untyped arrays are not supported (field: {field})")
            elem = subschema["items"].get("type")
            if elem not in type_map:
                raise ClanError(f"Unsupported list type {elem} (field {field})")
            elem_type: type = type_map[elem]
            flattened[field] = list[elem_type]  # type: ignore
        else:
            flattened[field] = type_map[kind]
    return flattened

View File

@@ -1,5 +1,7 @@
import inspect
import logging
from typing import Any
from pathlib import Path
from typing import Any, Callable
grey = "\x1b[38;20m"
yellow = "\x1b[33;20m"
@@ -9,11 +11,20 @@ green = "\u001b[32m"
blue = "\u001b[34m"
def get_formatter(color: str) -> logging.Formatter:
reset = "\x1b[0m"
return logging.Formatter(
f"{color}%(levelname)s{reset}:(%(filename)s:%(lineno)d): %(message)s"
)
def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
    """Return a formatter factory bound to the given ANSI color code.

    The returned callable takes a log record and a flag and yields a
    logging.Formatter that optionally appends the record's file location.
    """

    def build(record: logging.LogRecord, with_location: bool) -> logging.Formatter:
        reset = "\x1b[0m"
        filepath = Path(record.pathname).resolve()
        if with_location:
            return logging.Formatter(
                f"{color}%(levelname)s{reset}: %(message)s\n {filepath}:%(lineno)d::%(funcName)s\n"
            )
        return logging.Formatter(f"{color}%(levelname)s{reset}: %(message)s")

    return build
FORMATTER = {
@@ -26,12 +37,34 @@ FORMATTER = {
class CustomFormatter(logging.Formatter):
def format(self, record: Any) -> str:
return FORMATTER[record.levelno].format(record)
def format(self, record: logging.LogRecord) -> str:
return FORMATTER[record.levelno](record, True).format(record)
class ThreadFormatter(logging.Formatter):
def format(self, record: logging.LogRecord) -> str:
return FORMATTER[record.levelno](record, False).format(record)
def get_caller() -> str:
    """Return "file:line::function" describing the caller of our caller.

    Falls back to "unknown" when the frame chain is too short or frames
    are unavailable on this interpreter.
    """
    frame = inspect.currentframe()
    if frame is None:
        return "unknown"
    # step two frames up: skip get_caller itself, then its direct caller
    outer = frame.f_back
    if outer is None:
        return "unknown"
    outer = outer.f_back
    if outer is None:
        return "unknown"
    info = inspect.getframeinfo(outer)
    return f"{info.filename}:{info.lineno}::{info.function}"
def register(level: Any) -> None:
    """Attach a stream handler with the custom formatter at the given level.

    fix: the rendered diff interleaved the pre- and post-change bodies, so
    the function installed two handlers; only the newer addHandler variant
    is kept here.
    """
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(CustomFormatter())
    logger = logging.getLogger("registerHandler")
    logger.addHandler(handler)
    # logging.basicConfig(level=level, handlers=[handler])

View File

@@ -0,0 +1,72 @@
import logging
import multiprocessing as mp
import os
import shlex
import stat
import subprocess
import sys
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional
import ipdb
log = logging.getLogger(__name__)
def command_exec(cmd: List[str], work_dir: Path, env: Dict[str, str]) -> None:
    """Run `cmd` inside `work_dir` with the given environment; raise on failure."""
    resolved = work_dir.resolve()
    subprocess.run(cmd, check=True, env=env, cwd=resolved)
def repro_env_break(
    work_dir: Path,
    env: Optional[Dict[str, str]] = None,
    cmd: Optional[List[str]] = None,
) -> None:
    """Open an interactive xterm+zsh in `work_dir` to reproduce a failure,
    then drop the current process into an ipdb breakpoint.

    :param work_dir: directory the spawned shell starts in
    :param env: environment for the shell; defaults to a copy of os.environ
    :param cmd: optional command, written to `work_dir`/cmd.sh as a script
    """
    if env is None:
        env = os.environ.copy()
    else:
        env = env.copy()
    # Error checking
    # NOTE(review): assumes $SHELL is present in the environment — raises
    # KeyError otherwise; confirm whether that is acceptable
    if "bash" in env["SHELL"]:
        raise Exception("I assumed you use zsh, not bash")
    # Cmd appending
    args = ["xterm", "-e", "zsh", "-df"]
    if cmd is not None:
        mycommand = shlex.join(cmd)
        write_command(mycommand, work_dir / "cmd.sh")
        # NOTE(review): despite the message, nothing here appends to the zsh
        # history — the command is only written to cmd.sh; confirm intent
        print(f"Adding to zsh history the command: {mycommand}", file=sys.stderr)
    proc = spawn_process(func=command_exec, cmd=args, work_dir=work_dir, env=env)
    try:
        # block here so state can be inspected while the shell is open
        ipdb.set_trace()
    finally:
        proc.terminate()
def write_command(command: str, loc: Path) -> None:
    """Write `command` as a bash script at `loc` and mark it user-executable."""
    script = "#!/usr/bin/env bash\n" + command
    with open(loc, "w") as f:
        f.write(script)
    mode = os.stat(loc).st_mode
    os.chmod(loc, mode | stat.S_IEXEC)
def spawn_process(func: Callable, **kwargs: Any) -> mp.Process:
    """Start `func` in a fresh "spawn"-method process and return it running.

    bug fix: mp.set_start_method() raises RuntimeError when the start method
    was already set (e.g. on a second call); using a spawn *context* gives
    the same semantics without mutating global interpreter state.

    :param func: picklable callable to run in the child process
    :param kwargs: keyword arguments forwarded to `func`
    """
    ctx = mp.get_context("spawn")
    proc = ctx.Process(target=func, kwargs=kwargs)
    proc.start()
    return proc
def dump_env(env: Dict[str, str], loc: Path) -> None:
    """Dump `env` as a sourceable bash script at `loc` and mark it executable.

    Entries whose value contains a newline or quote character are skipped
    (best effort — they would break the single-quoted export lines).
    """
    lines = ["#!/usr/bin/env bash\n"]
    for key, val in env.items():
        if "\n" in val or '"' in val or "'" in val:
            continue
        lines.append(f"export {key}='{val}'\n")
    with open(loc, "w") as f:
        f.writelines(lines)
    mode = os.stat(loc).st_mode
    os.chmod(loc, mode | stat.S_IEXEC)

View File

@@ -1,10 +1,13 @@
import logging
import os
import sys
from pathlib import Path
from typing import Optional
from .errors import ClanError
from .flakes.types import FlakeName
from .types import FlakeName
log = logging.getLogger(__name__)
def _get_clan_flake_toplevel() -> Path:
@@ -51,28 +54,31 @@ def user_data_dir() -> Path:
def clan_data_dir() -> Path:
    """Return (creating if necessary) the user's clan data directory."""
    path = user_data_dir() / "clan"
    if not path.exists():
        # fix: the rendered diff left both the old path.mkdir() and the new
        # parents-aware call in place; calling mkdir twice raises FileExistsError
        log.debug(f"Creating path with parents {path}")
        path.mkdir(parents=True)
    return path.resolve()
def clan_config_dir() -> Path:
    """Return (creating if necessary) the user's clan config directory."""
    path = user_config_dir() / "clan"
    if not path.exists():
        # fix: the rendered diff left both the old path.mkdir() and the new
        # parents-aware call in place; calling mkdir twice raises FileExistsError
        log.debug(f"Creating path with parents {path}")
        path.mkdir(parents=True)
    return path.resolve()
def clan_flakes_dir() -> Path:
    """Return (creating if necessary) the directory that holds all clan flakes."""
    path = clan_data_dir() / "flake"
    if not path.exists():
        # fix: the rendered diff left both the old path.mkdir() and the new
        # parents-aware call in place; calling mkdir twice raises FileExistsError
        log.debug(f"Creating path with parents {path}")
        path.mkdir(parents=True)
    return path.resolve()
def specific_flake_dir(flake_name: FlakeName) -> Path:
    """Return the directory of the named flake.

    :raises ClanError: when the flake directory does not exist
    """
    flake_dir = clan_flakes_dir() / flake_name
    if not flake_dir.exists():
        # fix: the rendered diff kept both the old and new raise statements,
        # leaving the second unreachable — keep the newer, quoted message
        raise ClanError(f"Flake '{flake_name}' does not exist")
    return flake_dir

View File

@@ -1,20 +0,0 @@
# !/usr/bin/env python3
import argparse
from .create import register_create_parser
from .list import register_list_parser
# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    """Register the flake subcommands (`create`, `list`) on the given parser."""
    subparsers = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    register_create_parser(subparsers.add_parser("create", help="Create a clan flake"))
    register_list_parser(subparsers.add_parser("list", help="List clan flakes"))

View File

@@ -1,69 +0,0 @@
# !/usr/bin/env python3
import argparse
from pathlib import Path
from typing import Dict
from pydantic import AnyUrl
from pydantic.tools import parse_obj_as
from ..async_cmd import CmdOut, run, runforcli
from ..dirs import clan_flakes_dir
from ..nix import nix_command, nix_shell
DEFAULT_URL: AnyUrl = parse_obj_as(
AnyUrl, "git+https://git.clan.lol/clan/clan-core#new-clan"
)
async def create_flake(directory: Path, url: AnyUrl) -> Dict[str, CmdOut]:
    """Initialize a new clan flake in `directory` from the template at `url`.

    Runs `nix flake init` and sets up a git repository with an initial commit.

    :return: mapping of step name to its command output
    """
    if not directory.exists():
        directory.mkdir()
    response = {}
    command = nix_command(
        [
            "flake",
            "init",
            "-t",
            url,
        ]
    )
    out = await run(command, directory)
    response["flake init"] = out

    command = nix_shell(["git"], ["git", "init"])
    out = await run(command, directory)
    response["git init"] = out

    command = nix_shell(["git"], ["git", "add", "."])
    out = await run(command, directory)
    response["git add"] = out

    command = nix_shell(["git"], ["git", "config", "user.name", "clan-tool"])
    out = await run(command, directory)
    # bug fix: both config steps were stored under the same "git config" key,
    # so the user.name result was silently overwritten by user.email
    response["git config user.name"] = out

    command = nix_shell(["git"], ["git", "config", "user.email", "clan@example.com"])
    out = await run(command, directory)
    response["git config user.email"] = out

    command = nix_shell(["git"], ["git", "commit", "-a", "-m", "Initial commit"])
    out = await run(command, directory)
    response["git commit"] = out

    return response
def create_flake_command(args: argparse.Namespace) -> None:
    """CLI handler: create a flake named args.name from the default template."""
    flake_dir = clan_flakes_dir() / args.name
    runforcli(create_flake, flake_dir, DEFAULT_URL)
# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `flakes create` subparser."""
    parser.add_argument(
        "name",
        type=str,
        help="name for the flake",
    )
    # parser.add_argument("name", type=str, help="name of the flake")
    parser.set_defaults(func=create_flake_command)

View File

@@ -1,27 +0,0 @@
import argparse
import logging
import os
from ..dirs import clan_flakes_dir
log = logging.getLogger(__name__)
def list_flakes() -> list[str]:
    """Return the names of all flakes found in the clan flakes directory."""
    path = clan_flakes_dir()
    log.debug(f"Listing machines in {path}")
    if not path.exists():
        return []
    return list(os.listdir(path))
def list_command(args: argparse.Namespace) -> None:
    """CLI handler: print each flake name on its own line."""
    for flake in list_flakes():
        print(flake)
def register_list_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `flakes list` subparser (no arguments, just the callback)."""
    parser.set_defaults(func=list_command)

View File

@@ -1,3 +0,0 @@
from typing import NewType

# distinct string type for clan flake names, so they are not confused with
# arbitrary strings or filesystem paths in function signatures
FlakeName = NewType("FlakeName", str)

View File

@@ -1,88 +0,0 @@
import shlex
import subprocess
from pathlib import Path
from typing import Optional
from clan_cli.dirs import find_git_repo_root
from clan_cli.errors import ClanError
from clan_cli.nix import nix_shell
# generic vcs agnostic commit function
# generic vcs agnostic commit function
def commit_file(
    file_path: Path,
    repo_dir: Optional[Path] = None,
    commit_message: Optional[str] = None,
) -> None:
    """Commit `file_path` in the repository at `repo_dir` (auto-detected if None).

    Silently does nothing when no repository root can be found, or when the
    repository is not a git repository.

    :raises ClanError: if the file is outside the repository or missing
    """
    if repo_dir is None:
        repo_dir = find_git_repo_root()
        if repo_dir is None:
            return
    # check that the file is in the git repository and exists
    if not Path(file_path).resolve().is_relative_to(repo_dir.resolve()):
        raise ClanError(f"File {file_path} is not in the git repository {repo_dir}")
    if not file_path.exists():
        raise ClanError(f"File {file_path} does not exist")
    # generate commit message if not provided
    if commit_message is None:
        # ensure that mentioned file path is relative to repo
        commit_message = f"Add {file_path.relative_to(repo_dir)}"
    # only git repositories are supported; anything else is a silent no-op
    if not (repo_dir / ".git").exists():
        return
    _commit_file_to_git(repo_dir, file_path, commit_message)
def _commit_file_to_git(repo_dir: Path, file_path: Path, commit_message: str) -> None:
    """Commit a file to a git repository.

    Stages the file, skips the commit when it produced no staged change,
    and otherwise commits only that file.

    :param repo_dir: The path to the git repository.
    :param file_path: The path to the file to commit.
    :param commit_message: The commit message.
    :raises ClanError: If the file is not in the git repository.
    """
    cmd = nix_shell(
        ["git"],
        ["git", "-C", str(repo_dir), "add", str(file_path)],
    )
    # add the file to the git index
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        raise ClanError(
            f"Failed to add {file_path} to git repository {repo_dir}:\n{shlex.join(cmd)}\n exited with {e.returncode}"
        ) from e
    # check if there is a diff
    cmd = nix_shell(
        ["git"],
        ["git", "-C", str(repo_dir), "diff", "--cached", "--exit-code"],
    )
    result = subprocess.run(cmd, cwd=repo_dir)
    # if there is no diff, return
    if result.returncode == 0:
        return
    # commit only that file
    cmd = nix_shell(
        ["git"],
        [
            "git",
            "-C",
            str(repo_dir),
            "commit",
            "-m",
            commit_message,
            str(file_path.relative_to(repo_dir)),
        ],
    )
    try:
        subprocess.run(
            cmd,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        raise ClanError(
            f"Failed to commit {file_path} to git repository {repo_dir}:\n{shlex.join(cmd)}\n exited with {e.returncode}"
        ) from e

View File

@@ -1,35 +0,0 @@
# !/usr/bin/env python3
import argparse
import subprocess
import urllib
from typing import Optional
def join(args: argparse.Namespace) -> None:
    """Join a remote clan by opening the local webui on its /join page.

    :param args: parsed CLI namespace; `flake_uri` may carry a clan:// prefix
    """
    # bug fix: the module only did `import urllib`, which does not guarantee
    # the urllib.parse submodule is loaded; import it explicitly here
    import urllib.parse

    # start webui in background
    uri = args.flake_uri.removeprefix("clan://")
    subprocess.run(
        ["clan", "--debug", "webui", f"/join?flake={urllib.parse.quote_plus(uri)}"],
        # stdout=sys.stdout,
        # stderr=sys.stderr,
    )
    print(f"joined clan {args.flake_uri}")
# takes a (sub)parser and configures it
def register_parser(
    parser: Optional[argparse.ArgumentParser],
) -> None:
    """Configure the `join` (sub)parser; creates a standalone parser when None."""
    if parser is None:
        parser = argparse.ArgumentParser(
            description="join a remote clan",
        )

    # inject callback function to process the input later
    parser.set_defaults(func=join)
    parser.add_argument(
        "flake_uri",
        help="flake uri to join",
        type=str,
    )

View File

@@ -1,33 +0,0 @@
# !/usr/bin/env python3
import argparse
from .create import register_create_parser
from .delete import register_delete_parser
from .install import register_install_parser
from .list import register_list_parser
from .update import register_update_parser
# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    """Register the machines subcommands (update/create/delete/list/install)."""
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    update_parser = subparser.add_parser("update", help="Update a machine")
    register_update_parser(update_parser)
    create_parser = subparser.add_parser("create", help="Create a machine")
    register_create_parser(create_parser)
    delete_parser = subparser.add_parser("delete", help="Delete a machine")
    register_delete_parser(delete_parser)
    list_parser = subparser.add_parser("list", help="List machines")
    register_list_parser(list_parser)
    install_parser = subparser.add_parser("install", help="Install a machine")
    register_install_parser(install_parser)

View File

@@ -1,52 +0,0 @@
import argparse
import logging
from typing import Dict
from ..async_cmd import CmdOut, run, runforcli
from ..dirs import specific_flake_dir, specific_machine_dir
from ..errors import ClanError
from ..flakes.types import FlakeName
from ..nix import nix_shell
log = logging.getLogger(__name__)
async def create_machine(flake_name: FlakeName, machine_name: str) -> Dict[str, CmdOut]:
    """Create the machine directory with an empty settings.json and commit it.

    :param flake_name: name of the flake the machine belongs to
    :return: mapping of step name ("git add", "git commit") to command output
    """
    folder = specific_machine_dir(flake_name, machine_name)
    folder.mkdir(parents=True, exist_ok=True)

    # create empty settings.json file inside the folder
    with open(folder / "settings.json", "w") as f:
        f.write("{}")
    response = {}
    out = await run(nix_shell(["git"], ["git", "add", str(folder)]), cwd=folder)
    response["git add"] = out
    out = await run(
        nix_shell(
            ["git"],
            ["git", "commit", "-m", f"Added machine {machine_name}", str(folder)],
        ),
        cwd=folder,
    )
    response["git commit"] = out

    return response
def create_command(args: argparse.Namespace) -> None:
    """CLI handler: create a machine inside the given flake."""
    try:
        # validate that the flake exists (raises ClanError otherwise)
        specific_flake_dir(args.flake)
        # bug fix: create_machine expects the flake *name* (it resolves the
        # directory itself), but the resolved Path was being passed instead
        runforcli(create_machine, args.flake, args.machine)
    except ClanError as e:
        print(e)
def register_create_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `machines create` subparser."""
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=create_command)

View File

@@ -1,23 +0,0 @@
import argparse
import shutil
from ..dirs import specific_machine_dir
from ..errors import ClanError
def delete_command(args: argparse.Namespace) -> None:
    """Remove the machine's directory tree, failing when it does not exist."""
    folder = specific_machine_dir(args.flake, args.host)
    if not folder.exists():
        raise ClanError(f"Machine {args.host} does not exist")
    shutil.rmtree(folder)
def register_delete_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `machines delete` subparser."""
    parser.add_argument("host", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=delete_command)

View File

@@ -1,10 +0,0 @@
from ..dirs import specific_machine_dir
from ..flakes.types import FlakeName
def machine_has_fact(flake_name: FlakeName, machine: str, fact: str) -> bool:
    """Return whether the named fact file exists for the machine."""
    return (specific_machine_dir(flake_name, machine) / "facts" / fact).exists()
def machine_get_fact(flake_name: FlakeName, machine: str, fact: str) -> str:
    """Read and return the named fact file's text content for the machine."""
    return (specific_machine_dir(flake_name, machine) / "facts" / fact).read_text()

View File

@@ -1,65 +0,0 @@
import argparse
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell
from ..secrets.generate import generate_secrets
def install_nixos(machine: Machine) -> None:
    """Install NixOS on the machine's target host via nixos-anywhere.

    Generates the machine's secrets first and ships them with --extra-files.
    """
    h = machine.host
    target_host = f"{h.user or 'root'}@{h.host}"
    flake_attr = h.meta.get("flake_attr", "")

    generate_secrets(machine)

    with TemporaryDirectory() as tmpdir_:
        tmpdir = Path(tmpdir_)
        # NOTE(review): Machine.__init__ rebinds `upload_secrets` to the
        # "uploadSecrets" string from machine data, shadowing any method of
        # that name — calling it here looks like a bug; confirm
        machine.upload_secrets(tmpdir / machine.secrets_upload_directory)

        subprocess.run(
            nix_shell(
                ["nixos-anywhere"],
                [
                    "nixos-anywhere",
                    "-f",
                    f"{machine.flake_dir}#{flake_attr}",
                    "-t",
                    "--no-reboot",
                    "--extra-files",
                    str(tmpdir),
                    target_host,
                ],
            ),
            check=True,
        )
def install_command(args: argparse.Namespace) -> None:
    """CLI handler: install the machine onto the given ssh target."""
    machine = Machine(args.machine, flake_dir=specific_flake_dir(args.flake))
    # override the evaluated address with the explicit CLI target
    machine.deployment_address = args.target_host

    install_nixos(machine)
def register_install_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `machines install` subparser."""
    parser.add_argument(
        "machine",
        type=str,
        help="machine to install",
    )
    parser.add_argument(
        "target_host",
        type=str,
        help="ssh address to install to in the form of user@host:2222",
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to install machine from",
    )
    parser.set_defaults(func=install_command)

View File

@@ -1,35 +0,0 @@
import argparse
import logging
import os
from ..dirs import machines_dir
from ..flakes.types import FlakeName
from .types import validate_hostname
log = logging.getLogger(__name__)
def list_machines(flake_name: FlakeName) -> list[str]:
    """Return the valid machine hostnames found in the flake's machines dir."""
    path = machines_dir(flake_name)
    log.debug(f"Listing machines in {path}")
    if not path.exists():
        return []
    return [entry for entry in os.listdir(path) if validate_hostname(entry)]
def list_command(args: argparse.Namespace) -> None:
    """CLI handler: print each machine name of the flake on its own line."""
    for machine in list_machines(args.flake):
        print(machine)
def register_list_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `machines list` subparser."""
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=list_command)

View File

@@ -1,112 +0,0 @@
import json
import os
import subprocess
import sys
from pathlib import Path
from typing import Optional
from ..nix import nix_build, nix_config, nix_eval
from ..ssh import Host, parse_deployment_address
def build_machine_data(machine_name: str, clan_dir: Path) -> dict:
    """Build the machine's deployment file via nix and return its parsed JSON.

    :raises subprocess.CalledProcessError: if the nix build fails
    """
    config = nix_config()
    system = config["system"]

    # nix build prints the store path of the deployment file; strip the newline
    outpath = subprocess.run(
        nix_build(
            [
                f'path:{clan_dir}#clanInternals.machines."{system}"."{machine_name}".config.system.clan.deployment.file'
            ]
        ),
        stdout=subprocess.PIPE,
        check=True,
        text=True,
    ).stdout.strip()
    return json.loads(Path(outpath).read_text())
class Machine:
    """One clan machine, backed by deployment data evaluated from the flake."""

    def __init__(
        self,
        name: str,
        flake_dir: Path,
        machine_data: Optional[dict] = None,
    ) -> None:
        """
        Creates a Machine
        @name: the name of the machine
        @clan_dir: the directory of the clan, optional, if not set it will be determined from the current working directory
        @machine_json: can be optionally used to skip evaluation of the machine, location of the json file with machine data
        """
        self.name = name
        self.flake_dir = flake_dir
        if machine_data is None:
            # no data supplied: evaluate the machine through nix (slow path)
            self.machine_data = build_machine_data(name, self.flake_dir)
        else:
            self.machine_data = machine_data
        self.deployment_address = self.machine_data["deploymentAddress"]
        # NOTE(review): this binds `upload_secrets` to a string (script path)
        # on the instance, shadowing any method of the same name — callers
        # that invoke machine.upload_secrets(...) will get a TypeError; confirm
        self.upload_secrets = self.machine_data["uploadSecrets"]
        self.generate_secrets = self.machine_data["generateSecrets"]
        self.secrets_upload_directory = self.machine_data["secretsUploadDirectory"]

    @property
    def host(self) -> Host:
        # re-parsed on each access; meta carries a back-reference to this machine
        return parse_deployment_address(
            self.name, self.deployment_address, meta={"machine": self}
        )

    def run_upload_secrets(self, secrets_dir: Path) -> bool:
        """
        Upload the secrets to the provided directory
        @secrets_dir: the directory to store the secrets in

        Returns False when the upload script signals "nothing to do"
        (exit code 23), True on success; exits the process on other failures.
        """
        env = os.environ.copy()
        env["CLAN_DIR"] = str(self.flake_dir)
        env["PYTHONPATH"] = str(
            ":".join(sys.path)
        )  # TODO do this in the clanCore module
        env["SECRETS_DIR"] = str(secrets_dir)
        print(f"uploading secrets... {self.upload_secrets}")
        proc = subprocess.run(
            [self.upload_secrets],
            env=env,
            stdout=subprocess.PIPE,
            text=True,
        )

        # exit code 23 is the script's "no secrets to upload" signal
        if proc.returncode == 23:
            print("no secrets to upload")
            return False
        elif proc.returncode != 0:
            print("failed generate secrets directory")
            exit(1)
        return True

    def eval_nix(self, attr: str) -> str:
        """
        eval a nix attribute of the machine
        @attr: the attribute to get
        """
        output = subprocess.run(
            nix_eval([f"path:{self.flake_dir}#{attr}"]),
            stdout=subprocess.PIPE,
            check=True,
            text=True,
        ).stdout.strip()
        return output

    def build_nix(self, attr: str) -> Path:
        """
        build a nix attribute of the machine
        @attr: the attribute to get
        """
        outpath = subprocess.run(
            nix_build([f"path:{self.flake_dir}#{attr}"]),
            stdout=subprocess.PIPE,
            check=True,
            text=True,
        ).stdout.strip()
        return Path(outpath)

View File

@@ -1,22 +0,0 @@
import argparse
import re
# hostname label: alphanumeric, inner hyphens allowed, case-insensitive
VALID_HOSTNAME = re.compile(r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", re.IGNORECASE)


def validate_hostname(hostname: str) -> bool:
    """Check hostname-label validity: at most 63 chars, matches VALID_HOSTNAME."""
    if len(hostname) > 63:
        return False
    return bool(VALID_HOSTNAME.match(hostname))
def machine_name_type(arg_value: str) -> str:
    """argparse type-checker for machine names (hostname-like labels).

    :raises argparse.ArgumentTypeError: when the name is too long or invalid
    """
    if len(arg_value) > 63:
        raise argparse.ArgumentTypeError(
            "Machine name must be less than 63 characters long"
        )
    if not VALID_HOSTNAME.match(arg_value):
        # bug fix: the old message claimed ".", "_" were allowed and that names
        # must not start with a number — neither matches VALID_HOSTNAME
        raise argparse.ArgumentTypeError(
            "Invalid character in machine name. Allowed characters are a-z, 0-9 and -. Must not start or end with -"
        )
    return arg_value

View File

@@ -1,159 +0,0 @@
import argparse
import json
import os
import subprocess
from pathlib import Path
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_build, nix_command, nix_config
from ..secrets.generate import generate_secrets
from ..secrets.upload import upload_secrets
from ..ssh import Host, HostGroup, HostKeyCheck, parse_deployment_address
def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
    """
    Deploy to all hosts in parallel
    """

    def deploy(h: Host) -> None:
        # copy the flake closure to the target host before switching
        target = f"{h.user or 'root'}@{h.host}"
        ssh_arg = f"-p {h.port}" if h.port else ""
        env = os.environ.copy()
        env["NIX_SSHOPTS"] = ssh_arg
        res = h.run_local(
            nix_command(["flake", "archive", "--to", f"ssh://{target}", "--json"]),
            check=True,
            stdout=subprocess.PIPE,
            extra_env=env,
        )
        data = json.loads(res.stdout)
        path = data["path"]

        # NOTE(review): ssh_arg is extended below but never read again after
        # NIX_SSHOPTS was set above — looks like dead code; confirm
        if h.host_key_check != HostKeyCheck.STRICT:
            ssh_arg += " -o StrictHostKeyChecking=no"
        if h.host_key_check == HostKeyCheck.NONE:
            ssh_arg += " -o UserKnownHostsFile=/dev/null"

        ssh_arg += " -i " + h.key if h.key else ""

        flake_attr = h.meta.get("flake_attr", "")

        generate_secrets(h.meta["machine"])
        upload_secrets(h.meta["machine"])

        # optionally rebuild onto a different host than the one we ssh into
        target_host = h.meta.get("target_host")
        if target_host:
            target_user = h.meta.get("target_user")
            if target_user:
                target_host = f"{target_user}@{target_host}"
        extra_args = h.meta.get("extra_args", [])
        cmd = (
            ["nixos-rebuild", "switch"]
            + extra_args
            + [
                "--fast",
                "--option",
                "keep-going",
                "true",
                "--option",
                "accept-flake-config",
                "true",
                "--build-host",
                "",
                "--flake",
                f"{path}#{flake_attr}",
            ]
        )
        if target_host:
            cmd.extend(["--target-host", target_host])
        ret = h.run(cmd, check=False)
        # re-retry switch if the first time fails
        if ret.returncode != 0:
            ret = h.run(cmd)

    hosts.run_function(deploy)
# function to speedup eval if we want to evaluate all machines
def get_all_machines(clan_dir: Path) -> HostGroup:
    """Evaluate every machine of the clan in one nix build, return a HostGroup."""
    config = nix_config()
    system = config["system"]
    # bug fix: strip the trailing newline from nix's stdout before using it
    # as a file path (build_machine_data does the same)
    machines_json = subprocess.run(
        nix_build([f'{clan_dir}#clanInternals.all-machines-json."{system}"']),
        stdout=subprocess.PIPE,
        check=True,
        text=True,
    ).stdout.strip()

    machines = json.loads(Path(machines_json).read_text())

    hosts = []
    for name, machine_data in machines.items():
        # very hacky. would be better to do a MachinesGroup instead
        host = parse_deployment_address(
            name,
            machine_data["deploymentAddress"],
            meta={
                "machine": Machine(
                    name=name, flake_dir=clan_dir, machine_data=machine_data
                )
            },
        )
        hosts.append(host)
    return HostGroup(hosts)
def get_selected_machines(machine_names: list[str], flake_dir: Path) -> HostGroup:
    """Build a HostGroup containing the hosts of the named machines."""
    return HostGroup(
        [Machine(name=name, flake_dir=flake_dir).host for name in machine_names]
    )
# FIXME: we want some kind of inventory here.
def update(args: argparse.Namespace) -> None:
    """CLI handler: deploy one, several, or all machines of a flake."""
    flake_dir = specific_flake_dir(args.flake)
    if len(args.machines) == 1 and args.target_host is not None:
        # single machine with an explicit target: override its address
        machine = Machine(name=args.machines[0], flake_dir=flake_dir)
        machine.deployment_address = args.target_host
        host = parse_deployment_address(
            args.machines[0],
            args.target_host,
            meta={"machine": machine},
        )
        machines = HostGroup([host])
    elif args.target_host is not None:
        print("target host can only be specified for a single machine")
        exit(1)
    else:
        if len(args.machines) == 0:
            # no machines given: deploy the whole inventory
            machines = get_all_machines(flake_dir)
        else:
            machines = get_selected_machines(args.machines, flake_dir)

    deploy_nixos(machines, flake_dir)
def register_update_parser(parser: argparse.ArgumentParser) -> None:
    """Configure the `machines update` subparser."""
    parser.add_argument(
        "machines",
        type=str,
        help="machine to update. if empty, update all machines",
        nargs="*",
        default=[],
    )
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to update machine for",
    )
    parser.add_argument(
        "--target-host",
        type=str,
        help="address of the machine to update, in the format of user@host:1234",
    )
    parser.set_defaults(func=update)

View File

@@ -1,46 +0,0 @@
# !/usr/bin/env python3
import argparse
from .generate import register_generate_parser
from .groups import register_groups_parser
from .import_sops import register_import_sops_parser
from .key import register_key_parser
from .machines import register_machines_parser
from .secrets import register_secrets_parser
from .upload import register_upload_parser
from .users import register_users_parser
# takes a (sub)parser and configures it
def register_parser(parser: argparse.ArgumentParser) -> None:
    """Wire all secret-related subcommands onto the given parser."""
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )

    # (name, help text, registration function) for each subcommand parser
    commands = [
        ("groups", "manage groups", register_groups_parser),
        ("users", "manage users", register_users_parser),
        ("machines", "manage machines", register_machines_parser),
        ("import-sops", "import a sops file", register_import_sops_parser),
        (
            "generate",
            "generate secrets for machines if they don't exist yet",
            register_generate_parser,
        ),
        ("upload", "upload secrets for machines", register_upload_parser),
        ("key", "create and show age keys", register_key_parser),
    ]
    for name, help_text, register in commands:
        register(subparser.add_parser(name, help=help_text))

    # the plain secret commands register directly on the subparsers action
    register_secrets_parser(subparser)

View File

@@ -1,44 +0,0 @@
import os
import shutil
from pathlib import Path
from typing import Callable
from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..flakes.types import FlakeName
def get_sops_folder(flake_name: FlakeName) -> Path:
    """Return the root sops directory of the given flake."""
    flake_root = specific_flake_dir(flake_name)
    return flake_root / "sops"
def gen_sops_subfolder(subdir: str) -> Callable[[FlakeName], Path]:
    """Create an accessor resolving ``<flake>/sops/<subdir>`` for a flake."""

    def resolve(flake_name: FlakeName) -> Path:
        return specific_flake_dir(flake_name) / "sops" / subdir

    return resolve


# Canonical accessors for the per-kind sops subdirectories.
sops_secrets_folder = gen_sops_subfolder("secrets")
sops_users_folder = gen_sops_subfolder("users")
sops_machines_folder = gen_sops_subfolder("machines")
sops_groups_folder = gen_sops_subfolder("groups")
def list_objects(path: Path, is_valid: Callable[[str], bool]) -> list[str]:
    """Return the entries of `path` whose names pass the `is_valid` filter.

    A missing directory is treated as empty rather than an error.
    """
    if not path.exists():
        return []
    return [entry for entry in os.listdir(path) if is_valid(entry)]
def remove_object(path: Path, name: str) -> None:
    """Delete the object directory ``path/name``.

    Raises ClanError when the object does not exist; removes `path` itself
    once its last entry is gone.
    """
    target = path / name
    try:
        shutil.rmtree(target)
    except FileNotFoundError:
        raise ClanError(f"{name} not found in {path}")
    if not os.listdir(path):
        os.rmdir(path)

View File

@@ -1,47 +0,0 @@
import argparse
import logging
import os
import subprocess
import sys
from clan_cli.errors import ClanError
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
log = logging.getLogger(__name__)
def generate_secrets(machine: Machine) -> None:
    """Run the machine's secret-generation script and report the outcome.

    Raises ClanError when the script exits non-zero.
    """
    env = os.environ.copy()
    env["CLAN_DIR"] = str(machine.flake_dir)
    env["PYTHONPATH"] = ":".join(sys.path)  # TODO do this in the clanCore module

    print(f"generating secrets... {machine.generate_secrets}")
    proc = subprocess.run([machine.generate_secrets], env=env)
    if proc.returncode == 0:
        print("successfully generated secrets")
    else:
        raise ClanError("failed to generate secrets")
def generate_command(args: argparse.Namespace) -> None:
    """CLI entry point: generate secrets for a single machine."""
    flake_dir = specific_flake_dir(args.flake)
    generate_secrets(Machine(name=args.machine, flake_dir=flake_dir))


def register_generate_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `generate` command."""
    parser.add_argument("machine", help="The machine to generate secrets for")
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=generate_command)

View File

@@ -1,264 +0,0 @@
import argparse
import os
from pathlib import Path
from ..errors import ClanError
from ..flakes.types import FlakeName
from ..machines.types import machine_name_type, validate_hostname
from . import secrets
from .folders import (
sops_groups_folder,
sops_machines_folder,
sops_secrets_folder,
sops_users_folder,
)
from .sops import update_keys
from .types import (
VALID_USER_NAME,
group_name_type,
secret_name_type,
user_name_type,
)
def machines_folder(flake_name: FlakeName, group: str) -> Path:
    """Directory holding the machine symlinks of `group`."""
    return sops_groups_folder(flake_name) / group / "machines"


def users_folder(flake_name: FlakeName, group: str) -> Path:
    """Directory holding the user symlinks of `group`."""
    return sops_groups_folder(flake_name) / group / "users"
class Group:
    """In-memory view of a secrets group: its member machines and users."""

    def __init__(
        self, flake_name: FlakeName, name: str, machines: list[str], users: list[str]
    ) -> None:
        self.flake_name = flake_name
        self.name = name
        self.machines = machines
        self.users = users
def list_groups(flake_name: FlakeName) -> list[Group]:
    """Scan the groups folder and return every group with its members."""
    folder = sops_groups_folder(flake_name)
    groups: list[Group] = []
    if not folder.exists():
        return groups

    for name in os.listdir(folder):
        if not (folder / name).is_dir():
            continue

        machines_path = machines_folder(flake_name, name)
        machines = (
            [f.name for f in machines_path.iterdir() if validate_hostname(f.name)]
            if machines_path.is_dir()
            else []
        )

        users_path = users_folder(flake_name, name)
        users = (
            [f.name for f in users_path.iterdir() if VALID_USER_NAME.match(f.name)]
            if users_path.is_dir()
            else []
        )

        groups.append(Group(flake_name, name, machines, users))
    return groups
def list_command(args: argparse.Namespace) -> None:
    """CLI entry point: print each group followed by its members."""
    for group in list_groups(args.flake):
        print(group.name)
        for label, members in (("machines", group.machines), ("users", group.users)):
            if members:
                print(f"{label}:")
                for member in members:
                    print(f"  {member}")
        print()
def list_directory(directory: Path) -> str:
    """Render a directory listing for use in error messages."""
    if not directory.exists():
        return f"{directory} does not exist"
    lines = [f"\n{directory} contains:"]
    lines.extend(f"\n  {entry.name}" for entry in directory.iterdir())
    return "".join(lines)
def update_group_keys(flake_name: FlakeName, group: str) -> None:
    """Re-encrypt every secret that is shared with `group`."""
    for secret_name in secrets.list_secrets(flake_name):
        secret = sops_secrets_folder(flake_name) / secret_name
        if not (secret / "groups" / group).is_symlink():
            continue
        recipients = sorted(secrets.collect_keys_for_path(secret))
        update_keys(secret, list(recipients))
def add_member(
    flake_name: FlakeName, group_folder: Path, source_folder: Path, name: str
) -> None:
    """Add `name` to a group by symlinking it from its source folder.

    Raises ClanError if the member does not exist in `source_folder` or the
    target path is occupied by a non-symlink.  Re-encrypts the group's
    secrets afterwards.
    """
    source = source_folder / name
    if not source.exists():
        msg = f"{name} does not exist in {source_folder}: "
        msg += list_directory(source_folder)
        raise ClanError(msg)
    group_folder.mkdir(parents=True, exist_ok=True)
    user_target = group_folder / name
    # Path.exists() follows symlinks and reports False for a dangling link,
    # but symlink_to() would still fail on it — check is_symlink() as well.
    if user_target.is_symlink() or user_target.exists():
        if not user_target.is_symlink():
            raise ClanError(
                f"Cannot add user {name}. {user_target} exists but is not a symlink"
            )
        os.remove(user_target)
    user_target.symlink_to(os.path.relpath(source, user_target.parent))
    update_group_keys(flake_name, group_folder.parent.name)
def remove_member(flake_name: FlakeName, group_folder: Path, name: str) -> None:
    """Remove member `name` from a group folder.

    Raises ClanError if the member is not present.  While members remain,
    the group's secrets are re-encrypted for them; empty group directories
    are pruned afterwards.
    """
    target = group_folder / name
    if not target.exists():
        msg = f"{name} does not exist in group in {group_folder}: "
        msg += list_directory(group_folder)
        raise ClanError(msg)
    os.remove(target)
    # Re-key only while members remain; update_group_keys derives the group
    # name from the folder path, so this must run before pruning below.
    if len(os.listdir(group_folder)) > 0:
        update_group_keys(flake_name, group_folder.parent.name)
    # NOTE(review): when the last member is removed, no re-keying happens
    # before the folders are pruned — confirm this is intended.
    if len(os.listdir(group_folder)) == 0:
        os.rmdir(group_folder)
    if len(os.listdir(group_folder.parent)) == 0:
        os.rmdir(group_folder.parent)
def add_user(flake_name: FlakeName, group: str, name: str) -> None:
    """Link user `name` into `group` and re-encrypt the group's secrets."""
    add_member(
        flake_name, users_folder(flake_name, group), sops_users_folder(flake_name), name
    )


def add_user_command(args: argparse.Namespace) -> None:
    add_user(args.flake, args.group, args.user)


def remove_user(flake_name: FlakeName, group: str, name: str) -> None:
    """Remove user `name` from `group`."""
    remove_member(flake_name, users_folder(flake_name, group), name)


def remove_user_command(args: argparse.Namespace) -> None:
    remove_user(args.flake, args.group, args.user)


def add_machine(flake_name: FlakeName, group: str, name: str) -> None:
    """Link machine `name` into `group` and re-encrypt the group's secrets."""
    add_member(
        flake_name,
        machines_folder(flake_name, group),
        sops_machines_folder(flake_name),
        name,
    )


def add_machine_command(args: argparse.Namespace) -> None:
    add_machine(args.flake, args.group, args.machine)


def remove_machine(flake_name: FlakeName, group: str, name: str) -> None:
    """Remove machine `name` from `group`."""
    remove_member(flake_name, machines_folder(flake_name, group), name)


def remove_machine_command(args: argparse.Namespace) -> None:
    remove_machine(args.flake, args.group, args.machine)


def add_group_argument(parser: argparse.ArgumentParser) -> None:
    """Add the positional `group` argument shared by several subcommands."""
    # fixed copy-pasted help text: this argument is a group name, not a secret
    parser.add_argument("group", help="the name of the group", type=group_name_type)


def add_secret(flake_name: FlakeName, group: str, name: str) -> None:
    """Allow group `group` to read secret `name`."""
    secrets.allow_member(
        secrets.groups_folder(flake_name, name), sops_groups_folder(flake_name), group
    )


def add_secret_command(args: argparse.Namespace) -> None:
    add_secret(args.flake, args.group, args.secret)


def remove_secret(flake_name: FlakeName, group: str, name: str) -> None:
    """Revoke group `group`'s access to secret `name`."""
    secrets.disallow_member(secrets.groups_folder(flake_name, name), group)


def remove_secret_command(args: argparse.Namespace) -> None:
    remove_secret(args.flake, args.group, args.secret)
def register_groups_parser(parser: argparse.ArgumentParser) -> None:
    """Attach all `groups` subcommands (list, member and secret management)."""
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    list_parser = subparser.add_parser("list", help="list groups")
    list_parser.set_defaults(func=list_command)

    add_machine_parser = subparser.add_parser(
        "add-machine", help="add a machine to group"
    )
    add_group_argument(add_machine_parser)
    add_machine_parser.add_argument(
        "machine", help="the name of the machines to add", type=machine_name_type
    )
    add_machine_parser.set_defaults(func=add_machine_command)

    remove_machine_parser = subparser.add_parser(
        "remove-machine", help="remove a machine from group"
    )
    add_group_argument(remove_machine_parser)
    remove_machine_parser.add_argument(
        "machine", help="the name of the machines to remove", type=machine_name_type
    )
    remove_machine_parser.set_defaults(func=remove_machine_command)

    add_user_parser = subparser.add_parser("add-user", help="add a user to group")
    add_group_argument(add_user_parser)
    add_user_parser.add_argument(
        "user", help="the name of the user to add", type=user_name_type
    )
    add_user_parser.set_defaults(func=add_user_command)

    remove_user_parser = subparser.add_parser(
        "remove-user", help="remove a user from group"
    )
    add_group_argument(remove_user_parser)
    remove_user_parser.add_argument(
        "user", help="the name of the user to remove", type=user_name_type
    )
    remove_user_parser.set_defaults(func=remove_user_command)

    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a user to access a secret"
    )
    # fixed copy-pasted help text: this argument is the group name, not a user
    add_secret_parser.add_argument(
        "group", help="the name of the group", type=group_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    add_secret_parser.set_defaults(func=add_secret_command)

    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a group's access to a secret"
    )
    remove_secret_parser.add_argument(
        "group", help="the name of the group", type=group_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -1,94 +0,0 @@
import argparse
import json
import subprocess
import sys
from pathlib import Path
from ..errors import ClanError
from ..nix import nix_shell
from .secrets import encrypt_secret, sops_secrets_folder
def import_sops(args: argparse.Namespace) -> None:
    """Import every top-level string entry of a sops file as its own secret.

    Secret names get args.prefix prepended; access is granted to the given
    groups, machines and users.  Existing secrets are skipped.
    """
    file = Path(args.sops_file)
    file_type = file.suffix
    # Fail early with a readable error if the file cannot be opened.
    try:
        file.read_text()
    except OSError as e:
        raise ClanError(f"Could not read file {file}: {e}") from e
    # NOTE(review): only ".yaml" files are processed here even though the
    # help text advertises other formats and "-" for stdin; a non-yaml path
    # silently does nothing — confirm intended behavior.
    if file_type == ".yaml":
        cmd = ["sops"]
        if args.input_type:
            cmd += ["--input-type", args.input_type]
        cmd += ["--output-type", "json", "--decrypt", args.sops_file]
        cmd = nix_shell(["sops"], cmd)
        try:
            res = subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE)
        except subprocess.CalledProcessError as e:
            raise ClanError(f"Could not import sops file {file}: {e}") from e
        secrets = json.loads(res.stdout)
        for k, v in secrets.items():
            k = args.prefix + k
            # Only scalar string values can become secrets.
            if not isinstance(v, str):
                print(
                    f"WARNING: {k} is not a string but {type(v)}, skipping",
                    file=sys.stderr,
                )
                continue
            # Never overwrite a secret that already exists.
            if (sops_secrets_folder(args.flake) / k / "secret").exists():
                print(
                    f"WARNING: {k} already exists, skipping",
                    file=sys.stderr,
                )
                continue
            encrypt_secret(
                args.flake,
                sops_secrets_folder(args.flake) / k,
                v,
                add_groups=args.group,
                add_machines=args.machine,
                add_users=args.user,
            )
def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `import-sops` command."""
    parser.add_argument(
        "--input-type",
        type=str,
        default=None,
        help="the input type of the sops file (yaml, json, ...). If not specified, it will be guessed from the file extension",
    )
    # --group/--machine/--user may each be given multiple times
    for option, target in (
        ("--group", "group"),
        ("--machine", "machine"),
        ("--user", "user"),
    ):
        parser.add_argument(
            option,
            type=str,
            action="append",
            default=[],
            help=f"the {target} to import the secrets to",
        )
    parser.add_argument(
        "--prefix",
        type=str,
        default="",
        help="the prefix to use for the secret names",
    )
    parser.add_argument(
        "sops_file",
        type=str,
        help="the sops file to import (- for stdin)",
    )
    parser.set_defaults(func=import_sops)

View File

@@ -1,48 +0,0 @@
import argparse
from .. import tty
from ..errors import ClanError
from .sops import default_sops_key_path, generate_private_key, get_public_key
def generate_key() -> str:
    """Generate a fresh age key pair and store the private key on disk.

    Returns the public key.  Raises ClanError if a key already exists.
    """
    path = default_sops_key_path()
    if path.exists():
        raise ClanError(f"Key already exists at {path}")
    priv_key, pub_key = generate_private_key()
    # The default location lives under the user config dir, which may not
    # exist yet on a fresh machine.
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(priv_key)
    # The private key grants access to all secrets; keep it user-only.
    path.chmod(0o600)
    return pub_key
def show_key() -> str:
    """Return the age public key derived from the default private key file."""
    return get_public_key(default_sops_key_path().read_text())


def generate_command(args: argparse.Namespace) -> None:
    """CLI entry point: generate a new age key and explain the next steps."""
    pub_key = generate_key()
    tty.info(
        f"Generated age private key at '{default_sops_key_path()}' for your user. Please back it up on a secure location or you will lose access to your secrets."
    )
    tty.info(
        f"Also add your age public key to the repository with 'clan secrets users add youruser {pub_key}' (replace youruser with your user name)"
    )
    # removed a stray trailing `pass` statement (dead code)


def show_command(args: argparse.Namespace) -> None:
    """CLI entry point: print the age public key."""
    print(show_key())
def register_key_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the `key` subcommands (generate/show)."""
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    subparser.add_parser("generate", help="generate age key").set_defaults(
        func=generate_command
    )
    subparser.add_parser("show", help="show age public key").set_defaults(
        func=show_command
    )

View File

@@ -1,170 +0,0 @@
import argparse
from ..flakes.types import FlakeName
from ..machines.types import machine_name_type, validate_hostname
from . import secrets
from .folders import list_objects, remove_object, sops_machines_folder
from .sops import read_key, write_key
from .types import public_or_private_age_key_type, secret_name_type
def add_machine(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
    """Store the age key for machine `name`; `force` overwrites an existing key."""
    write_key(sops_machines_folder(flake_name) / name, key, force)


def remove_machine(flake_name: FlakeName, name: str) -> None:
    """Delete the key folder of machine `name`."""
    remove_object(sops_machines_folder(flake_name), name)


def get_machine(flake_name: FlakeName, name: str) -> str:
    """Return the stored public key of machine `name`."""
    return read_key(sops_machines_folder(flake_name) / name)


def has_machine(flake_name: FlakeName, name: str) -> bool:
    """True if a key.json is stored for machine `name`."""
    return (sops_machines_folder(flake_name) / name / "key.json").exists()
def list_machines(flake_name: FlakeName) -> list[str]:
    """List machines that have a stored key and a valid hostname."""
    folder = sops_machines_folder(flake_name)

    def is_machine(name: str) -> bool:
        return validate_hostname(name) and has_machine(flake_name, name)

    return list_objects(folder, is_machine)
def add_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
    """Grant `machine` access to `secret` (re-encrypts the secret)."""
    secrets.allow_member(
        secrets.machines_folder(flake_name, secret),
        sops_machines_folder(flake_name),
        machine,
    )


def remove_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
    """Revoke `machine`'s access to `secret` (re-encrypts the secret)."""
    secrets.disallow_member(secrets.machines_folder(flake_name, secret), machine)
def list_command(args: argparse.Namespace) -> None:
    """CLI entry point: print all machine names, one per line."""
    lst = list_machines(args.flake)
    if len(lst) > 0:
        print("\n".join(lst))


def add_command(args: argparse.Namespace) -> None:
    """CLI entry point: register a machine's age key."""
    add_machine(args.flake, args.machine, args.key, args.force)


def get_command(args: argparse.Namespace) -> None:
    """CLI entry point: print a machine's public key."""
    print(get_machine(args.flake, args.machine))


def remove_command(args: argparse.Namespace) -> None:
    """CLI entry point: delete a machine's key."""
    remove_machine(args.flake, args.machine)


def add_secret_command(args: argparse.Namespace) -> None:
    """CLI entry point: grant the machine access to a secret."""
    add_secret(args.flake, args.machine, args.secret)


def remove_secret_command(args: argparse.Namespace) -> None:
    """CLI entry point: revoke the machine's access to a secret."""
    remove_secret(args.flake, args.machine, args.secret)
def register_machines_parser(parser: argparse.ArgumentParser) -> None:
    """Attach all machine-key subcommands (list/add/get/remove and secret access)."""
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    # list
    list_parser = subparser.add_parser("list", help="list machines")
    list_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    list_parser.set_defaults(func=list_command)
    # add
    add_parser = subparser.add_parser("add", help="add a machine")
    add_parser.add_argument(
        "-f",
        "--force",
        help="overwrite existing machine",
        action="store_true",
        default=False,
    )
    add_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    add_parser.add_argument(
        "key",
        help="public key or private key of the user",
        type=public_or_private_age_key_type,
    )
    add_parser.set_defaults(func=add_command)
    # get
    get_parser = subparser.add_parser("get", help="get a machine public key")
    get_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    get_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    get_parser.set_defaults(func=get_command)
    # remove
    remove_parser = subparser.add_parser("remove", help="remove a machine")
    remove_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    remove_parser.set_defaults(func=remove_command)
    # add-secret
    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a machine to access a secret"
    )
    add_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_secret_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    add_secret_parser.set_defaults(func=add_secret_command)
    # remove-secret
    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a group's access to a secret"
    )
    remove_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    # fixed copy-pasted help text: this is the machine name, not a group name
    remove_secret_parser.add_argument(
        "machine", help="the name of the machine", type=machine_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -1,311 +0,0 @@
import argparse
import getpass
import os
import shutil
import sys
from pathlib import Path
from typing import IO
from .. import tty
from ..errors import ClanError
from ..flakes.types import FlakeName
from .folders import (
list_objects,
sops_groups_folder,
sops_machines_folder,
sops_secrets_folder,
sops_users_folder,
)
from .sops import decrypt_file, encrypt_file, ensure_sops_key, read_key, update_keys
from .types import VALID_SECRET_NAME, secret_name_type
def collect_keys_for_type(folder: Path) -> set[str]:
    """Gather the public keys referenced by the symlinks in `folder`.

    Broken symlinks and links pointing at the wrong kind of object are
    skipped with a warning.
    """
    keys: set[str] = set()
    if not folder.exists():
        return keys
    for entry in folder.iterdir():
        if not entry.is_symlink():
            continue
        try:
            target = entry.resolve()
        except FileNotFoundError:
            tty.warn(f"Ignoring broken symlink {entry}")
            continue
        kind = target.parent.name
        if folder.name != kind:
            tty.warn(
                f"Expected {entry} to point to {folder} but points to {target.parent}"
            )
            continue
        keys.add(read_key(target))
    return keys
def collect_keys_for_path(path: Path) -> set[str]:
    """Collect all recipient keys for a secret: direct members plus groups."""
    keys: set[str] = set()
    for kind in ("machines", "users"):
        keys.update(collect_keys_for_type(path / kind))
    groups = path / "groups"
    if groups.is_dir():
        for group in groups.iterdir():
            for kind in ("machines", "users"):
                keys.update(collect_keys_for_type(group / kind))
    return keys
def encrypt_secret(
    flake_name: FlakeName,
    secret: Path,
    value: IO[str] | str | None,
    add_users: list[str] | None = None,
    add_machines: list[str] | None = None,
    add_groups: list[str] | None = None,
) -> None:
    """Encrypt `value` into `secret`, granting access to the given members.

    The caller's own user key is always added so the secret stays readable
    by its creator.  `value=None` opens sops interactively (see encrypt_file).
    """
    key = ensure_sops_key(flake_name)
    # mutable default arguments ([]) replaced with None to avoid the shared
    # default-object pitfall; `or []` keeps the behavior identical
    for user in add_users or []:
        allow_member(
            users_folder(flake_name, secret.name),
            sops_users_folder(flake_name),
            user,
            False,
        )
    for machine in add_machines or []:
        allow_member(
            machines_folder(flake_name, secret.name),
            sops_machines_folder(flake_name),
            machine,
            False,
        )
    for group in add_groups or []:
        allow_member(
            groups_folder(flake_name, secret.name),
            sops_groups_folder(flake_name),
            group,
            False,
        )
    # (dropped a dead `keys = set([])` that was immediately overwritten here)
    keys = collect_keys_for_path(secret)
    if key.pubkey not in keys:
        keys.add(key.pubkey)
        allow_member(
            users_folder(flake_name, secret.name),
            sops_users_folder(flake_name),
            key.username,
            False,
        )
    encrypt_file(secret / "secret", value, list(sorted(keys)))
def remove_secret(flake_name: FlakeName, secret: str) -> None:
    """Delete the whole on-disk folder of `secret`."""
    path = sops_secrets_folder(flake_name) / secret
    if not path.exists():
        raise ClanError(f"Secret '{secret}' does not exist")
    shutil.rmtree(path)
def remove_command(args: argparse.Namespace) -> None:
    """CLI entry point: delete a secret."""
    remove_secret(args.flake, args.secret)


def add_secret_argument(parser: argparse.ArgumentParser) -> None:
    """Add the positional `secret` argument shared by several subcommands."""
    parser.add_argument("secret", help="the name of the secret", type=secret_name_type)
def machines_folder(flake_name: FlakeName, group: str) -> Path:
    # Folder of machine symlinks allowed to read the secret.
    # NOTE(review): the parameter named `group` is used as the secret name
    # here — consider renaming for clarity.
    return sops_secrets_folder(flake_name) / group / "machines"


def users_folder(flake_name: FlakeName, group: str) -> Path:
    # Folder of user symlinks allowed to read the secret.
    return sops_secrets_folder(flake_name) / group / "users"


def groups_folder(flake_name: FlakeName, group: str) -> Path:
    # Folder of group symlinks allowed to read the secret.
    return sops_secrets_folder(flake_name) / group / "groups"
def list_directory(directory: Path) -> str:
    """Format the contents of `directory` for use in error messages."""
    if not directory.exists():
        return f"{directory} does not exist"
    entries = (f"\n  {child.name}" for child in directory.iterdir())
    return f"\n{directory} contains:" + "".join(entries)
def allow_member(
    group_folder: Path, source_folder: Path, name: str, do_update_keys: bool = True
) -> None:
    """Symlink `name` from `source_folder` into `group_folder`.

    Raises ClanError if the member does not exist or the target is occupied
    by a non-symlink.  When `do_update_keys` is set, the owning secret is
    re-encrypted for its new key set.
    """
    source = source_folder / name
    if not source.exists():
        msg = f"{name} does not exist in {source_folder}: "
        msg += list_directory(source_folder)
        raise ClanError(msg)
    group_folder.mkdir(parents=True, exist_ok=True)
    user_target = group_folder / name
    # Path.exists() follows symlinks and reports False for a dangling link,
    # but symlink_to() would still fail on it — check is_symlink() as well.
    if user_target.is_symlink() or user_target.exists():
        if not user_target.is_symlink():
            raise ClanError(
                f"Cannot add user {name}. {user_target} exists but is not a symlink"
            )
        os.remove(user_target)
    user_target.symlink_to(os.path.relpath(source, user_target.parent))
    if do_update_keys:
        update_keys(
            group_folder.parent,
            list(sorted(collect_keys_for_path(group_folder.parent))),
        )
def disallow_member(group_folder: Path, name: str) -> None:
    """Remove `name`'s access to the secret owned by `group_folder`'s parent.

    Refuses to remove the last remaining keys, since the secret would
    become undecryptable.  Empty folders are pruned and the secret is
    re-encrypted for the remaining keys.
    """
    target = group_folder / name
    if not target.exists():
        msg = f"{name} does not exist in group in {group_folder}: "
        msg += list_directory(group_folder)
        raise ClanError(msg)
    keys = collect_keys_for_path(group_folder.parent)
    # Guard: at least one other key must keep access to the secret.
    if len(keys) < 2:
        raise ClanError(
            f"Cannot remove {name} from {group_folder.parent.name}. No keys left. Use 'clan secrets remove {name}' to remove the secret."
        )
    os.remove(target)
    if len(os.listdir(group_folder)) == 0:
        os.rmdir(group_folder)
    if len(os.listdir(group_folder.parent)) == 0:
        os.rmdir(group_folder.parent)
    # NOTE(review): keys are re-collected after pruning; if the parent folder
    # was removed above, this path no longer exists — confirm this sequence.
    update_keys(
        target.parent.parent, list(sorted(collect_keys_for_path(group_folder.parent)))
    )
def has_secret(flake_name: FlakeName, secret: str) -> bool:
    """True if an encrypted payload exists for `secret`."""
    return (sops_secrets_folder(flake_name) / secret / "secret").exists()


def list_secrets(flake_name: FlakeName) -> list[str]:
    """Names of all well-formed secrets stored in the flake."""
    path = sops_secrets_folder(flake_name)

    def validate(name: str) -> bool:
        return VALID_SECRET_NAME.match(name) is not None and has_secret(
            flake_name, name
        )

    return list_objects(path, validate)


def list_command(args: argparse.Namespace) -> None:
    """CLI entry point: print every secret name, one per line."""
    names = list_secrets(args.flake)
    if names:
        print("\n".join(names))
def decrypt_secret(flake_name: FlakeName, secret: str) -> str:
    """Decrypt `secret` and return its plaintext."""
    ensure_sops_key(flake_name)
    secret_path = sops_secrets_folder(flake_name) / secret / "secret"
    if not secret_path.exists():
        raise ClanError(f"Secret '{secret}' does not exist")
    return decrypt_file(secret_path)


def get_command(args: argparse.Namespace) -> None:
    """CLI entry point: write the decrypted secret to stdout without a newline."""
    print(decrypt_secret(args.flake, args.secret), end="")
def set_command(args: argparse.Namespace) -> None:
    """CLI entry point: set a secret from $EDITOR, env var, prompt or stdin."""
    secret_value: str | IO[str] | None
    if args.edit:
        secret_value = None  # let sops open $EDITOR on the secret
    elif os.environ.get("SOPS_NIX_SECRET"):
        secret_value = os.environ["SOPS_NIX_SECRET"]
    elif tty.is_interactive():
        secret_value = getpass.getpass(prompt="Paste your secret: ")
    else:
        secret_value = sys.stdin
    encrypt_secret(
        args.flake,
        sops_secrets_folder(args.flake) / args.secret,
        secret_value,
        args.user,
        args.machine,
        args.group,
    )
def rename_command(args: argparse.Namespace) -> None:
    """CLI entry point: rename a secret, refusing to clobber an existing one."""
    base = sops_secrets_folder(args.flake)
    old_path = base / args.secret
    new_path = base / args.new_name
    if not old_path.exists():
        raise ClanError(f"Secret '{args.secret}' does not exist")
    if new_path.exists():
        raise ClanError(f"Secret '{args.new_name}' already exists")
    os.rename(old_path, new_path)
def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
    """Attach the plain secret subcommands (list/get/set/rename/remove).

    Unlike the sibling register_* functions, this one receives the
    subparsers action directly instead of a parser.
    """
    parser_list = subparser.add_parser("list", help="list secrets")
    parser_list.set_defaults(func=list_command)
    # get
    parser_get = subparser.add_parser("get", help="get a secret")
    add_secret_argument(parser_get)
    parser_get.set_defaults(func=get_command)
    parser_get.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    # set
    parser_set = subparser.add_parser("set", help="set a secret")
    add_secret_argument(parser_set)
    parser_set.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser_set.add_argument(
        "--group",
        type=str,
        action="append",
        default=[],
        help="the group to import the secrets to (can be repeated)",
    )
    parser_set.add_argument(
        "--machine",
        type=str,
        action="append",
        default=[],
        help="the machine to import the secrets to (can be repeated)",
    )
    parser_set.add_argument(
        "--user",
        type=str,
        action="append",
        default=[],
        help="the user to import the secrets to (can be repeated)",
    )
    parser_set.add_argument(
        "-e",
        "--edit",
        action="store_true",
        default=False,
        help="edit the secret with $EDITOR instead of pasting it",
    )
    parser_set.set_defaults(func=set_command)
    # rename
    parser_rename = subparser.add_parser("rename", help="rename a secret")
    add_secret_argument(parser_rename)
    parser_rename.add_argument("new_name", type=str, help="the new name of the secret")
    parser_rename.set_defaults(func=rename_command)
    # remove
    parser_remove = subparser.add_parser("remove", help="remove a secret")
    add_secret_argument(parser_remove)
    parser_remove.set_defaults(func=remove_command)

View File

@@ -1,219 +0,0 @@
import json
import os
import shutil
import subprocess
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Iterator
from ..dirs import user_config_dir
from ..errors import ClanError
from ..flakes.types import FlakeName
from ..nix import nix_shell
from .folders import sops_machines_folder, sops_users_folder
class SopsKey:
    """An age public key together with the repository user it belongs to."""

    def __init__(self, pubkey: str, username: str) -> None:
        self.pubkey = pubkey
        self.username = username
def get_public_key(privkey: str) -> str:
    """Derive the age public key from a private key string.

    Raises ClanError when age-keygen rejects the key.
    """
    cmd = nix_shell(["age"], ["age-keygen", "-y"])
    try:
        # check=True is required: without it a failing age-keygen would never
        # raise CalledProcessError and we would return an empty string.
        res = subprocess.run(
            cmd, input=privkey, stdout=subprocess.PIPE, text=True, check=True
        )
    except subprocess.CalledProcessError as e:
        raise ClanError(
            "Failed to get public key for age private key. Is the key malformed?"
        ) from e
    return res.stdout.strip()
def generate_private_key() -> tuple[str, str]:
    """Run age-keygen and return ``(private_key, public_key)``.

    Raises ClanError when key generation fails or the output is unparseable.
    """
    cmd = nix_shell(["age"], ["age-keygen"])
    try:
        proc = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, text=True)
    except subprocess.CalledProcessError as e:
        raise ClanError("Failed to generate private sops key") from e
    pubkey = None
    private_key = None
    for line in proc.stdout.strip().splitlines():
        # age-keygen prints "# public key: age1..." plus the bare private key
        if line.startswith("# public key:"):
            pubkey = line.split(":")[1].strip()
        elif not line.startswith("#"):
            private_key = line
    if not pubkey:
        raise ClanError("Could not find public key in age-keygen output")
    if not private_key:
        raise ClanError("Could not find private key in age-keygen output")
    return private_key, pubkey
def get_user_name(flake_name: FlakeName, user: str) -> str:
    """Prompt until the user supplies a name not yet present in the repo."""
    while True:
        name = input(
            f"Your key is not yet added to the repository. Enter your user name for which your sops key will be stored in the repository [default: {user}]: "
        )
        if name:
            user = name
        candidate = sops_users_folder(flake_name) / user
        if not candidate.exists():
            return user
        print(f"{candidate} already exists")
def ensure_user_or_machine(flake_name: FlakeName, pub_key: str) -> SopsKey:
    """Resolve `pub_key` to the user or machine that owns it.

    Raises ClanError when the key is unknown to the repository.
    """
    for folder in (sops_users_folder(flake_name), sops_machines_folder(flake_name)):
        if not folder.exists():
            continue
        for owner in folder.iterdir():
            if (owner / "key.json").exists() and read_key(owner) == pub_key:
                return SopsKey(pub_key, username=owner.name)
    raise ClanError(
        f"Your sops key is not yet added to the repository. Please add it with 'clan secrets users add youruser {pub_key}' (replace youruser with your user name)"
    )
def default_sops_key_path() -> Path:
    """Location of the age key file: $SOPS_AGE_KEY_FILE or the user config dir."""
    env_path = os.environ.get("SOPS_AGE_KEY_FILE")
    if env_path:
        return Path(env_path)
    return user_config_dir() / "sops" / "age" / "keys.txt"
def ensure_sops_key(flake_name: FlakeName) -> SopsKey:
    """Locate the caller's age key and make sure it is known to the repo.

    Checks $SOPS_AGE_KEY first, then the default key file location.
    """
    env_key = os.environ.get("SOPS_AGE_KEY")
    if env_key:
        return ensure_user_or_machine(flake_name, get_public_key(env_key))
    path = default_sops_key_path()
    if not path.exists():
        raise ClanError(
            "No sops key found. Please generate one with 'clan secrets key generate'."
        )
    return ensure_user_or_machine(flake_name, get_public_key(path.read_text()))
@contextmanager
def sops_manifest(keys: list[str]) -> Iterator[Path]:
    """Yield a temporary sops config declaring `keys` as age recipients.

    The manifest file is removed again when the context exits (the original
    left the delete=False temp file behind on every call).
    """
    with NamedTemporaryFile(delete=False, mode="w") as manifest:
        json.dump(
            dict(creation_rules=[dict(key_groups=[dict(age=keys)])]), manifest, indent=2
        )
        manifest.flush()
    try:
        yield Path(manifest.name)
    finally:
        # delete=False keeps the file alive for sops; clean it up ourselves.
        try:
            os.remove(manifest.name)
        except OSError:
            pass
def update_keys(secret_path: Path, keys: list[str]) -> None:
    """Re-encrypt the payload of `secret_path` for exactly the given keys."""
    with sops_manifest(keys) as manifest:
        sops_args = [
            "sops",
            "--config",
            str(manifest),
            "updatekeys",
            "--yes",
            str(secret_path / "secret"),
        ]
        result = subprocess.run(nix_shell(["sops"], sops_args))
        if result.returncode != 0:
            raise ClanError(
                f"Failed to update keys for {secret_path}: sops exited with {result.returncode}"
            )
def encrypt_file(
    secret_path: Path, content: IO[str] | str | None, keys: list[str]
) -> None:
    """Encrypt `content` to `secret_path` for the given age `keys`.

    With content=None, sops is opened interactively ($EDITOR) on the target
    file; otherwise the plaintext is written to a temp file, encrypted in
    place and atomically moved over `secret_path`.
    """
    folder = secret_path.parent
    folder.mkdir(parents=True, exist_ok=True)
    with sops_manifest(keys) as manifest:
        if not content:
            # interactive path: let sops create/edit the file itself
            args = ["sops", "--config", str(manifest)]
            args.extend([str(secret_path)])
            cmd = nix_shell(["sops"], args)
            p = subprocess.run(cmd)
            # returns 200 if the file is changed
            if p.returncode != 0 and p.returncode != 200:
                raise ClanError(
                    f"Failed to encrypt {secret_path}: sops exited with {p.returncode}"
                )
            return
        # hopefully /tmp is written to an in-memory file to avoid leaking secrets
        with NamedTemporaryFile(delete=False) as f:
            try:
                # write the plaintext (string or file-like) to the temp file
                with open(f.name, "w") as fd:
                    if isinstance(content, str):
                        fd.write(content)
                    else:
                        shutil.copyfileobj(content, fd)
                # we pass an empty manifest to pick up existing configuration of the user
                # NOTE(review): the comment above says "empty manifest" but
                # `manifest` contains the keys — confirm which is intended.
                args = ["sops", "--config", str(manifest)]
                args.extend(["-i", "--encrypt", str(f.name)])
                cmd = nix_shell(["sops"], args)
                subprocess.run(cmd, check=True)
                # atomic copy of the encrypted file
                with NamedTemporaryFile(dir=folder, delete=False) as f2:
                    shutil.copyfile(f.name, f2.name)
                    os.rename(f2.name, secret_path)
            finally:
                # always remove the plaintext temp file
                try:
                    os.remove(f.name)
                except OSError:
                    pass
def decrypt_file(secret_path: Path) -> str:
    """Decrypt the sops-encrypted file at ``secret_path`` and return the plaintext."""
    with sops_manifest([]) as manifest:
        args = ["sops", "--config", str(manifest), "--decrypt", str(secret_path)]
        res = subprocess.run(
            nix_shell(["sops"], args), stdout=subprocess.PIPE, text=True
        )
    if res.returncode != 0:
        raise ClanError(
            f"Failed to decrypt {secret_path}: sops exited with {res.returncode}"
        )
    return res.stdout
def write_key(path: Path, publickey: str, overwrite: bool) -> None:
    """Persist an age public key as ``path``/key.json.

    @path directory that will hold the key file (created if missing)
    @publickey the age public key (age1...)
    @overwrite when False, refuse to replace an existing key file

    Raises ClanError when the key exists and overwrite is False.
    """
    path.mkdir(parents=True, exist_ok=True)
    flags = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
    if not overwrite:
        flags |= os.O_EXCL  # fail instead of clobbering an existing key
    try:
        fd = os.open(path / "key.json", flags)
    except FileExistsError as e:
        # chain the original exception for easier debugging (was a bare raise)
        raise ClanError(f"{path.name} already exists in {path}") from e
    with os.fdopen(fd, "w") as f:
        json.dump({"publickey": publickey, "type": "age"}, f, indent=2)
def read_key(path: Path) -> str:
    """Read the age public key stored in ``path``/key.json.

    Raises ClanError when the file is malformed JSON, not an age key,
    or lacks a public key.
    """
    with open(path / "key.json") as f:
        try:
            key = json.load(f)
        except json.JSONDecodeError as e:
            raise ClanError(f"Failed to decode {path.name}: {e}") from e
    # use .get so a missing "type" yields a ClanError instead of a KeyError
    key_type = key.get("type")
    if key_type != "age":
        raise ClanError(
            f"{path.name} is not an age key but {key_type}. This is not supported"
        )
    publickey = key.get("publickey")
    if not publickey:
        raise ClanError(f"{path.name} does not contain a public key")
    return publickey

View File

@@ -1,127 +0,0 @@
import os
import shlex
import shutil
import subprocess
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any
from clan_cli.nix import nix_shell
from ..dirs import specific_flake_dir
from ..errors import ClanError
from ..flakes.types import FlakeName
from .folders import sops_secrets_folder
from .machines import add_machine, has_machine
from .secrets import decrypt_secret, encrypt_secret, has_secret
from .sops import generate_private_key
def generate_host_key(flake_name: FlakeName, machine_name: str) -> None:
    """Create and register an age key for ``machine_name`` unless one already exists."""
    if has_machine(flake_name, machine_name):
        return  # already provisioned, nothing to do
    private_key, public_key = generate_private_key()
    secret_path = sops_secrets_folder(flake_name) / f"{machine_name}-age.key"
    encrypt_secret(flake_name, secret_path, private_key)
    add_machine(flake_name, machine_name, public_key, False)
def generate_secrets_group(
    flake_name: FlakeName,
    secret_group: str,
    machine_name: str,
    tempdir: Path,
    secret_options: dict[str, Any],
) -> None:
    """Run one secret group's generator script and store its outputs.

    The group is (re)generated only when at least one of its secrets is
    missing. Generated secrets are encrypted into the sops store; facts
    are copied into the clan flake directory.

    Raises ClanError when the generator fails or does not produce an
    expected secret/fact file.
    """
    clan_dir = specific_flake_dir(flake_name)
    secrets = secret_options["secrets"]
    # regenerate the whole group if any single secret is missing
    needs_regeneration = any(
        not has_secret(flake_name, f"{machine_name}-{secret['name']}")
        for secret in secrets.values()
    )
    generator = secret_options["generator"]
    subdir = tempdir / secret_group
    if needs_regeneration:
        facts_dir = subdir / "facts"
        facts_dir.mkdir(parents=True)
        secrets_dir = subdir / "secrets"
        secrets_dir.mkdir(parents=True)

        # the generator script communicates via the $facts/$secrets dirs
        text = f"""\
set -euo pipefail
export facts={shlex.quote(str(facts_dir))}
export secrets={shlex.quote(str(secrets_dir))}
{generator}
"""
        try:
            cmd = nix_shell(["bash"], ["bash", "-c", text])
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError:
            # fixed: previous message read "failed to the following command"
            msg = "failed to run the following command:\n"
            msg += text
            raise ClanError(msg)
        for secret in secrets.values():
            secret_file = secrets_dir / secret["name"]
            if not secret_file.is_file():
                msg = f"did not generate a file for '{secret['name']}' when running the following command:\n"
                msg += text
                raise ClanError(msg)
            encrypt_secret(
                flake_name,
                sops_secrets_folder(flake_name) / f"{machine_name}-{secret['name']}",
                secret_file.read_text(),
                add_machines=[machine_name],
            )
        for fact in secret_options["facts"].values():
            fact_file = facts_dir / fact["name"]
            if not fact_file.is_file():
                msg = f"did not generate a file for '{fact['name']}' when running the following command:\n"
                msg += text
                raise ClanError(msg)
            fact_path = clan_dir.joinpath(fact["path"])
            fact_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(fact_file, fact_path)
# this is called by the sops.nix clan core module
def generate_secrets_from_nix(
    flake_name: FlakeName,
    machine_name: str,
    secret_submodules: dict[str, Any],
) -> None:
    """Generate all fact/secret groups for ``machine_name``.

    Collects per-group errors, prints them to stderr, and exits with
    status 1 when any group failed.
    """
    generate_host_key(flake_name, machine_name)
    errors = {}
    with TemporaryDirectory() as d:
        # if any of the secrets are missing, we regenerate all connected facts/secrets
        for secret_group, secret_options in secret_submodules.items():
            try:
                generate_secrets_group(
                    flake_name, secret_group, machine_name, Path(d), secret_options
                )
            except ClanError as e:
                errors[secret_group] = e
    for secret_group, error in errors.items():
        # keep the header together with the error on stderr (the header
        # previously went to stdout, splitting the message across streams)
        print(
            f"failed to generate secrets for {machine_name}/{secret_group}:",
            file=sys.stderr,
        )
        print(error, file=sys.stderr)
    if len(errors) > 0:
        sys.exit(1)
# this is called by the sops.nix clan core module
def upload_age_key_from_nix(
    flake_name: FlakeName,
    machine_name: str,
) -> None:
    """Decrypt the machine's age key and write it to $SECRETS_DIR/key.txt."""
    secret_name = f"{machine_name}-age.key"
    if not has_secret(flake_name, secret_name):
        # skip uploading the secret, not managed by us
        return
    key_text = decrypt_secret(flake_name, secret_name)
    target = Path(os.environ["SECRETS_DIR"]) / "key.txt"
    target.write_text(key_text)

View File

@@ -1,52 +0,0 @@
import argparse
import os
import re
from pathlib import Path
from typing import Callable
from ..errors import ClanError
from .sops import get_public_key
# Secret names: one or more alphanumerics, dot, dash or underscore.
VALID_SECRET_NAME = re.compile(r"^[a-zA-Z0-9._-]+$")
# User/group names follow the usual unix login-name rules (max 32 chars).
VALID_USER_NAME = re.compile(r"^[a-z_]([a-z0-9_-]{0,31})?$")
def secret_name_type(arg_value: str) -> str:
    """argparse type-checker: accept only valid secret names."""
    if re.match(r"^[a-zA-Z0-9._-]+$", arg_value) is None:
        raise argparse.ArgumentTypeError(
            "Invalid character in secret name. Allowed characters are a-z, A-Z, 0-9, ., -, and _"
        )
    return arg_value
def public_or_private_age_key_type(arg_value: str) -> str:
    """argparse type-checker: normalize an age key argument to a public key.

    Accepts a public key (age1...), a private key (AGE-SECRET-KEY-...,
    converted to its public key), or a path to a file containing either.
    Raises ClanError for anything else.
    """
    if os.path.isfile(arg_value):
        # a file path was given: read the key from the file
        arg_value = Path(arg_value).read_text().strip()
    if arg_value.startswith("age1"):
        return arg_value.strip()
    if arg_value.startswith("AGE-SECRET-KEY-"):
        return get_public_key(arg_value)
    # previous code re-tested startswith("age1") here, which is always
    # false at this point — raise unconditionally instead
    raise ClanError(
        f"Please provide an age key starting with age1, got: '{arg_value}'"
    )
def group_or_user_name_type(what: str) -> Callable[[str], str]:
    """Build an argparse type-checker for user or group names."""

    def name_type(arg_value: str) -> str:
        if len(arg_value) > 32:
            raise argparse.ArgumentTypeError(
                f"{what.capitalize()} name must be less than 32 characters long"
            )
        if re.match(r"^[a-z_]([a-z0-9_-]{0,31})?$", arg_value) is None:
            raise argparse.ArgumentTypeError(
                f"Invalid character in {what} name. Allowed characters are a-z, 0-9, -, and _. Must start with a letter or _"
            )
        return arg_value

    return name_type
# Ready-made argparse validators for the two name kinds.
user_name_type = group_or_user_name_type("user")
group_name_type = group_or_user_name_type("group")

View File

@@ -1,55 +0,0 @@
import argparse
import logging
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine
from ..nix import nix_shell
log = logging.getLogger(__name__)
def upload_secrets(machine: Machine) -> None:
    """Render the machine's secrets into a temp dir and rsync them to the host."""
    with TemporaryDirectory() as tempdir_:
        tempdir = Path(tempdir_)
        if not machine.run_upload_secrets(tempdir):
            return  # nothing to upload for this machine
        host = machine.host
        ssh_cmd = host.ssh_cmd()
        # reuse the host's ssh options (everything after "ssh <target>")
        # as rsync's remote shell
        rsync_cmd = nix_shell(
            ["rsync"],
            [
                "rsync",
                "-e",
                " ".join(["ssh"] + ssh_cmd[2:]),
                "-az",
                "--delete",
                f"{str(tempdir)}/",
                f"{host.user}@{host.host}:{machine.secrets_upload_directory}/",
            ],
        )
        subprocess.run(rsync_cmd, check=True)
def upload_command(args: argparse.Namespace) -> None:
    """CLI entry point: upload secrets to the selected machine."""
    flake_dir = specific_flake_dir(args.flake)
    upload_secrets(Machine(name=args.machine, flake_dir=flake_dir))
def register_upload_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the `upload` command's positional arguments to ``parser``."""
    machine_help = "The machine to upload secrets to"
    flake_help = "name of the flake to create machine for"
    parser.add_argument("machine", help=machine_help)
    parser.add_argument("flake", type=str, help=flake_help)
    parser.set_defaults(func=upload_command)

View File

@@ -1,145 +0,0 @@
import argparse
from ..flakes.types import FlakeName
from . import secrets
from .folders import list_objects, remove_object, sops_users_folder
from .sops import read_key, write_key
from .types import (
VALID_USER_NAME,
public_or_private_age_key_type,
secret_name_type,
user_name_type,
)
def add_user(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
    """Register a user by writing their age key under the users folder."""
    write_key(sops_users_folder(flake_name) / name, key, force)


def remove_user(flake_name: FlakeName, name: str) -> None:
    """Delete the user's key directory."""
    remove_object(sops_users_folder(flake_name), name)


def get_user(flake_name: FlakeName, name: str) -> str:
    """Return the user's stored age public key."""
    return read_key(sops_users_folder(flake_name) / name)


def list_users(flake_name: FlakeName) -> list[str]:
    """List users that have a valid name and a key.json on disk."""
    path = sops_users_folder(flake_name)

    def validate(name: str) -> bool:
        return (
            VALID_USER_NAME.match(name) is not None
            and (path / name / "key.json").exists()
        )

    return list_objects(path, validate)


def add_secret(flake_name: FlakeName, user: str, secret: str) -> None:
    """Grant ``user`` access to ``secret``."""
    secrets.allow_member(
        secrets.users_folder(flake_name, secret), sops_users_folder(flake_name), user
    )


def remove_secret(flake_name: FlakeName, user: str, secret: str) -> None:
    """Revoke ``user``'s access to ``secret``."""
    secrets.disallow_member(secrets.users_folder(flake_name, secret), user)
def list_command(args: argparse.Namespace) -> None:
    """CLI: print all users, one per line (prints nothing when empty)."""
    lst = list_users(args.flake)
    if len(lst) > 0:
        print("\n".join(lst))


def add_command(args: argparse.Namespace) -> None:
    """CLI: add a user key (honours --force)."""
    add_user(args.flake, args.user, args.key, args.force)


def get_command(args: argparse.Namespace) -> None:
    """CLI: print a user's public key."""
    print(get_user(args.flake, args.user))


def remove_command(args: argparse.Namespace) -> None:
    """CLI: remove a user."""
    remove_user(args.flake, args.user)


def add_secret_command(args: argparse.Namespace) -> None:
    """CLI: allow a user to access a secret."""
    add_secret(args.flake, args.user, args.secret)


def remove_secret_command(args: argparse.Namespace) -> None:
    """CLI: revoke a user's access to a secret."""
    remove_secret(args.flake, args.user, args.secret)
def register_users_parser(parser: argparse.ArgumentParser) -> None:
    """Register the `users` subcommands (list/add/get/remove/add-secret/remove-secret).

    Fixed: the add-secret and remove-secret subparsers did not define the
    "flake" positional argument even though their handlers read args.flake,
    which failed at runtime.
    """
    subparser = parser.add_subparsers(
        title="command",
        description="the command to run",
        help="the command to run",
        required=True,
    )
    list_parser = subparser.add_parser("list", help="list users")
    list_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    list_parser.set_defaults(func=list_command)

    add_parser = subparser.add_parser("add", help="add a user")
    add_parser.add_argument(
        "-f", "--force", help="overwrite existing user", action="store_true"
    )
    add_parser.add_argument("user", help="the name of the user", type=user_name_type)
    add_parser.add_argument(
        "key",
        help="public key or private key of the user",
        type=public_or_private_age_key_type,
    )
    add_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_parser.set_defaults(func=add_command)

    get_parser = subparser.add_parser("get", help="get a user public key")
    get_parser.add_argument("user", help="the name of the user", type=user_name_type)
    get_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    get_parser.set_defaults(func=get_command)

    remove_parser = subparser.add_parser("remove", help="remove a user")
    remove_parser.add_argument("user", help="the name of the user", type=user_name_type)
    remove_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_parser.set_defaults(func=remove_command)

    add_secret_parser = subparser.add_parser(
        "add-secret", help="allow a user to access a secret"
    )
    add_secret_parser.add_argument(
        "user", help="the name of the group", type=user_name_type
    )
    add_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    # was missing: the handler reads args.flake
    add_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    add_secret_parser.set_defaults(func=add_secret_command)

    remove_secret_parser = subparser.add_parser(
        "remove-secret", help="remove a user's access to a secret"
    )
    remove_secret_parser.add_argument(
        "user", help="the name of the group", type=user_name_type
    )
    remove_secret_parser.add_argument(
        "secret", help="the name of the secret", type=secret_name_type
    )
    # was missing: the handler reads args.flake
    remove_secret_parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -1,863 +0,0 @@
# Adapted from https://github.com/numtide/deploykit
import fcntl
import logging
import math
import os
import select
import shlex
import subprocess
import sys
import time
from contextlib import ExitStack, contextmanager
from enum import Enum
from pathlib import Path
from shlex import quote
from threading import Thread
from typing import (
IO,
Any,
Callable,
Dict,
Generic,
Iterator,
List,
Literal,
Optional,
Tuple,
TypeVar,
Union,
overload,
)
# https://no-color.org
# Colors are disabled when stderr is not a TTY or NO_COLOR is set (non-empty).
DISABLE_COLOR = not sys.stderr.isatty() or os.environ.get("NO_COLOR", "") != ""
def ansi_color(color: int) -> str:
    """Return the ANSI escape sequence selecting the given SGR color code."""
    return "\x1b[" + str(color) + "m"
class CommandFormatter(logging.Formatter):
    """
    print errors in red and warnings in yellow
    """

    def __init__(self) -> None:
        super().__init__(
            "%(prefix_color)s[%(command_prefix)s]%(color_reset)s %(color)s%(message)s%(color_reset)s"
        )
        # hostnames seen so far; position in this list determines the color
        self.hostnames: List[str] = []
        self.hostname_color_offset = 1  # first host shouldn't get agressive red

    def format(self, record: logging.LogRecord) -> str:
        # level color: red for errors, yellow for warnings, default otherwise
        colorcode = 0
        if record.levelno == logging.ERROR:
            colorcode = 31  # red
        if record.levelno == logging.WARN:
            colorcode = 33  # yellow

        color, prefix_color, color_reset = "", "", ""
        if not DISABLE_COLOR:
            command_prefix = getattr(record, "command_prefix", "")
            color = ansi_color(colorcode)
            prefix_color = ansi_color(self.hostname_colorcode(command_prefix))
            color_reset = "\x1b[0m"

        # expose the computed colors to the format string above
        setattr(record, "color", color)
        setattr(record, "prefix_color", prefix_color)
        setattr(record, "color_reset", color_reset)

        return super().format(record)

    def hostname_colorcode(self, hostname: str) -> int:
        """Assign each distinct hostname a stable color from a 7-color cycle."""
        try:
            index = self.hostnames.index(hostname)
        except ValueError:
            self.hostnames += [hostname]
            index = self.hostnames.index(hostname)
        return 31 + (index + self.hostname_color_offset) % 7
def setup_loggers() -> Tuple[logging.Logger, logging.Logger]:
    """Create the deploykit main logger and the per-command output logger.

    Separate named loggers keep command output from being duplicated
    onto the default logger.
    """

    def make_logger(name: str, formatter: logging.Formatter) -> logging.Logger:
        logger = logging.getLogger(name)
        logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        handler.setLevel(logging.INFO)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        return logger

    return (
        make_logger("deploykit.main", logging.Formatter()),
        make_logger("deploykit.command", CommandFormatter()),
    )
# loggers for: general deploykit, command output
kitlog, cmdlog = setup_loggers()

# Convenience aliases for the main logger.
info = kitlog.info
warn = kitlog.warning
error = kitlog.error
@contextmanager
def _pipe() -> Iterator[Tuple[IO[str], IO[str]]]:
(pipe_r, pipe_w) = os.pipe()
read_end = os.fdopen(pipe_r, "r")
write_end = os.fdopen(pipe_w, "w")
try:
fl = fcntl.fcntl(read_end, fcntl.F_GETFL)
fcntl.fcntl(read_end, fcntl.F_SETFL, fl | os.O_NONBLOCK)
yield (read_end, write_end)
finally:
read_end.close()
write_end.close()
# stdout/stderr parameter type: None (inherit) or subprocess.PIPE (capture).
FILE = Union[None, int]

# Seconds until a message is printed when _run produces no output.
NO_OUTPUT_TIMEOUT = 20
class HostKeyCheck(Enum):
    """How strictly ssh host keys are verified for a Host."""

    # Strictly check ssh host keys, prompt for unknown ones
    STRICT = 0
    # Trust on ssh keys on first use
    TOFU = 1
    # Do not check ssh host keys
    NONE = 2
class Host:
    """A single deployment target reachable via ssh, with helpers to run
    commands locally or remotely while streaming their output through the
    command logger."""

    def __init__(
        self,
        host: str,
        user: Optional[str] = None,
        port: Optional[int] = None,
        key: Optional[str] = None,
        forward_agent: bool = False,
        command_prefix: Optional[str] = None,
        host_key_check: HostKeyCheck = HostKeyCheck.STRICT,
        meta: Dict[str, Any] = {},
        verbose_ssh: bool = False,
        ssh_options: dict[str, str] = {},
    ) -> None:
        """
        Creates a Host
        @host the hostname to connect to via ssh
        @port the port to connect to via ssh
        @forward_agent: whether to forward ssh agent
        @command_prefix: string to prefix each line of the command output with, defaults to host
        @host_key_check: whether to check ssh host keys
        @verbose_ssh: Enables verbose logging on ssh connections
        @meta: meta attributes associated with the host. Those can be accessed in custom functions passed to `run_function`

        NOTE(review): `meta` and `ssh_options` are mutable default arguments
        shared between all Hosts that do not pass them — confirm no caller
        mutates them in place.
        """
        self.host = host
        self.user = user
        self.port = port
        self.key = key
        if command_prefix:
            self.command_prefix = command_prefix
        else:
            self.command_prefix = host
        self.forward_agent = forward_agent
        self.host_key_check = host_key_check
        self.meta = meta
        self.verbose_ssh = verbose_ssh
        self.ssh_options = ssh_options

    def _prefix_output(
        self,
        displayed_cmd: str,
        print_std_fd: Optional[IO[str]],
        print_err_fd: Optional[IO[str]],
        stdout: Optional[IO[str]],
        stderr: Optional[IO[str]],
        timeout: float = math.inf,
    ) -> Tuple[str, str]:
        """Pump the command's output pipes until they drain or ``timeout``.

        Lines arriving on print_std_fd/print_err_fd are echoed through
        ``cmdlog`` prefixed with this host's command_prefix; data arriving on
        stdout/stderr (PIPE-backed) is captured and returned as
        (stdout_buf, stderr_buf).
        """
        rlist = []
        if print_std_fd is not None:
            rlist.append(print_std_fd)
        if print_err_fd is not None:
            rlist.append(print_err_fd)
        if stdout is not None:
            rlist.append(stdout)
        if stderr is not None:
            rlist.append(stderr)

        print_std_buf = ""
        print_err_buf = ""
        stdout_buf = ""
        stderr_buf = ""

        start = time.time()
        last_output = time.time()
        while len(rlist) != 0:
            # wake up at least every NO_OUTPUT_TIMEOUT seconds so we can
            # print a "still waiting" warning even when nothing arrives
            r, _, _ = select.select(rlist, [], [], min(timeout, NO_OUTPUT_TIMEOUT))

            def print_from(
                print_fd: IO[str], print_buf: str, is_err: bool = False
            ) -> Tuple[float, str]:
                # os.read returns bytes; an empty read means the pipe drained
                read = os.read(print_fd.fileno(), 4096)
                if len(read) == 0:
                    rlist.remove(print_fd)
                print_buf += read.decode("utf-8")
                if (read == b"" and len(print_buf) != 0) or "\n" in print_buf:
                    # print and empty the print_buf, if the stream is draining,
                    # but there is still something in the buffer or on newline.
                    lines = print_buf.rstrip("\n").split("\n")
                    for line in lines:
                        if not is_err:
                            cmdlog.info(
                                line, extra=dict(command_prefix=self.command_prefix)
                            )
                            pass  # NOTE(review): dead statement, kept as-is
                        else:
                            cmdlog.error(
                                line, extra=dict(command_prefix=self.command_prefix)
                            )
                    print_buf = ""
                last_output = time.time()
                return (last_output, print_buf)

            if print_std_fd in r and print_std_fd is not None:
                (last_output, print_std_buf) = print_from(
                    print_std_fd, print_std_buf, is_err=False
                )
            if print_err_fd in r and print_err_fd is not None:
                (last_output, print_err_buf) = print_from(
                    print_err_fd, print_err_buf, is_err=True
                )

            now = time.time()
            elapsed = now - start
            if now - last_output > NO_OUTPUT_TIMEOUT:
                elapsed_msg = time.strftime("%H:%M:%S", time.gmtime(elapsed))
                cmdlog.warn(
                    f"still waiting for '{displayed_cmd}' to finish... ({elapsed_msg} elapsed)",
                    extra=dict(command_prefix=self.command_prefix),
                )

            def handle_fd(fd: Optional[IO[Any]]) -> str:
                # capture raw output from a PIPE-backed fd; empty read = EOF
                if fd and fd in r:
                    read = os.read(fd.fileno(), 4096)
                    if len(read) == 0:
                        rlist.remove(fd)
                    else:
                        return read.decode("utf-8")
                return ""

            stdout_buf += handle_fd(stdout)
            stderr_buf += handle_fd(stderr)

            if now - last_output >= timeout:
                break
        return stdout_buf, stderr_buf

    def _run(
        self,
        cmd: List[str],
        displayed_cmd: str,
        shell: bool,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        timeout: float = math.inf,
    ) -> subprocess.CompletedProcess[str]:
        """Spawn ``cmd`` and stream/capture its output.

        When stdout/stderr is None the child's output is echoed through the
        command logger; when it is subprocess.PIPE it is captured and
        returned in the CompletedProcess. Raises CalledProcessError when
        ``check`` is true and the exit status is non-zero.
        """
        with ExitStack() as stack:
            read_std_fd, write_std_fd = (None, None)
            read_err_fd, write_err_fd = (None, None)
            if stdout is None or stderr is None:
                # pipes used to echo output through the logger
                read_std_fd, write_std_fd = stack.enter_context(_pipe())
                read_err_fd, write_err_fd = stack.enter_context(_pipe())
            if stdout is None:
                stdout_read = None
                stdout_write = write_std_fd
            elif stdout == subprocess.PIPE:
                stdout_read, stdout_write = stack.enter_context(_pipe())
            else:
                raise Exception(f"unsupported value for stdout parameter: {stdout}")
            if stderr is None:
                stderr_read = None
                stderr_write = write_err_fd
            elif stderr == subprocess.PIPE:
                stderr_read, stderr_write = stack.enter_context(_pipe())
            else:
                raise Exception(f"unsupported value for stderr parameter: {stderr}")

            env = os.environ.copy()
            env.update(extra_env)

            with subprocess.Popen(
                cmd,
                text=True,
                shell=shell,
                stdout=stdout_write,
                stderr=stderr_write,
                env=env,
                cwd=cwd,
            ) as p:
                # close our copies of the write ends so EOF propagates once
                # the child exits
                if write_std_fd is not None:
                    write_std_fd.close()
                if write_err_fd is not None:
                    write_err_fd.close()
                if stdout == subprocess.PIPE:
                    assert stdout_write is not None
                    stdout_write.close()
                if stderr == subprocess.PIPE:
                    assert stderr_write is not None
                    stderr_write.close()

                start = time.time()
                stdout_data, stderr_data = self._prefix_output(
                    displayed_cmd,
                    read_std_fd,
                    read_err_fd,
                    stdout_read,
                    stderr_read,
                    timeout,
                )
                try:
                    # spend only the remaining budget waiting for the child
                    ret = p.wait(timeout=max(0, timeout - (time.time() - start)))
                except subprocess.TimeoutExpired:
                    p.kill()
                    raise
                if ret != 0:
                    if check:
                        raise subprocess.CalledProcessError(
                            ret, cmd=cmd, output=stdout_data, stderr=stderr_data
                        )
                    else:
                        cmdlog.warning(
                            f"[Command failed: {ret}] {displayed_cmd}",
                            extra=dict(command_prefix=self.command_prefix),
                        )
                return subprocess.CompletedProcess(
                    cmd, ret, stdout=stdout_data, stderr=stderr_data
                )
        raise RuntimeError("unreachable")

    def run_local(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        timeout: float = math.inf,
    ) -> subprocess.CompletedProcess[str]:
        """
        Command to run locally for the host
        @cmd the command to run
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @extra_env environment variables to override when running the command
        @cwd current working directory to run the process in
        @timeout: Timeout in seconds for the command to complete

        @return subprocess.CompletedProcess result of the command
        """
        shell = False
        if isinstance(cmd, str):
            # a plain string is executed through the shell
            cmd = [cmd]
            shell = True
        displayed_cmd = " ".join(cmd)
        cmdlog.info(
            f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
        )
        return self._run(
            cmd,
            displayed_cmd,
            shell=shell,
            stdout=stdout,
            stderr=stderr,
            extra_env=extra_env,
            cwd=cwd,
            check=check,
            timeout=timeout,
        )

    def run(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        become_root: bool = False,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> subprocess.CompletedProcess[str]:
        """
        Command to run on the host via ssh
        @cmd the command to run
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @become_root if the ssh_user is not root then sudo is prepended
        @extra_env environment variables to override when running the command
        @cwd current working directory to run the process in
        @verbose_ssh: Enables verbose logging on ssh connections
        @timeout: Timeout in seconds for the command to complete

        @return subprocess.CompletedProcess result of the ssh command
        """
        sudo = ""
        if become_root and self.user != "root":
            sudo = "sudo -- "
        # extra_env is forwarded to the remote shell via an export prefix
        # (not via the local process environment)
        vars = []
        for k, v in extra_env.items():
            vars.append(f"{shlex.quote(k)}={shlex.quote(v)}")
        displayed_cmd = ""
        export_cmd = ""
        if vars:
            export_cmd = f"export {' '.join(vars)}; "
            displayed_cmd += export_cmd
        if isinstance(cmd, list):
            displayed_cmd += " ".join(cmd)
        else:
            displayed_cmd += cmd
        cmdlog.info(
            f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
        )

        bash_cmd = export_cmd
        bash_args = []
        if isinstance(cmd, list):
            # pass the argv through to bash unchanged
            bash_cmd += 'exec "$@"'
            bash_args += cmd
        else:
            bash_cmd += cmd
        # FIXME we assume bash to be present here? Should be documented...
        ssh_cmd = self.ssh_cmd(verbose_ssh=verbose_ssh) + [
            "--",
            f"{sudo}bash -c {quote(bash_cmd)} -- {' '.join(map(quote, bash_args))}",
        ]
        return self._run(
            ssh_cmd,
            displayed_cmd,
            shell=False,
            stdout=stdout,
            stderr=stderr,
            cwd=cwd,
            check=check,
            timeout=timeout,
        )

    def ssh_cmd(
        self,
        verbose_ssh: bool = False,
    ) -> List:
        """Build the ssh argv (["ssh", target, *options]) for this host."""
        if self.user is not None:
            ssh_target = f"{self.user}@{self.host}"
        else:
            ssh_target = self.host

        ssh_opts = ["-A"] if self.forward_agent else []
        for k, v in self.ssh_options.items():
            ssh_opts.extend(["-o", f"{k}={shlex.quote(v)}"])

        if self.port:
            ssh_opts.extend(["-p", str(self.port)])
        if self.key:
            ssh_opts.extend(["-i", self.key])

        # anything weaker than STRICT disables strict host key checking;
        # NONE additionally discards known-hosts entries entirely
        if self.host_key_check != HostKeyCheck.STRICT:
            ssh_opts.extend(["-o", "StrictHostKeyChecking=no"])
        if self.host_key_check == HostKeyCheck.NONE:
            ssh_opts.extend(["-o", "UserKnownHostsFile=/dev/null"])
        if verbose_ssh or self.verbose_ssh:
            ssh_opts.extend(["-v"])

        return ["ssh", ssh_target] + ssh_opts
# Result type produced by per-host worker functions.
T = TypeVar("T")
class HostResult(Generic[T]):
    """Pairs a Host with either its computed result or the exception it raised."""

    def __init__(self, host: Host, result: Union[T, Exception]) -> None:
        self.host = host
        self._result = result

    @property
    def error(self) -> Optional[Exception]:
        """The stored exception, or None when the command succeeded."""
        return self._result if isinstance(self._result, Exception) else None

    @property
    def result(self) -> T:
        """The stored result; re-raises when an exception was captured."""
        if isinstance(self._result, Exception):
            raise self._result
        return self._result
# Per-host outcomes of running one command across a HostGroup.
Results = List[HostResult[subprocess.CompletedProcess[str]]]
def _worker(
    func: Callable[[Host], T],
    host: Host,
    results: List[HostResult[T]],
    idx: int,
) -> None:
    """Thread target: run ``func`` for ``host`` and store the outcome at ``idx``."""
    try:
        outcome: Union[T, Exception] = func(host)
    except Exception as e:
        kitlog.exception(e)
        outcome = e
    results[idx] = HostResult(host, outcome)
class HostGroup:
    """A list of Hosts that commands or functions are run against in parallel,
    one thread per host."""

    def __init__(self, hosts: List[Host]) -> None:
        self.hosts = hosts

    def _run_local(
        self,
        cmd: Union[str, List[str]],
        host: Host,
        results: Results,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> None:
        """Thread target: run ``cmd`` locally for ``host``; append the outcome
        (result or exception) to ``results``."""
        try:
            proc = host.run_local(
                cmd,
                stdout=stdout,
                stderr=stderr,
                extra_env=extra_env,
                cwd=cwd,
                check=check,
                timeout=timeout,
            )
            results.append(HostResult(host, proc))
        except Exception as e:
            kitlog.exception(e)
            results.append(HostResult(host, e))

    def _run_remote(
        self,
        cmd: Union[str, List[str]],
        host: Host,
        results: Results,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> None:
        """Thread target: run ``cmd`` on ``host`` via ssh; append the outcome
        (result or exception) to ``results``."""
        try:
            proc = host.run(
                cmd,
                stdout=stdout,
                stderr=stderr,
                extra_env=extra_env,
                cwd=cwd,
                check=check,
                verbose_ssh=verbose_ssh,
                timeout=timeout,
            )
            results.append(HostResult(host, proc))
        except Exception as e:
            kitlog.exception(e)
            results.append(HostResult(host, e))

    def _reraise_errors(self, results: List[HostResult[Any]]) -> None:
        """Log every failed host and raise when at least one host failed."""
        errors = 0
        for result in results:
            e = result.error
            if e:
                cmdlog.error(
                    f"failed with: {e}",
                    extra=dict(command_prefix=result.host.command_prefix),
                )
                errors += 1
        if errors > 0:
            raise Exception(
                f"{errors} hosts failed with an error. Check the logs above"
            )

    def _run(
        self,
        cmd: Union[str, List[str]],
        local: bool = False,
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> Results:
        """Spawn one thread per host, collect the outcomes, and re-raise
        failures when ``check`` is true.

        NOTE(review): the threads share ``results`` via list.append —
        presumably relying on the GIL making append atomic; confirm.
        """
        results: Results = []
        threads = []
        for host in self.hosts:
            fn = self._run_local if local else self._run_remote
            thread = Thread(
                target=fn,
                kwargs=dict(
                    results=results,
                    cmd=cmd,
                    host=host,
                    stdout=stdout,
                    stderr=stderr,
                    extra_env=extra_env,
                    cwd=cwd,
                    check=check,
                    verbose_ssh=verbose_ssh,
                    timeout=timeout,
                ),
            )
            thread.start()
            threads.append(thread)

        for thread in threads:
            thread.join()

        if check:
            self._reraise_errors(results)

        return results

    def run(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        verbose_ssh: bool = False,
        timeout: float = math.inf,
    ) -> Results:
        """
        Command to run on the remote host via ssh
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @cwd current working directory to run the process in
        @verbose_ssh: Enables verbose logging on ssh connections
        @timeout: Timeout in seconds for the command to complete

        @return a lists of tuples containing Host and the result of the command for this Host
        """
        return self._run(
            cmd,
            stdout=stdout,
            stderr=stderr,
            extra_env=extra_env,
            cwd=cwd,
            check=check,
            verbose_ssh=verbose_ssh,
            timeout=timeout,
        )

    def run_local(
        self,
        cmd: Union[str, List[str]],
        stdout: FILE = None,
        stderr: FILE = None,
        extra_env: Dict[str, str] = {},
        cwd: Union[None, str, Path] = None,
        check: bool = True,
        timeout: float = math.inf,
    ) -> Results:
        """
        Command to run locally for each host in the group in parallel
        @cmd the command to run
        @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
        @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
        @cwd current working directory to run the process in
        @extra_env environment variables to override when running the command
        @timeout: Timeout in seconds for the command to complete

        @return a lists of tuples containing Host and the result of the command for this Host
        """
        return self._run(
            cmd,
            local=True,
            stdout=stdout,
            stderr=stderr,
            extra_env=extra_env,
            cwd=cwd,
            check=check,
            timeout=timeout,
        )

    def run_function(
        self, func: Callable[[Host], T], check: bool = True
    ) -> List[HostResult[T]]:
        """
        Function to run for each host in the group in parallel
        @func the function to call
        """
        threads = []
        # pre-fill results so each thread writes its own slot by index
        results: List[HostResult[T]] = [
            HostResult(h, Exception(f"No result set for thread {i}"))
            for (i, h) in enumerate(self.hosts)
        ]
        for i, host in enumerate(self.hosts):
            thread = Thread(
                target=_worker,
                args=(func, host, results, i),
            )
            threads.append(thread)

        for thread in threads:
            thread.start()

        for thread in threads:
            thread.join()

        if check:
            self._reraise_errors(results)

        return results

    def filter(self, pred: Callable[[Host], bool]) -> "HostGroup":
        """Return a new Group with the results filtered by the predicate"""
        return HostGroup(list(filter(pred, self.hosts)))
def parse_deployment_address(
    machine_name: str, host: str, meta: dict[str, Any] = {}
) -> Host:
    """Parse a deployment address of the form [user@]host[:port][?sshopt=val&...].

    @machine_name used as the Host's command prefix and flake_attr
    @host the address string to parse
    @meta extra metadata copied onto the Host (not mutated)

    NOTE(review): bare IPv6 addresses are not supported by the ":port"
    split below — confirm callers never pass them.
    """
    parts = host.split("@")
    user: Optional[str] = None
    if len(parts) > 1:
        user = parts[0]
        hostname = parts[1]
    else:
        hostname = parts[0]
    maybe_options = hostname.split("?")
    options: Dict[str, str] = {}
    if len(maybe_options) > 1:
        hostname = maybe_options[0]
        for option in maybe_options[1].split("&"):
            # split only on the first "=" so option values may contain "="
            # (e.g. ProxyCommand, base64-encoded data)
            k, v = option.split("=", 1)
            options[k] = v
    maybe_port = hostname.split(":")
    port = None
    if len(maybe_port) > 1:
        hostname = maybe_port[0]
        port = int(maybe_port[1])
    meta = meta.copy()  # don't mutate the caller's (possibly shared default) dict
    meta["flake_attr"] = machine_name
    return Host(
        hostname,
        user=user,
        port=port,
        command_prefix=machine_name,
        meta=meta,
        ssh_options=options,
    )
@overload
def run(
    cmd: Union[List[str], str],
    text: Literal[True] = ...,
    stdout: FILE = ...,
    stderr: FILE = ...,
    extra_env: Dict[str, str] = ...,
    cwd: Union[None, str, Path] = ...,
    check: bool = ...,
) -> subprocess.CompletedProcess[str]:
    ...


@overload
def run(
    cmd: Union[List[str], str],
    text: Literal[False],
    stdout: FILE = ...,
    stderr: FILE = ...,
    extra_env: Dict[str, str] = ...,
    cwd: Union[None, str, Path] = ...,
    check: bool = ...,
) -> subprocess.CompletedProcess[Any]:
    ...


def run(
    cmd: Union[List[str], str],
    text: bool = True,
    stdout: FILE = None,
    stderr: FILE = None,
    extra_env: Dict[str, str] = {},
    cwd: Union[None, str, Path] = None,
    check: bool = True,
) -> subprocess.CompletedProcess[Any]:
    """
    Run command locally
    @cmd if this parameter is a string the command is interpreted as a shell command,
         otherwise if it is a list, than the first list element is the command
         and the remaining list elements are passed as arguments to the
         command.
    @text when true, file objects for stdout and stderr are opened in text mode.
    @stdout if not None stdout of the command will be redirected to this file i.e. stdout=subprocess.PIPE
    @stderr if not None stderr of the command will be redirected to this file i.e. stderr=subprocess.PIPE
    @extra_env environment variables to override when running the command
    @cwd current working directory to run the process in
    @check If check is true, and the process exits with a non-zero exit code, a
           CalledProcessError exception will be raised. Attributes of that exception
           hold the arguments, the exit code, and stdout and stderr if they were
           captured.
    """
    if isinstance(cmd, list):
        info("$ " + " ".join(cmd))
    else:
        info(f"$ {cmd}")
    env = os.environ.copy()
    env.update(extra_env)

    # string commands go through the shell (shell=True), list commands do not
    return subprocess.run(
        cmd,
        stdout=stdout,
        stderr=stderr,
        env=env,
        cwd=cwd,
        check=check,
        shell=not isinstance(cmd, list),
        text=text,
    )

View File

@@ -1,80 +0,0 @@
import argparse
import json
import subprocess
from typing import Optional
from ..nix import nix_shell
def ssh(
    host: str,
    user: str = "root",
    password: Optional[str] = None,
    ssh_args: Optional[list[str]] = None,
) -> None:
    """
    Open an interactive ssh session to `host` through tor (via torify).

    @host address of the machine to connect to
    @user remote user name (defaults to root)
    @password when given, sshpass supplies it so the user is not prompted
    @ssh_args extra arguments placed before the generated ssh invocation
    """
    # NOTE: `ssh_args` defaults to None instead of a mutable `[]` default,
    # which would be shared (and mutable) across calls.
    if ssh_args is None:
        ssh_args = []
    packages = ["tor", "openssh"]
    password_args = []
    if password:
        packages.append("sshpass")
        password_args = [
            "sshpass",
            "-p",
            password,
        ]
    # Installer targets have fresh, unknown host keys, so checking is disabled.
    _ssh_args = ssh_args + [
        "ssh",
        "-o",
        "UserKnownHostsFile=/dev/null",
        "-o",
        "StrictHostKeyChecking=no",
        f"{user}@{host}",
    ]
    cmd = nix_shell(packages, ["torify"] + password_args + _ssh_args)
    # Interactive session: the exit code is intentionally not checked.
    subprocess.run(cmd)
def qrcode_scan(picture_file: str) -> str:
    """Decode the QR code in `picture_file` and return its raw payload."""
    # zbarimg prints the decoded payload on stdout; --quiet/--raw strip noise.
    cmd = nix_shell(
        ["zbar"],
        [
            "zbarimg",
            "--quiet",
            "--raw",
            picture_file,
        ],
    )
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, check=True)
    return proc.stdout.decode().strip()
def main(args: argparse.Namespace) -> None:
    """Entry point: load ssh connection data from a JSON file or QR image, then connect."""
    ssh_data = None
    if args.json:
        # Connection data was written to a JSON file by the installer.
        with open(args.json) as handle:
            ssh_data = json.load(handle)
    elif args.png:
        # Connection data is embedded as JSON inside a QR code image.
        ssh_data = json.loads(qrcode_scan(args.png))
    if ssh_data is not None:
        ssh(host=ssh_data["address"], password=ssh_data["password"])
def register_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the ssh subcommand's arguments and handler to `parser`."""
    # Exactly one source of connection data must be given.
    source = parser.add_mutually_exclusive_group(required=True)
    source.add_argument(
        "-j",
        "--json",
        help="specify the json file for ssh data (generated by starting the clan installer)",
    )
    source.add_argument(
        "-P",
        "--png",
        help="specify the json file for ssh data as the qrcode image (generated by starting the clan installer)",
    )
    # TODO pass all args we don't parse into ssh_args, currently it fails if arg starts with -
    parser.add_argument("ssh_args", nargs="*", default=[])
    parser.set_defaults(func=main)

View File

@@ -12,6 +12,7 @@ from pathlib import Path
from typing import Any, Iterator, Optional, Type, TypeVar
from uuid import UUID, uuid4
from .custom_logger import ThreadFormatter, get_caller
from .errors import ClanError
@@ -38,7 +39,8 @@ class Command:
cwd: Optional[Path] = None,
) -> None:
self.running = True
self.log.debug(f"Running command: {shlex.join(cmd)}")
self.log.debug(f"Command: {shlex.join(cmd)}")
self.log.debug(f"Caller: {get_caller()}")
cwd_res = None
if cwd is not None:
@@ -68,10 +70,10 @@ class Command:
try:
for line in fd:
if fd == self.p.stderr:
print(f"[{cmd[0]}] stderr: {line}")
self.log.debug(f"[{cmd[0]}] stderr: {line}")
self.stderr.append(line)
else:
print(f"[{cmd[0]}] stdout: {line}")
self.log.debug(f"[{cmd[0]}] stdout: {line}")
self.stdout.append(line)
self._output.put(line)
except BlockingIOError:
@@ -80,8 +82,6 @@ class Command:
if self.p.returncode != 0:
raise ClanError(f"Failed to run command: {shlex.join(cmd)}")
self.log.debug("Successfully ran command")
class TaskStatus(str, Enum):
NOTSTARTED = "NOTSTARTED"
@@ -94,7 +94,13 @@ class BaseTask:
def __init__(self, uuid: UUID, num_cmds: int) -> None:
# constructor
self.uuid: UUID = uuid
self.log = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(ThreadFormatter())
logger = logging.getLogger(__name__)
logger.addHandler(handler)
self.log = logger
self.log = logger
self.procs: list[Command] = []
self.status = TaskStatus.NOTSTARTED
self.logs_lock = threading.Lock()
@@ -108,6 +114,10 @@ class BaseTask:
self.status = TaskStatus.RUNNING
try:
self.run()
# TODO: We need to check, if too many commands have been initialized,
# but not run. This would deadlock the log_lines() function.
# Idea: Run next(cmds) and check if it raises StopIteration if not,
# we have too many commands
except Exception as e:
# FIXME: fix exception handling here
traceback.print_exception(*sys.exc_info())

View File

@@ -1,25 +0,0 @@
import sys
from typing import IO, Any, Callable
def is_interactive() -> bool:
    """Report whether both stdin and stdout are attached to a terminal."""
    streams = (sys.stdin, sys.stdout)
    return all(stream.isatty() for stream in streams)
def color_text(code: int, file: IO[Any] = sys.stdout) -> Callable[[str], None]:
    """
    Build a printer for ANSI color `code` that writes to `file`.

    The returned callable wraps its text in escape codes only when `file`
    is a terminal; otherwise the plain text is printed.
    """

    def emit(text: str) -> None:
        # Only emit escape sequences when writing to a real terminal.
        message = f"\x1b[{code}m{text}\x1b[0m" if file.isatty() else text
        print(message, file=file)

    return emit
# Pre-built diagnostic printers on stderr:
# 91 = bright red (warnings), 92 = bright green (informational messages).
warn = color_text(91, file=sys.stderr)
info = color_text(92, file=sys.stderr)

View File

@@ -0,0 +1,23 @@
import logging
from pathlib import Path
from typing import NewType
# Module-level logger (used by validate_path below).
log = logging.getLogger(__name__)

# Distinct type for flake names so they are not confused with arbitrary strings.
FlakeName = NewType("FlakeName", str)
def validate_path(base_dir: Path, value: Path) -> Path:
    """
    Resolve `value` against `base_dir` and ensure the result stays inside it.

    @base_dir directory the resulting path must not escape
    @value user-supplied (possibly relative) path
    @return the resolved absolute path
    @raises ValueError when the resolved path escapes `base_dir`
    """
    user_path = (base_dir / value).resolve()

    # Check if the path is within the data directory.
    # Path.is_relative_to compares whole path components, unlike the previous
    # string-prefix check which wrongly accepted e.g. /data-evil for base /data.
    if not user_path.is_relative_to(base_dir):
        if not str(user_path).startswith("/tmp/pytest"):
            raise ValueError(
                f"Destination out of bounds. Expected {user_path} to start with {base_dir}"
            )
        else:
            log.warning(
                f"Detected pytest tmpdir. Skipping path validation for {user_path}"
            )
    return user_path

View File

@@ -1,21 +0,0 @@
import argparse
from .create import register_create_parser
from .inspect import register_inspect_parser
def register_parser(parser: argparse.ArgumentParser) -> None:
    """Register the `vms` subcommands (inspect, create) on `parser`."""
    subparser = parser.add_subparsers(
        title="command",
        description="command to execute",
        help="the command to execute",
        required=True,
    )
    # Wire each subcommand to its registration helper.
    register_inspect_parser(
        subparser.add_parser("inspect", help="inspect the vm configuration")
    )
    register_create_parser(
        subparser.add_parser("create", help="create a VM from a machine")
    )

View File

@@ -1,165 +0,0 @@
import argparse
import asyncio
import json
import os
import shlex
import sys
import tempfile
from pathlib import Path
from typing import Iterator
from uuid import UUID
from ..dirs import specific_flake_dir
from ..nix import nix_build, nix_config, nix_shell
from ..task_manager import BaseTask, Command, create_task
from .inspect import VmConfig, inspect_vm
class BuildVmTask(BaseTask):
    """Background task that builds a machine's VM artifacts and boots it in qemu."""

    def __init__(self, uuid: UUID, vm: VmConfig) -> None:
        # num_cmds must match the number of next(cmds) calls performed by
        # run()/get_vm_create_info (6 in total), otherwise log streaming
        # can dead-lock (see BaseTask).
        super().__init__(uuid, num_cmds=6)
        self.vm = vm

    def get_vm_create_info(self, cmds: Iterator[Command]) -> dict:
        """Build the machine's `system.clan.vm.create` output and load it as JSON."""
        config = nix_config()
        system = config["system"]
        clan_dir = self.vm.flake_url
        machine = self.vm.flake_attr
        cmd = next(cmds)
        # nix build prints the resulting store path; that file holds the VM spec.
        cmd.run(
            nix_build(
                [
                    f'{clan_dir}#clanInternals.machines."{system}"."{machine}".config.system.clan.vm.create'
                ]
            )
        )
        vm_json = "".join(cmd.stdout)
        self.log.debug(f"VM JSON path: {vm_json}")
        with open(vm_json.strip()) as f:
            return json.load(f)

    def run(self) -> None:
        """Generate/upload secrets, create a disk image, then launch qemu."""
        cmds = self.commands()

        machine = self.vm.flake_attr
        self.log.debug(f"Creating VM for {machine}")

        # TODO: We should get this from the vm argument
        vm_config = self.get_vm_create_info(cmds)

        with tempfile.TemporaryDirectory() as tmpdir_:
            tmpdir = Path(tmpdir_)
            # Directories shared with the guest via the virtfs mounts below.
            xchg_dir = tmpdir / "xchg"
            xchg_dir.mkdir()
            secrets_dir = tmpdir / "secrets"
            secrets_dir.mkdir()
            disk_img = f"{tmpdir_}/disk.img"

            env = os.environ.copy()
            env["CLAN_DIR"] = str(self.vm.flake_url)

            env["PYTHONPATH"] = str(
                ":".join(sys.path)
            )  # TODO do this in the clanCore module
            env["SECRETS_DIR"] = str(secrets_dir)

            cmd = next(cmds)
            # Secrets are only generated when the clan flake is a local directory.
            if Path(self.vm.flake_url).is_dir():
                cmd.run(
                    [vm_config["generateSecrets"]],
                    env=env,
                )
            else:
                cmd.run(["echo", "won't generate secrets for non local clan"])

            cmd = next(cmds)
            cmd.run(
                [vm_config["uploadSecrets"]],
                env=env,
            )

            # Create a raw 1024M disk image and format it ext4, labelled "nixos".
            cmd = next(cmds)
            cmd.run(
                nix_shell(
                    ["qemu"],
                    [
                        "qemu-img",
                        "create",
                        "-f",
                        "raw",
                        disk_img,
                        "1024M",
                    ],
                )
            )

            cmd = next(cmds)
            cmd.run(
                nix_shell(
                    ["e2fsprogs"],
                    [
                        "mkfs.ext4",
                        "-L",
                        "nixos",
                        disk_img,
                    ],
                )
            )

            cmd = next(cmds)
            # Kernel command line: NixOS toplevel init plus serial console setup.
            cmdline = [
                (Path(vm_config["toplevel"]) / "kernel-params").read_text(),
                f'init={vm_config["toplevel"]}/init',
                f'regInfo={vm_config["regInfo"]}/registration',
                "console=ttyS0,115200n8",
                "console=tty0",
            ]
            qemu_command = [
                # fmt: off
                "qemu-kvm",
                "-name", machine,
                "-m", f'{vm_config["memorySize"]}M',
                "-smp", str(vm_config["cores"]),
                "-device", "virtio-rng-pci",
                "-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0",
                "-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store",
                "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared",
                "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg",
                "-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets",
                "-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report',
                "-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
                "-device", "virtio-keyboard",
                "-usb",
                "-device", "usb-tablet,bus=usb-bus.0",
                "-kernel", f'{vm_config["toplevel"]}/kernel',
                "-initrd", vm_config["initrd"],
                "-append", " ".join(cmdline),
                # fmt: on
            ]

            if not self.vm.graphics:
                qemu_command.append("-nographic")

            print("$ " + shlex.join(qemu_command))
            cmd.run(nix_shell(["qemu"], qemu_command))
def create_vm(vm: VmConfig) -> BuildVmTask:
    """Schedule a BuildVmTask for `vm` on the task manager and return it."""
    return create_task(BuildVmTask, vm)
def create_command(args: argparse.Namespace) -> None:
    """CLI handler: build and start the VM, streaming its log output."""
    flake_dir = specific_flake_dir(args.flake)
    vm_config = asyncio.run(inspect_vm(flake_url=flake_dir, flake_attr=args.machine))
    task = create_vm(vm_config)
    # Log lines already carry their own newlines; suppress print's terminator.
    for line in task.log_lines():
        print(line, end="")
def register_create_parser(parser: argparse.ArgumentParser) -> None:
    """Define arguments for `vms create` and hook up its handler."""
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=create_command)

View File

@@ -1,50 +0,0 @@
import argparse
import asyncio
import json
from pathlib import Path
from pydantic import AnyUrl, BaseModel
from ..async_cmd import run
from ..dirs import specific_flake_dir
from ..nix import nix_config, nix_eval
class VmConfig(BaseModel):
    """VM settings of a machine, as evaluated from its clan flake."""

    flake_url: AnyUrl | Path  # remote flake URL or local flake directory
    flake_attr: str  # machine name within the flake
    cores: int  # number of virtual CPU cores
    memory_size: int  # RAM size; used as qemu "-m {}M" -- presumably MiB, confirm
    graphics: bool  # whether the VM runs with a graphical display
async def inspect_vm(flake_url: AnyUrl | Path, flake_attr: str) -> VmConfig:
    """Evaluate the machine's `system.clan.vm.config` and parse it into a VmConfig."""
    system = nix_config()["system"]
    attribute = f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.system.clan.vm.config'
    out = await run(nix_eval([attribute]))
    data = json.loads(out.stdout)
    return VmConfig(flake_url=flake_url, flake_attr=flake_attr, **data)
def inspect_command(args: argparse.Namespace) -> None:
    """CLI handler: print the VM configuration of a machine."""
    flake_dir = specific_flake_dir(args.flake)
    vm = asyncio.run(inspect_vm(flake_url=flake_dir, flake_attr=args.machine))
    print("Cores:", vm.cores)
    print("Memory size:", vm.memory_size)
    print("Graphics:", vm.graphics)
def register_inspect_parser(parser: argparse.ArgumentParser) -> None:
    """Define arguments for `vms inspect` and hook up its handler."""
    parser.add_argument("machine", type=str)
    parser.add_argument(
        "flake",
        type=str,
        help="name of the flake to create machine for",
    )
    parser.set_defaults(func=inspect_command)

View File

@@ -3,28 +3,16 @@ from pathlib import Path
from typing import Any
from pydantic import AnyUrl, BaseModel, validator
from pydantic.tools import parse_obj_as
from ..dirs import clan_data_dir, clan_flakes_dir
from ..flakes.create import DEFAULT_URL
from ..types import validate_path
DEFAULT_URL = parse_obj_as(AnyUrl, "http://localhost:8000")
log = logging.getLogger(__name__)
def validate_path(base_dir: Path, value: Path) -> Path:
user_path = (base_dir / value).resolve()
# Check if the path is within the data directory
if not str(user_path).startswith(str(base_dir)):
if not str(user_path).startswith("/tmp/pytest"):
raise ValueError(
f"Destination out of bounds. Expected {user_path} to start with {base_dir}"
)
else:
log.warning(
f"Detected pytest tmpdir. Skipping path validation for {user_path}"
)
return user_path
class ClanDataPath(BaseModel):
dest: Path

View File

@@ -1,11 +1,6 @@
from enum import Enum
from typing import Dict, List
from pydantic import BaseModel, Field
from ..async_cmd import CmdOut
from ..task_manager import TaskStatus
from ..vms.inspect import VmConfig
from pydantic import BaseModel
class Status(Enum):
@@ -17,54 +12,3 @@ class Status(Enum):
class Machine(BaseModel):
name: str
status: Status
class MachineCreate(BaseModel):
name: str
class MachinesResponse(BaseModel):
machines: list[Machine]
class MachineResponse(BaseModel):
machine: Machine
class ConfigResponse(BaseModel):
config: dict
class SchemaResponse(BaseModel):
schema_: dict = Field(alias="schema")
class VmStatusResponse(BaseModel):
error: str | None
status: TaskStatus
class VmCreateResponse(BaseModel):
uuid: str
class FlakeAttrResponse(BaseModel):
flake_attrs: list[str]
class VmInspectResponse(BaseModel):
config: VmConfig
class FlakeAction(BaseModel):
id: str
uri: str
class FlakeCreateResponse(BaseModel):
cmd_out: Dict[str, CmdOut]
class FlakeResponse(BaseModel):
content: str
actions: List[FlakeAction]

View File

@@ -8,7 +8,7 @@ from fastapi.staticfiles import StaticFiles
from ..errors import ClanError
from .assets import asset_path
from .error_handlers import clan_error_handler
from .routers import flake, health, machines, root, vms
from .routers import health, root
origins = [
"http://localhost:3000",
@@ -26,14 +26,11 @@ def setup_app() -> FastAPI:
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(flake.router)
app.include_router(health.router)
app.include_router(machines.router)
app.include_router(vms.router)
# Needs to be last in register. Because of wildcard route
app.include_router(root.router)
app.add_exception_handler(ClanError, clan_error_handler)
app.mount("/static", StaticFiles(directory=asset_path()), name="static")

View File

@@ -1,90 +0,0 @@
import json
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Body, HTTPException, status
from pydantic import AnyUrl
from clan_cli.webui.api_inputs import (
FlakeCreateInput,
)
from clan_cli.webui.api_outputs import (
FlakeAction,
FlakeAttrResponse,
FlakeCreateResponse,
FlakeResponse,
)
from ...async_cmd import run
from ...flakes import create
from ...nix import nix_command, nix_flake_show
router = APIRouter()
# TODO: Check for directory traversal
async def get_attrs(url: AnyUrl | Path) -> list[str]:
    """
    Return the nixosConfigurations attribute names of the flake at `url`.

    @raises HTTPException 422 when the flake cannot be loaded or exposes
            no nixosConfigurations.
    """
    cmd = nix_flake_show(url)
    out = await run(cmd)

    data: dict[str, dict] = {}
    try:
        data = json.loads(out.stdout)
    except JSONDecodeError as e:
        # Chain the original error so the JSON problem stays visible in logs.
        raise HTTPException(status_code=422, detail="Could not load flake.") from e

    nixos_configs = data.get("nixosConfigurations", {})
    flake_attrs = list(nixos_configs.keys())

    if not flake_attrs:
        raise HTTPException(
            status_code=422, detail="No entry or no attribute: nixosConfigurations"
        )
    return flake_attrs
# TODO: Check for directory traversal
@router.get("/api/flake/attrs")
async def inspect_flake_attrs(url: AnyUrl | Path) -> FlakeAttrResponse:
    """List the machine (nixosConfigurations) names of the given flake."""
    return FlakeAttrResponse(flake_attrs=await get_attrs(url))
# TODO: Check for directory traversal
@router.get("/api/flake")
async def inspect_flake(
    url: AnyUrl | Path,
) -> FlakeResponse:
    """
    Prefetch the flake at `url` and return its flake.nix content together
    with the follow-up actions (inspect/create VM) the UI can offer.

    @raises HTTPException 500 when nix cannot prefetch the flake.
    """
    actions = []
    # Extract the flake from the given URL
    # We do this by running 'nix flake prefetch {url} --json'
    cmd = nix_command(["flake", "prefetch", str(url), "--json", "--refresh"])
    out = await run(cmd)
    data: dict[str, str] = json.loads(out.stdout)

    # Look the store path up once instead of fetching it twice as before.
    store_path = data.get("storePath")
    if store_path is None:
        raise HTTPException(status_code=500, detail="Could not load flake")

    content: str
    with open(Path(store_path) / "flake.nix") as f:
        content = f.read()

    # TODO: Figure out some measure when it is insecure to inspect or create a VM
    actions.append(FlakeAction(id="vms/inspect", uri="api/vms/inspect"))
    actions.append(FlakeAction(id="vms/create", uri="api/vms/create"))

    return FlakeResponse(content=content, actions=actions)
@router.post("/api/flake/create", status_code=status.HTTP_201_CREATED)
async def create_flake(
    args: Annotated[FlakeCreateInput, Body()],
) -> FlakeCreateResponse:
    """Create a new clan flake at `args.dest` from `args.url`; 409 if it exists."""
    # Refuse to overwrite an existing flake directory.
    if args.dest.exists():
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Flake already exists",
        )
    cmd_out = await create.create_flake(args.dest, args.url)
    return FlakeCreateResponse(cmd_out=cmd_out)

View File

@@ -1,69 +0,0 @@
# Logging setup
import logging
from typing import Annotated
from fastapi import APIRouter, Body
from ...config.machine import (
config_for_machine,
schema_for_machine,
set_config_for_machine,
)
from ...flakes.types import FlakeName
from ...machines.create import create_machine as _create_machine
from ...machines.list import list_machines as _list_machines
from ..api_outputs import (
ConfigResponse,
Machine,
MachineCreate,
MachineResponse,
MachinesResponse,
SchemaResponse,
Status,
)
log = logging.getLogger(__name__)
router = APIRouter()
@router.get("/api/{flake_name}/machines")
async def list_machines(flake_name: FlakeName) -> MachinesResponse:
    """Return all machines of the flake; status is not probed (always UNKNOWN)."""
    machines = [
        Machine(name=name, status=Status.UNKNOWN) for name in _list_machines(flake_name)
    ]
    return MachinesResponse(machines=machines)
@router.post("/api/{flake_name}/machines", status_code=201)
async def create_machine(
    flake_name: FlakeName, machine: Annotated[MachineCreate, Body()]
) -> MachineResponse:
    """Create a machine in the flake and echo it back with UNKNOWN status."""
    out = await _create_machine(flake_name, machine.name)
    log.debug(out)
    return MachineResponse(machine=Machine(name=machine.name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}")
async def get_machine(name: str) -> MachineResponse:
    """Stub: a real machine lookup is not implemented yet (hence the error log)."""
    log.error("TODO")
    return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN))
@router.get("/api/{flake_name}/machines/{name}/config")
async def get_machine_config(flake_name: FlakeName, name: str) -> ConfigResponse:
    """Return the stored clan configuration of machine `name`."""
    config = config_for_machine(flake_name, name)
    return ConfigResponse(config=config)
@router.put("/api/{flake_name}/machines/{name}/config")
async def set_machine_config(
    flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> ConfigResponse:
    """Replace the clan configuration of machine `name` and echo it back."""
    set_config_for_machine(flake_name, name, config)
    return ConfigResponse(config=config)
@router.get("/api/{flake_name}/machines/{name}/schema")
async def get_machine_schema(flake_name: FlakeName, name: str) -> SchemaResponse:
    """Return the JSON schema for machine `name`'s configuration."""
    schema = schema_for_machine(flake_name, name)
    return SchemaResponse(schema=schema)

View File

@@ -1,67 +0,0 @@
import logging
from pathlib import Path
from typing import Annotated, Iterator
from uuid import UUID
from fastapi import APIRouter, Body, status
from fastapi.exceptions import HTTPException
from fastapi.responses import StreamingResponse
from pydantic import AnyUrl
from clan_cli.webui.routers.flake import get_attrs
from ...task_manager import get_task
from ...vms import create, inspect
from ..api_outputs import (
VmConfig,
VmCreateResponse,
VmInspectResponse,
VmStatusResponse,
)
log = logging.getLogger(__name__)
router = APIRouter()
# TODO: Check for directory traversal
@router.post("/api/vms/inspect")
async def inspect_vm(
    flake_url: Annotated[AnyUrl | Path, Body()], flake_attr: Annotated[str, Body()]
) -> VmInspectResponse:
    """Evaluate and return the VM configuration of a machine in a flake."""
    config = await inspect.inspect_vm(flake_url, flake_attr)
    return VmInspectResponse(config=config)
@router.get("/api/vms/{uuid}/status")
async def get_vm_status(uuid: UUID) -> VmStatusResponse:
    """Report the current status (and error message, if any) of a VM build task."""
    task = get_task(uuid)
    log.debug(msg=f"error: {task.error}, task.status: {task.status}")
    error = str(task.error) if task.error is not None else None
    return VmStatusResponse(status=task.status, error=error)
@router.get("/api/vms/{uuid}/logs")
async def get_vm_logs(uuid: UUID) -> StreamingResponse:
    """Stream the VM task's log lines to the client as plain text."""

    # Generator function that yields log lines as they are available
    def stream_logs() -> Iterator[str]:
        task = get_task(uuid)
        yield from task.log_lines()

    return StreamingResponse(
        content=stream_logs(),
        media_type="text/plain",
    )
# TODO: Check for directory traversal
@router.post("/api/vms/create")
async def create_vm(vm: Annotated[VmConfig, Body()]) -> VmCreateResponse:
    """Validate that the machine exists in the flake, then start a VM build task."""
    flake_attrs = await get_attrs(vm.flake_url)
    if vm.flake_attr not in flake_attrs:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Provided attribute '{vm.flake_attr}' does not exist.",
        )
    task = create.create_vm(vm)
    return VmCreateResponse(uuid=str(task.uuid))

View File

@@ -12,6 +12,7 @@ from typing import Iterator
# XXX: can we dynamically load this using nix develop?
import uvicorn
from pydantic import AnyUrl, IPvAnyAddress
from pydantic.tools import parse_obj_as
from clan_cli.errors import ClanError
@@ -25,7 +26,7 @@ def open_browser(base_url: AnyUrl, sub_url: str) -> None:
break
except OSError:
time.sleep(i)
url = AnyUrl(f"{base_url}/{sub_url.removeprefix('/')}")
url = parse_obj_as(AnyUrl, f"{base_url}/{sub_url.removeprefix('/')}")
_open_browser(url)

View File

@@ -8,9 +8,11 @@
, openssh
, pytest
, pytest-cov
, pytest-xdist
, pytest-subprocess
, pytest-parallel
, pytest-timeout
, remote-pdb
, ipdb
, python3
, runCommand
, setuptools
@@ -45,8 +47,10 @@ let
pytest
pytest-cov
pytest-subprocess
pytest-parallel
pytest-xdist
pytest-timeout
remote-pdb
ipdb
openssh
git
gnupg
@@ -80,9 +84,7 @@ let
source = runCommand "clan-cli-source" { } ''
cp -r ${./.} $out
chmod -R +w $out
rm $out/clan_cli/config/jsonschema
ln -s ${nixpkgs'} $out/clan_cli/nixpkgs
cp -r ${../../lib/jsonschema} $out/clan_cli/config/jsonschema
ln -s ${ui-assets} $out/clan_cli/webui/assets
'';
nixpkgs' = runCommand "nixpkgs" { nativeBuildInputs = [ nix ]; } ''

View File

@@ -14,9 +14,14 @@ exclude = ["clan_cli.nixpkgs*"]
[tool.setuptools.package-data]
clan_cli = [ "config/jsonschema/*", "webui/assets/**/*"]
[tool.pytest.ini_options]
testpaths = "tests"
faulthandler_timeout = 60
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --workers auto --durations 5"
log_level = "DEBUG"
log_format = "%(levelname)s: %(message)s"
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --maxfail=1 --new-first -nauto" # Add --pdb for debugging
norecursedirs = "tests/helpers"
markers = [ "impure" ]
@@ -32,6 +37,10 @@ exclude = "clan_cli.nixpkgs"
module = "argcomplete.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "ipdb.*"
ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "jsonschema.*"
ignore_missing_imports = true
@@ -47,7 +56,7 @@ ignore_missing_imports = true
[tool.ruff]
line-length = 88
select = [ "E", "F", "I", "U", "N"]
select = [ "E", "F", "I", "N"]
ignore = [ "E501" ]
[tool.black]

View File

@@ -44,6 +44,7 @@ mkShell {
export PATH="$tmp_path/python/bin:${checkScript}/bin:$PATH"
export PYTHONPATH="$repo_root:$tmp_path/python/${pythonWithDeps.sitePackages}:"
export PYTHONBREAKPOINT=ipdb.set_trace
export XDG_DATA_DIRS="$tmp_path/share''${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}"
export fish_complete_path="$tmp_path/share/fish/vendor_completions.d''${fish_complete_path:+:$fish_complete_path}"
@@ -53,7 +54,5 @@ mkShell {
$tmp_path/share/zsh/site-functions
register-python-argcomplete --shell fish clan > $tmp_path/share/fish/vendor_completions.d/clan.fish
register-python-argcomplete --shell bash clan > $tmp_path/share/bash-completion/completions/clan
./bin/clan machines create example
'';
}

View File

@@ -1,31 +0,0 @@
import pytest
class KeyPair:
    """An age public/private key pair used as a test fixture."""

    def __init__(self, pubkey: str, privkey: str) -> None:
        self.pubkey = pubkey  # age recipient (public key)
        self.privkey = privkey  # AGE-SECRET-KEY private part
# Static, throw-away age key pairs for tests.
# These are intentionally public -- never use them outside the test suite.
KEYS = [
    KeyPair(
        "age1dhwqzkah943xzc34tc3dlmfayyevcmdmxzjezdgdy33euxwf59vsp3vk3c",
        "AGE-SECRET-KEY-1KF8E3SR3TTGL6M476SKF7EEMR4H9NF7ZWYSLJUAK8JX276JC7KUSSURKFK",
    ),
    KeyPair(
        "age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62",
        "AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ",
    ),
    KeyPair(
        "age1dhuh9xtefhgpr2sjjf7gmp9q2pr37z92rv4wsadxuqdx48989g7qj552qp",
        "AGE-SECRET-KEY-169N3FT32VNYQ9WYJMLUSVTMA0TTZGVJF7YZWS8AHTWJ5RR9VGR7QCD8SKF",
    ),
]
@pytest.fixture
def age_keys() -> list[KeyPair]:
    """
    Provide the static age test key pairs defined in KEYS.
    (Previous docstring "Root directory of the tests" was a copy-paste error.)
    """
    return KEYS

View File

@@ -13,12 +13,8 @@ pytest_plugins = [
"api",
"temporary_dir",
"root",
"age_keys",
"sshd",
"command",
"ports",
"host_group",
"fixtures_flakes",
]

View File

@@ -1,23 +0,0 @@
secret-key: ENC[AES256_GCM,data:gjX4OmCUdd3TlA4p,iv:3yZVpyd6FqkITQY0nU2M1iubmzvkR6PfkK2m/s6nQh8=,tag:Abgp9xkiFFylZIyAlap6Ew==,type:str]
nested:
secret-key: ENC[AES256_GCM,data:iUMgDhhIjwvd7wL4,iv:jiJIrh12dSu/sXX+z9ITVoEMNDMjwIlFBnyv40oN4LE=,tag:G9VmAa66Km1sc7JEhW5AvA==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA0eWdRVjlydXlXOVZFQ3lO
bzU1eG9Iam5Ka29Sdlo0cHJ4b1R6bjdNSzBjCkgwRndCbWZQWHlDU0x1cWRmaGVt
N29lbjR6UjN0L2RhaXEzSG9zQmRsZGsKLS0tIEdsdWgxSmZwU3BWUDVxVWRSSC9M
eVZ6bjgwZnR2TTM5MkRYZWNFSFplQWsKmSzv12/dftL9jx2y35UZUGVK6xWdatE8
BGJiCvMlp0BQNrh2s/+YaEaBa48w8LL79U/XJnEZ+ZUwxmlbSTn6Hg==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2023-08-08T14:27:20Z"
mac: ENC[AES256_GCM,data:iRWWX+L5Q5nKn3fBCLaWoz/mvqGnNnRd93gJmYXDZbRjFoHa9IFJZst5QDIDa1ZRYUe6G0/+lV5SBi+vwRm1pHysJ3c0ZWYjBP+e1jw3jLXxLV5gACsDC8by+6rFUCho0Xgu+Nqu2ehhNenjQQnCvDH5ivWbW70KFT5ynNgR9Tw=,iv:RYnnbLMC/hNfMwWPreMq9uvY0khajwQTZENO/P34ckY=,tag:Xi1PS5vM1c+sRkroHkPn1Q==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.7.3

View File

@@ -1,7 +0,0 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
QyNTUxOQAAACDonlRWMYxHTtnOeeiurKA1j26EfVZWeozuqSrtCYScFwAAAJje9J1V3vSd
VQAAAAtzc2gtZWQyNTUxOQAAACDonlRWMYxHTtnOeeiurKA1j26EfVZWeozuqSrtCYScFw
AAAEBxDpEXwhlJB/f6ZJOT9BbSqXeLy9S6qeuc25hXu5kpbuieVFYxjEdO2c556K6soDWP
boR9VlZ6jO6pKu0JhJwXAAAAE2pvZXJnQHR1cmluZ21hY2hpbmUBAg==
-----END OPENSSH PRIVATE KEY-----

View File

@@ -1 +0,0 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOieVFYxjEdO2c556K6soDWPboR9VlZ6jO6pKu0JhJwX joerg@turingmachine

View File

@@ -1,7 +0,0 @@
HostKey $host_key
LogLevel DEBUG3
# In the nix build sandbox we don't get any meaningful PATH after login
MaxStartups 64:30:256
AuthorizedKeysFile $host_key.pub
AcceptEnv REALPATH
PasswordAuthentication no

View File

@@ -1,100 +0,0 @@
import fileinput
import shutil
import tempfile
from pathlib import Path
from typing import Iterator, NamedTuple
import pytest
from root import CLAN_CORE
from clan_cli.dirs import nixpkgs_source
from clan_cli.flakes.types import FlakeName
# Substitutes strings in a file.
# This can be used on the flake.nix or default.nix of a machine
def substitute(
    file: Path,
    clan_core_flake: Path | None = None,
    flake: Path = Path(__file__).parent,
) -> None:
    """Rewrite placeholder tokens in `file` in place."""
    sops_key = str(flake.joinpath("sops.key"))
    # Placeholder -> replacement mapping, applied to every line.
    # nixpkgs_source() is resolved once up front (assumed side-effect free).
    replacements = {
        "__NIXPKGS__": str(nixpkgs_source()),
        "__CLAN_SOPS_KEY_PATH__": sops_key,
        "__CLAN_SOPS_KEY_DIR__": str(flake),
    }
    if clan_core_flake:
        replacements["__CLAN_CORE__"] = str(clan_core_flake)
    for line in fileinput.input(file, inplace=True):
        for token, value in replacements.items():
            line = line.replace(token, value)
        print(line, end="")
class TestFlake(NamedTuple):
    """A test flake: its name and the temporary path it was copied to."""

    name: FlakeName
    path: Path
def create_flake(
    monkeypatch: pytest.MonkeyPatch,
    flake_name: FlakeName,
    clan_core_flake: Path | None = None,
    machines: list[str] | None = None,
    remote: bool = False,
) -> Iterator[TestFlake]:
    """
    Creates a flake with the given name and machines.
    The machine names map to the machines in ./test_machines

    @monkeypatch used to enter the flake dir and point $HOME at the tmp dir
    @flake_name name of the template directory (next to this file) to copy
    @clan_core_flake when given, substituted for __CLAN_CORE__ in flake.nix
    @machines machine directories from ./machines to copy into the flake
    @remote when true, chdir into a separate scratch dir instead of the flake
    """
    # NOTE: `machines` defaults to None instead of the original shared
    # mutable `[]` default.
    if machines is None:
        machines = []
    template = Path(__file__).parent / flake_name

    # copy the template to a new temporary location
    with tempfile.TemporaryDirectory() as tmpdir_:
        home = Path(tmpdir_)
        flake = home / flake_name
        shutil.copytree(template, flake)

        # lookup the requested machines in ./test_machines and include them
        if machines:
            (flake / "machines").mkdir(parents=True, exist_ok=True)
        for machine_name in machines:
            machine_path = Path(__file__).parent / "machines" / machine_name
            shutil.copytree(machine_path, flake / "machines" / machine_name)
            # NOTE(review): `flake` is passed positionally, so it binds to the
            # `clan_core_flake` parameter -- looks like `flake=flake` was
            # intended; confirm before changing (kept as-is here).
            substitute(flake / "machines" / machine_name / "default.nix", flake)

        # in the flake.nix file replace the string __CLAN_URL__ with the clan flake
        # provided by get_test_flake_toplevel
        flake_nix = flake / "flake.nix"
        # this is where we would install the sops key to, when updating
        substitute(flake_nix, clan_core_flake, flake)
        if remote:
            with tempfile.TemporaryDirectory() as workdir:
                monkeypatch.chdir(workdir)
                monkeypatch.setenv("HOME", str(home))
                yield TestFlake(flake_name, flake)
        else:
            monkeypatch.chdir(flake)
            monkeypatch.setenv("HOME", str(home))
            yield TestFlake(flake_name, flake)
@pytest.fixture
def test_flake(monkeypatch: pytest.MonkeyPatch) -> Iterator[TestFlake]:
    """A copy of the plain 'test_flake' template, without clan-core."""
    yield from create_flake(monkeypatch, FlakeName("test_flake"))
@pytest.fixture
def test_flake_with_core(monkeypatch: pytest.MonkeyPatch) -> Iterator[TestFlake]:
    """The 'test_flake_with_core' template with the local clan-core substituted in."""
    if not (CLAN_CORE / "flake.nix").exists():
        raise Exception(
            "clan-core flake not found. This test requires the clan-core flake to be present"
        )
    yield from create_flake(monkeypatch, FlakeName("test_flake_with_core"), CLAN_CORE)
@pytest.fixture
def test_flake_with_core_and_pass(
    monkeypatch: pytest.MonkeyPatch,
) -> Iterator[TestFlake]:
    """Like test_flake_with_core, but using the 'test_flake_with_core_and_pass' template."""
    if not (CLAN_CORE / "flake.nix").exists():
        raise Exception(
            "clan-core flake not found. This test requires the clan-core flake to be present"
        )
    yield from create_flake(
        monkeypatch, FlakeName("test_flake_with_core_and_pass"), CLAN_CORE
    )

View File

@@ -1,6 +1,11 @@
import argparse
import logging
import shlex
from clan_cli import create_parser
from clan_cli.custom_logger import get_caller
log = logging.getLogger(__name__)
class Cli:
@@ -8,6 +13,9 @@ class Cli:
self.parser = create_parser(prog="clan")
def run(self, args: list[str]) -> argparse.Namespace:
cmd = shlex.join(["clan"] + args)
log.debug(f"$ {cmd}")
log.debug(f"Caller {get_caller()}")
parsed = self.parser.parse_args(args)
if hasattr(parsed, "func"):
parsed.func(parsed)

View File

@@ -1,23 +0,0 @@
import os
import pwd
import pytest
from sshd import Sshd
from clan_cli.ssh import Host, HostGroup, HostKeyCheck
@pytest.fixture
def host_group(sshd: Sshd) -> HostGroup:
    """A HostGroup with a single host pointing at the local test sshd."""
    # Log in as the current user; host-key checking is disabled for the test server.
    login = pwd.getpwuid(os.getuid()).pw_name
    return HostGroup(
        [
            Host(
                "127.0.0.1",
                port=sshd.port,
                user=login,
                key=sshd.key,
                host_key_check=HostKeyCheck.NONE,
            )
        ]
    )

View File

@@ -1,20 +0,0 @@
# Test machine module: a headless zerotier-controller VM that powers itself
# off right after boot so tests terminate.
{ lib, ... }: {
  # __...__ placeholders are substituted by the test fixtures before evaluation.
  clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";
  system.stateVersion = lib.version;
  sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
  clanCore.secretsUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
  clan.virtualisation.graphics = false;

  clan.networking.zerotier.controller.enable = true;
  networking.useDHCP = false;

  # Shut the VM down as soon as multi-user.target is reached.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -1,20 +0,0 @@
# Test machine module: a headless zerotier-controller VM that powers itself
# off right after boot so tests terminate.
{ lib, ... }: {
  # __...__ placeholders are substituted by the test fixtures before evaluation.
  clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";
  system.stateVersion = lib.version;
  sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
  clanCore.secretsUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
  clan.virtualisation.graphics = false;

  clan.networking.zerotier.controller.enable = true;
  networking.useDHCP = false;

  # Shut the VM down as soon as multi-user.target is reached.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -1,17 +0,0 @@
# Minimal test machine module (no sops/zerotier): a headless VM that powers
# itself off right after boot so tests terminate.
{ lib, ... }: {
  # Placeholder is substituted by the test fixtures before evaluation.
  clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";
  system.stateVersion = lib.version;
  clan.virtualisation.graphics = false;

  networking.useDHCP = false;

  # Shut the VM down as soon as multi-user.target is reached.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -1,136 +0,0 @@
import os
import shutil
import string
import subprocess
import time
from pathlib import Path
from sys import platform
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING, Iterator
import pytest
if TYPE_CHECKING:
from command import Command
from ports import PortFunction
class Sshd:
    """Handle to a running test sshd: its port, process, and client key path."""

    def __init__(self, port: int, proc: subprocess.Popen[str], key: str) -> None:
        self.port = port
        self.proc = proc
        self.key = key
class SshdConfig:
    """Prepared sshd configuration plus supporting files for the test server."""

    def __init__(
        self, path: Path, login_shell: Path, key: str, preload_lib: Path
    ) -> None:
        self.path = path  # generated sshd_config file
        self.login_shell = login_shell  # wrapper shell restoring a usable PATH
        self.key = key  # host (and client) key file
        self.preload_lib = preload_lib  # LD_PRELOAD lib forcing the login shell
@pytest.fixture(scope="session")
def sshd_config(test_root: Path) -> Iterator[SshdConfig]:
    """
    Build a session-scoped sshd setup in a temporary directory: config file,
    a login-shell wrapper, and an LD_PRELOAD library that forces that shell
    for getpwnam(3) lookups.
    """
    # FIXME, if any parent of the sshd directory is world-writable than sshd will refuse it.
    # we use .direnv instead since it's already in .gitignore
    with TemporaryDirectory() as _dir:
        dir = Path(_dir)
        host_key = test_root / "data" / "ssh_host_ed25519_key"
        # sshd refuses host keys with loose permissions.
        host_key.chmod(0o600)
        template = (test_root / "data" / "sshd_config").read_text()
        content = string.Template(template).substitute(dict(host_key=host_key))
        config = dir / "sshd_config"
        config.write_text(content)
        login_shell = dir / "shell"

        bash = shutil.which("bash")
        path = os.environ["PATH"]
        assert bash is not None

        # Wrapper shell: restore a sane PATH (from $REALPATH if set) and exec bash -l.
        login_shell.write_text(
            f"""#!{bash}
if [[ -f /etc/profile ]]; then
  source /etc/profile
fi
if [[ -n "$REALPATH" ]]; then
  export PATH="$REALPATH:${path}"
else
  export PATH="${path}"
fi
exec {bash} -l "${{@}}"
"""
        )
        login_shell.chmod(0o755)

        lib_path = None

        assert (
            platform == "linux"
        ), "we do not support the ld_preload trick on non-linux just now"

        # This enforces a login shell by overriding the login shell of `getpwnam(3)`
        lib_path = dir / "libgetpwnam-preload.so"
        subprocess.run(
            [
                os.environ.get("CC", "cc"),
                "-shared",
                "-o",
                lib_path,
                str(test_root / "getpwnam-preload.c"),
            ],
            check=True,
        )

        yield SshdConfig(config, login_shell, str(host_key), lib_path)
@pytest.fixture
def sshd(
    sshd_config: SshdConfig,
    command: "Command",
    unused_tcp_port: "PortFunction",
    monkeypatch: pytest.MonkeyPatch,
) -> Iterator[Sshd]:
    """Start an sshd on a free port and wait until it accepts a login.

    Yields an Sshd handle once a probe ssh connection succeeds; raises if
    the daemon exits before it ever becomes reachable.
    """
    port = unused_tcp_port()
    sshd = shutil.which("sshd")
    assert sshd is not None, "no sshd binary found"
    # Force the wrapper login shell through the getpwnam(3) LD_PRELOAD shim.
    env = dict(
        LD_PRELOAD=str(sshd_config.preload_lib),
        LOGIN_SHELL=str(sshd_config.login_shell),
    )
    proc = command.run(
        [sshd, "-f", str(sshd_config.path), "-D", "-p", str(port)], extra_env=env
    )
    # A developer's ssh-agent must not interfere with the key-based probe.
    monkeypatch.delenv("SSH_AUTH_SOCK", raising=False)
    while True:
        print(sshd_config.path)
        probe = subprocess.run(
            [
                "ssh",
                "-o",
                "StrictHostKeyChecking=no",
                "-o",
                "UserKnownHostsFile=/dev/null",
                "-i",
                sshd_config.key,
                "localhost",
                "-p",
                str(port),
                "true",
            ],
        )
        if probe.returncode == 0:
            yield Sshd(port, proc, sshd_config.key)
            return
        rc = proc.poll()
        if rc is not None:
            raise Exception(f"sshd process was terminated with {rc}")
        time.sleep(0.1)

View File

@@ -1,3 +1,4 @@
import logging
import os
import tempfile
from pathlib import Path
@@ -5,14 +6,20 @@ from typing import Iterator
import pytest
log = logging.getLogger(__name__)
@pytest.fixture
def temporary_dir() -> Iterator[Path]:
if os.getenv("TEST_KEEP_TEMPORARY_DIR"):
temp_dir = tempfile.mkdtemp(prefix="pytest-")
path = Path(temp_dir)
def temporary_home(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
env_dir = os.getenv("TEST_TEMPORARY_DIR")
if env_dir is not None:
path = Path(env_dir).resolve()
log.debug("Temp HOME directory: %s", str(path))
monkeypatch.setenv("HOME", str(path))
yield path
print("=========> Keeping temporary directory: ", path)
else:
log.debug("TEST_TEMPORARY_DIR not set, using TemporaryDirectory")
with tempfile.TemporaryDirectory(prefix="pytest-") as dirpath:
monkeypatch.setenv("HOME", str(dirpath))
log.debug("Temp HOME directory: %s", str(dirpath))
yield Path(dirpath)

View File

@@ -1,10 +0,0 @@
import pytest
from cli import Cli
def test_help(capsys: pytest.CaptureFixture) -> None:
    """`--help` must exit via SystemExit and print a usage banner."""
    with pytest.raises(SystemExit):
        Cli().run(["--help"])
    out = capsys.readouterr().out
    assert out.startswith("usage:")

View File

@@ -1,228 +0,0 @@
import json
import tempfile
from pathlib import Path
from typing import Any, Optional
import pytest
from cli import Cli
from clan_cli import config
from clan_cli.config import parsing
from clan_cli.errors import ClanError
example_options = f"{Path(config.__file__).parent}/jsonschema/options.json"
# use pytest.parametrize
@pytest.mark.parametrize(
    "args,expected",
    [
        (["name", "DavHau"], {"name": "DavHau"}),
        (
            ["kernelModules", "foo", "bar", "baz"],
            {"kernelModules": ["foo", "bar", "baz"]},
        ),
        (["services.opt", "test"], {"services": {"opt": "test"}}),
        (["userIds.DavHau", "42"], {"userIds": {"DavHau": 42}}),
    ],
)
def test_set_some_option(
    args: list[str],
    expected: dict[str, Any],
    test_flake: Path,
) -> None:
    """Setting a single option via the CLI writes the expected JSON document."""
    # create temporary file for out_file
    with tempfile.NamedTemporaryFile() as out_file:
        # start from an empty settings document
        with open(out_file.name, "w") as f:
            json.dump({}, f)
        cli = Cli()
        cli.run(
            [
                "config",
                "--quiet",
                "--options-file",
                example_options,
                "--settings-file",
                out_file.name,
            ]
            + args
        )
        # read back through a context manager: the original leaked an open
        # file object via json.loads(open(...).read())
        with open(out_file.name) as result:
            json_out = json.load(result)
        assert json_out == expected
def test_configure_machine(
    test_flake: Path,
    temporary_dir: Path,
    capsys: pytest.CaptureFixture,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Round-trip a boolean machine option: set it, then read it back."""
    monkeypatch.setenv("HOME", str(temporary_dir))
    cli = Cli()
    cli.run(["config", "-m", "machine1", "clan.jitsi.enable", "true"])
    # discard everything printed so far so the next read starts clean
    capsys.readouterr()
    # query the option we just wrote
    cli.run(["config", "-m", "machine1", "clan.jitsi.enable"])
    assert capsys.readouterr().out == "true\n"
def test_walk_jsonschema_all_types() -> None:
    """Each primitive JSON-schema type maps onto its Python counterpart."""
    schema = {
        "type": "object",
        "properties": {
            "array": {"type": "array", "items": {"type": "string"}},
            "boolean": {"type": "boolean"},
            "integer": {"type": "integer"},
            "number": {"type": "number"},
            "string": {"type": "string"},
        },
    }
    expected = {
        "array": list[str],
        "boolean": bool,
        "integer": int,
        "number": float,
        "string": str,
    }
    assert config.parsing.options_types_from_schema(schema) == expected
def test_walk_jsonschema_nested() -> None:
    """Nested object properties flatten into dotted option paths."""
    schema = {
        "type": "object",
        "properties": {
            "name": {
                "type": "object",
                "properties": {
                    "first": {"type": "string"},
                    "last": {"type": "string"},
                },
            },
            "age": {"type": "integer"},
        },
    }
    expected = {
        "age": int,
        "name.first": str,
        "name.last": str,
    }
    assert config.parsing.options_types_from_schema(schema) == expected
# test walk_jsonschema with dynamic attributes (e.g. "additionalProperties")
def test_walk_jsonschema_dynamic_attrs() -> None:
    """additionalProperties yields a `<name>` placeholder in the flattened path."""
    schema = {
        "type": "object",
        "properties": {
            "age": {"type": "integer"},
            "users": {
                "type": "object",
                "additionalProperties": {"type": "string"},
            },
        },
    }
    expected = {
        "age": int,
        "users.<name>": str,  # <name> is a placeholder for any string
    }
    assert config.parsing.options_types_from_schema(schema) == expected
def test_type_from_schema_path_simple() -> None:
    """An empty path on a scalar schema returns that scalar's Python type."""
    schema = {"type": "boolean"}
    assert parsing.type_from_schema_path(schema, []) == bool
def test_type_from_schema_path_nested() -> None:
    """Path segments descend through nested object properties."""
    schema = {
        "type": "object",
        "properties": {
            "name": {
                "type": "object",
                "properties": {
                    "first": {"type": "string"},
                    "last": {"type": "string"},
                },
            },
            "age": {"type": "integer"},
        },
    }
    assert parsing.type_from_schema_path(schema, ["age"]) == int
    assert parsing.type_from_schema_path(schema, ["name", "first"]) == str
def test_type_from_schema_path_dynamic_attrs() -> None:
    """Arbitrary keys under additionalProperties resolve to the value type."""
    schema = {
        "type": "object",
        "properties": {
            "age": {"type": "integer"},
            "users": {
                "type": "object",
                "additionalProperties": {"type": "string"},
            },
        },
    }
    assert parsing.type_from_schema_path(schema, ["age"]) == int
    assert parsing.type_from_schema_path(schema, ["users", "foo"]) == str
def test_map_type() -> None:
    """map_type translates nix option type strings into Python types."""
    # unknown type names must be rejected
    with pytest.raises(ClanError):
        config.map_type("foo")
    simple = {"string": str, "integer": int, "boolean": bool}
    for name, expected in simple.items():
        assert config.map_type(name) == expected
    assert config.map_type("attribute set of string") == dict[str, str]
    assert config.map_type("attribute set of integer") == dict[str, int]
    assert config.map_type("null or string") == Optional[str]
# test the cast function with simple types
def test_cast() -> None:
    """cast converts CLI string values into the requested Python type."""
    as_bool = config.cast(value=["true"], type=bool, opt_description="foo-option")
    assert as_bool is True
    as_null = config.cast(
        value=["null"], type=Optional[str], opt_description="foo-option"
    )
    assert as_null is None
    as_str = config.cast(
        value=["bar"], type=Optional[str], opt_description="foo-option"
    )
    assert as_str == "bar"
@pytest.mark.parametrize(
    "option,value,options,expected",
    [
        ("foo.bar", ["baz"], {"foo.bar": {"type": "str"}}, ("foo.bar", ["baz"])),
        ("foo.bar", ["baz"], {"foo": {"type": "attrs"}}, ("foo", {"bar": ["baz"]})),
        (
            "users.users.my-user.name",
            ["my-name"],
            {"users.users.<name>.name": {"type": "str"}},
            ("users.users.<name>.name", ["my-name"]),
        ),
        (
            "foo.bar.baz.bum",
            ["val"],
            {"foo.<name>.baz": {"type": "attrs"}},
            ("foo.<name>.baz", {"bum": ["val"]}),
        ),
        (
            "userIds.DavHau",
            ["42"],
            {"userIds": {"type": "attrs"}},
            ("userIds", {"DavHau": ["42"]}),
        ),
    ],
)
def test_find_option(option: str, value: list, options: dict, expected: tuple) -> None:
    """find_option resolves a dotted option path against the declared options,
    matching dynamic `<name>` segments and wrapping the remainder for attrs-typed
    options, and returns the (matched option, value) pair."""
    assert config.find_option(option, value, options) == expected

View File

@@ -1,66 +0,0 @@
import json
import subprocess
from pathlib import Path
import pytest
from api import TestClient
from cli import Cli
@pytest.fixture
def cli() -> Cli:
    # Fresh CLI entry point per test so no parser state leaks between tests.
    return Cli()
@pytest.mark.impure
def test_create_flake_api(
    monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_dir: Path
) -> None:
    """POST /api/flake/create scaffolds a new clan flake on disk."""
    flake_dir = temporary_dir / "flake_dir"
    payload = dict(
        dest=str(flake_dir.resolve()),
        url="git+https://git.clan.lol/clan/clan-core#new-clan",
    )
    response = api.post("/api/flake/create", json=payload)
    assert response.status_code == 201, f"Failed to create flake {response.text}"
    # both marker files must exist in the freshly created flake
    for marker in (".clan-flake", "flake.nix"):
        assert (flake_dir / marker).exists()
@pytest.mark.impure
def test_create_flake(
    monkeypatch: pytest.MonkeyPatch,
    temporary_dir: Path,
    capsys: pytest.CaptureFixture,
    cli: Cli,
) -> None:
    """End-to-end CLI flow: create a flake, add a machine, list it, configure it."""
    monkeypatch.chdir(temporary_dir)
    flake_dir = temporary_dir / "flake_dir"
    flake_dir_str = str(flake_dir.resolve())
    cli.run(["flake", "create", flake_dir_str])
    assert (flake_dir / ".clan-flake").exists()
    monkeypatch.chdir(flake_dir)
    cli.run(["machines", "create", "machine1"])
    capsys.readouterr()  # flush cache
    cli.run(["machines", "list"])
    assert "machine1" in capsys.readouterr().out
    # the new machine must show up as a flake output
    flake_show = subprocess.run(
        ["nix", "flake", "show", "--json"],
        check=True,
        capture_output=True,
        text=True,
    )
    flake_outputs = json.loads(flake_show.stdout)
    try:
        flake_outputs["nixosConfigurations"]["machine1"]
    except KeyError:
        pytest.fail("nixosConfigurations.machine1 not found in flake outputs")
    # configure machine1
    capsys.readouterr()
    cli.run(["config", "--machine", "machine1", "services.openssh.enable"])
    capsys.readouterr()
    cli.run(["config", "--machine", "machine1", "services.openssh.enable", "true"])

View File

@@ -1,22 +0,0 @@
from pathlib import Path
import pytest
from clan_cli.dirs import _get_clan_flake_toplevel
from clan_cli.errors import ClanError
def test_get_clan_flake_toplevel(
    monkeypatch: pytest.MonkeyPatch, temporary_dir: Path
) -> None:
    """Toplevel detection stops at the nearest .git / .clan-flake marker."""
    monkeypatch.chdir(temporary_dir)
    # no marker anywhere above -> lookup must fail
    with pytest.raises(ClanError):
        print(_get_clan_flake_toplevel())
    (temporary_dir / ".git").touch()
    assert _get_clan_flake_toplevel() == temporary_dir
    subdir = temporary_dir / "subdir"
    subdir.mkdir()
    monkeypatch.chdir(subdir)
    # a nearer .clan-flake marker wins over the parent .git
    (subdir / ".clan-flake").touch()
    assert _get_clan_flake_toplevel() == subdir

View File

@@ -1,33 +0,0 @@
# Test flake for `clan config`: one machine whose settings.json (if present)
# is merged into the configuration, plus an option exporting nixos option docs.
{
  # this placeholder is replaced by the path to nixpkgs
  inputs.nixpkgs.url = "__NIXPKGS__";
  outputs = inputs: {
    nixosConfigurations.machine1 = inputs.nixpkgs.lib.nixosSystem {
      modules = [
        ./nixosModules/machine1.nix
        # merge generated settings into the machine config when the file exists
        (if builtins.pathExists ./machines/machine1/settings.json
        then builtins.fromJSON (builtins.readFile ./machines/machine1/settings.json)
        else { })
        ({ lib, options, pkgs, ... }: {
          config = {
            nixpkgs.hostPlatform = "x86_64-linux";
            # speed up by not instantiating nixpkgs twice and disable documentation
            nixpkgs.pkgs = inputs.nixpkgs.legacyPackages.x86_64-linux;
            documentation.enable = false;
          };
          options.clanCore.optionsNix = lib.mkOption {
            type = lib.types.raw;
            internal = true;
            readOnly = true;
            default = (pkgs.nixosOptionsDoc { inherit options; }).optionsNix;
            defaultText = "optionsNix";
            description = ''
              This is to export nixos options used for `clan config`
            '';
          };
        })
      ];
    };
  };
}

View File

@@ -1,7 +0,0 @@
# Test module exposing a single boolean option exercised by the `clan config` tests.
{ lib, ... }: {
  options.clan.jitsi.enable = lib.mkOption {
    type = lib.types.bool;
    default = false;
    description = "Enable jitsi on this machine";
  };
}

View File

@@ -1,51 +0,0 @@
import json
from pathlib import Path
import pytest
from api import TestClient
@pytest.mark.impure
def test_inspect_ok(api: TestClient, test_flake_with_core: Path) -> None:
    """Inspecting a valid flake URL returns its machine attribute names."""
    response = api.get(
        "/api/flake/attrs",
        params={"url": str(test_flake_with_core)},
    )
    assert response.status_code == 200, "Failed to inspect vm"
    data = response.json()
    print("Data: ", data)
    assert data.get("flake_attrs") == ["vm1"]
@pytest.mark.impure
def test_inspect_err(api: TestClient) -> None:
    """A bare registry name is not a valid flake URL and must be rejected."""
    response = api.get(
        "/api/flake/attrs",
        params={"url": "flake-parts"},
    )
    assert response.status_code != 200, "Succeed to inspect vm but expected to fail"
    data = response.json()
    print("Data: ", data)
    # the error payload must carry a detail message
    assert data.get("detail")
@pytest.mark.impure
def test_inspect_flake(api: TestClient, test_flake_with_core: Path) -> None:
    """The flake endpoint returns readable content plus the two VM actions."""
    response = api.get(
        "/api/flake",
        params={"url": str(test_flake_with_core)},
    )
    assert response.status_code == 200, "Failed to inspect vm"
    data = response.json()
    print("Data: ", json.dumps(data, indent=2))
    assert data.get("content") is not None
    actions = data.get("actions")
    assert actions is not None
    assert len(actions) == 2
    expected_actions = [
        ("vms/inspect", "api/vms/inspect"),
        ("vms/create", "api/vms/create"),
    ]
    for action, (action_id, uri) in zip(actions, expected_actions):
        assert action.get("id") == action_id
        assert action.get("uri") == uri

View File

@@ -1,39 +0,0 @@
# Test flake built with clan-core: one sops-backed VM that acts as the
# zerotier controller and powers itself off after boot.
{
  # Use this path to our repo root e.g. for UI test
  # inputs.clan-core.url = "../../../../.";
  # this placeholder is replaced by the path to clan-core
  inputs.clan-core.url = "__CLAN_CORE__";
  outputs = { self, clan-core }:
    let
      clan = clan-core.lib.buildClan {
        directory = self;
        machines = {
          vm1 = { lib, ... }: {
            # __CLAN_*__ markers are substituted by the test harness
            clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";
            system.stateVersion = lib.version;
            sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
            clanCore.secretsUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
            clan.virtualisation.graphics = false;
            clan.networking.zerotier.controller.enable = true;
            networking.useDHCP = false;
            # shut down right after boot so test runs terminate on their own
            systemd.services.shutdown-after-boot = {
              enable = true;
              wantedBy = [ "multi-user.target" ];
              after = [ "multi-user.target" ];
              script = ''
                #!/usr/bin/env bash
                shutdown -h now
              '';
            };
          };
        };
      };
    in
    {
      inherit (clan) nixosConfigurations clanInternals;
    };
}

View File

@@ -1,37 +0,0 @@
# Variant of the clan-core test flake using the password-store secret backend
# instead of sops.
{
  # Use this path to our repo root e.g. for UI test
  # inputs.clan-core.url = "../../../../.";
  # this placeholder is replaced by the path to clan-core
  inputs.clan-core.url = "__CLAN_CORE__";
  outputs = { self, clan-core }:
    let
      clan = clan-core.lib.buildClan {
        directory = self;
        machines = {
          vm1 = { lib, ... }: {
            # __CLAN_*__ markers are substituted by the test harness
            clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";
            system.stateVersion = lib.version;
            clanCore.secretStore = "password-store";
            clanCore.secretsUploadDirectory = lib.mkForce "__CLAN_SOPS_KEY_DIR__/secrets";
            clan.networking.zerotier.controller.enable = true;
            # shut down right after boot so test runs terminate on their own
            systemd.services.shutdown-after-boot = {
              enable = true;
              wantedBy = [ "multi-user.target" ];
              after = [ "multi-user.target" ];
              script = ''
                #!/usr/bin/env bash
                shutdown -h now
              '';
            };
          };
        };
      };
    in
    {
      inherit (clan) nixosConfigurations clanInternals;
    };
}

Some files were not shown because too many files have changed in this diff Show More