diff --git a/Cargo.lock b/Cargo.lock index 64fed08..5f1595e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -223,6 +223,7 @@ dependencies = [ "tracing", "tracing-human-layer", "tracing-subscriber", + "which", ] [[package]] @@ -851,6 +852,15 @@ version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" +[[package]] +name = "which" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81995fafaaaf6ae47a7d0cc83c67caf92aeb7e5331650ae6ff856f7c0c60c459" +dependencies = [ + "libc", +] + [[package]] name = "windows" version = "0.61.3" diff --git a/Cargo.toml b/Cargo.toml index 7408fb0..38a75c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ tap = "1.0.1" tracing = { version = "0.1.44", features = ["attributes"] } tracing-human-layer = "0.2.1" tracing-subscriber = { version = "0.3.22", default-features = false, features = ["std", "env-filter", "fmt", "ansi", "registry", "parking_lot"] } +which = "8.0.2" [profile.dev] opt-level = 1 @@ -63,3 +64,21 @@ opt-level = 1 [profile.release.package."*"] debug = true debug-assertions = true + +[lints.clippy] +#arithmetic_side_effects = "warn" +as_ptr_cast_mut = "warn" +assigning_clones = "warn" +borrow_as_ptr = "warn" +#cargo_common_metadata = "warn" +cast_lossless = "warn" +#cast_possible_truncation = "warn" +cast_possible_wrap = "warn" +cast_ptr_alignment = "warn" +cast_sign_loss = "warn" +clear_with_drain = "warn" +coerce_container_to_any = "warn" +derive_partial_eq_without_eq = "warn" +doc_broken_link = "warn" +doc_comment_double_space_linebreaks = "warn" +doc_markdown = "warn" diff --git a/default.nix b/default.nix index 697681f..5122e5a 100644 --- a/default.nix +++ b/default.nix @@ -9,18 +9,28 @@ in import src { inherit pkgs; }, }: let inherit (qpkgs) lib; - dynix = (qpkgs.callPackage ./package.nix { }) + + # Use LLD for faster link times. 
+ defaultStdenv = pkgs.clangStdenv.override { + cc = pkgs.clangStdenv.cc.override { + bintools = pkgs.wrapBintoolsWith { inherit (pkgs.llvmPackages) bintools; }; + }; + }; + + dynix = (qpkgs.callPackage ./package.nix { clangStdenv = defaultStdenv; }) .overrideAttrs (final: prev: { dynixCommand = qpkgs.stdlib.mkStdenvPretty prev.dynixCommand; dynixModules = qpkgs.stdlib.mkStdenvPretty prev.dynixModules; }) |> qpkgs.stdlib.mkStdenvPretty; + byStdenv = lib.mapAttrs (stdenvName: stdenv: let withStdenv = dynix.override { clangStdenv = stdenv; }; dynix' = withStdenv.overrideAttrs (prev: { pname = "${prev.pname}-${stdenvName}"; }); in dynix') qpkgs.validStdenvs; + in dynix.overrideAttrs (prev: lib.recursiveUpdate prev { passthru = { inherit byStdenv; }; }) diff --git a/package.nix b/package.nix index fa07871..08b6bbc 100644 --- a/package.nix +++ b/package.nix @@ -7,23 +7,15 @@ clangStdenv, callPackage, linkFarm, - llvmPackages, rustHooks, rustPackages, versionCheckHook, - wrapBintoolsWith, + writeScript, }: lib.callWith' rustPackages ({ rustPlatform, cargo, }: let - # Use LLD for faster link times. 
- stdenv = clangStdenv.override { - cc = clangStdenv.cc.override { - bintools = wrapBintoolsWith { - bintools = llvmPackages.bintools; - }; - }; - }; + stdenv = clangStdenv; cargoToml = lib.importTOML ./Cargo.toml; cargoPackage = cargoToml.package; in stdenv.mkDerivation (finalAttrs: let @@ -46,18 +38,37 @@ in { cp -r --reflink=auto "$dynixCommand/"* "$out/" mkdir -p "$modules" cp -r --reflink=auto "$dynixModules/"* "$modules/" + install -Dm a=rx "$dynixTestingClient" "$out/libexec/dynix-testing-client.py" ''; # # SUB-DERIVATONS # + dynixTestingClient = writeScript "dynix-testing-client.py" '' + #!/usr/bin/env python3 + import socket, sys, os, json + try: + sockpath = sys.argv[1] + except IndexError: + sockpath = f"{os.environ['XDG_RUNTIME_DIR']}/dynix.sock" + sock = socket.socket(family=socket.AF_UNIX) + sock.connect(sockpath) + sock.sendall(sys.stdin.buffer.read()) + sock.settimeout(20) + reply = json.loads(sock.recv(256).decode("utf-8")) + print(json.dumps(reply, indent=2)) + sys.exit(reply["status"]) + ''; + dynixCommand = stdenv.mkDerivation { pname = "${self.pname}-command"; inherit (self) version; inherit (self) strictDeps __structuredAttrs; inherit (self) doCheck doInstallCheck; + outputs = [ "out" "doc" ]; + src = lib.fileset.toSource { root = ./.; fileset = lib.fileset.unions [ @@ -71,6 +82,12 @@ in { lockFile = ./Cargo.lock; }; + postInstall = '' + cargo doc --document-private-items + mkdir -p "$doc" + cp -r ./target/doc/* "$doc/" + ''; + nativeBuildInputs = rustHooks.asList ++ [ cargo ]; diff --git a/rustfmt.toml b/rustfmt.toml index d662dc1..02c4dd5 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -4,3 +4,13 @@ match_block_trailing_comma = true merge_derives = false + +# Unstable options. 
+blank_lines_upper_bound = 3 +format_code_in_doc_comments = true +format_macro_matchers = true +# When structs, slices, arrays, and block/array-like macros are used as the last argument in an expression list, +# allow them to overflow (like blocks/closures) instead of being indented on a new line. +overflow_delimited_expr = true +# Put `type` and `const` items before methods. +reorder_impl_items = true diff --git a/shell.nix b/shell.nix index 39cd49f..a1b40ff 100644 --- a/shell.nix +++ b/shell.nix @@ -16,7 +16,21 @@ fenixLib ? let src = fetchTarball "https://github.com/nix-community/fenix/archive/main.tar.gz"; in import src { inherit pkgs; }, - fenixToolchain ? fenixLib.latest.toolchain, + fenixBaseToolchain ? fenixLib.stable.withComponents [ + "cargo" + "rustc" + "llvm-tools" + "rust-std" + "rust-docs" + "rust-src" + "rustc-dev" + "clippy" + ], + fenixToolchain ? fenixLib.combine [ + fenixBaseToolchain + # Rustfmt is very handy to have as nightly. + fenixLib.latest.rustfmt + ], }: let inherit (pkgs) lib; diff --git a/src/daemon.rs b/src/daemon.rs index 6543d3a..5f1b88c 100644 --- a/src/daemon.rs +++ b/src/daemon.rs @@ -1,6 +1,7 @@ use std::{ env, io, os::fd::{AsFd, BorrowedFd, IntoRawFd, OwnedFd, RawFd}, + process::{Command, Stdio}, sync::{ Arc, LazyLock, atomic::{AtomicUsize, Ordering}, @@ -12,7 +13,20 @@ use iddqd::{BiHashMap, IdOrdMap}; use mio::{Events, Interest, Poll, Token, event::Event, net::UnixListener, unix::SourceFd}; -use rustix::{buffer::spare_capacity, net::SocketFlags, process::Uid}; +use rustix::{ + buffer::spare_capacity, + net::SocketFlags, + process::{Pid, PidfdFlags, Uid, WaitId, WaitIdOptions}, +}; + +mod rustix { + pub use rustix::process::{getuid, pidfd_open, waitid}; + pub use rustix::*; +} + +//mod rustix_prelude { +// pub use rustix::process::{getuid, pidfd_open, waitid}; +//} use serde_json::StreamDeserializer; @@ -21,10 +35,7 @@ use crate::prelude::*; pub mod api; use api::DaemonCmd; -use crate::{ - SourceFile, SourceLine, - 
daemon_tokfd::{FdInfo, FdKind}, -}; +use crate::daemon_tokfd::{FdInfo, FdKind}; use crate::{OwnedFdWithFlags, TokenFd}; @@ -45,6 +56,22 @@ pub static TMPDIR: LazyLock<&'static Path> = LazyLock::new(|| { Box::leak(dir) }); +pub static NIXOS_REBUILD: LazyLock<&'static Path> = LazyLock::new(|| { + which::which("nixos-rebuild") + .inspect_err(|e| error!("couldn't find `nixos-rebuild` in PATH: {e}")) + .map(PathBuf::into_boxed_path) + .map(|boxed| &*Box::leak(boxed)) + .unwrap_or(Path::new("/run/current-system/sw/bin/nixos-rebuild")) +}); + +pub static NIX: LazyLock<&'static Path> = LazyLock::new(|| { + which::which("nix") + .inspect_err(|e| error!("couldn't find `nix` in PATH: {e}")) + .map(PathBuf::into_boxed_path) + .map(|boxed| &*Box::leak(boxed)) + .unwrap_or(Path::new("/run/current-system/sw/bin/nix")) +}); + const TIMEOUT_NEVER: Option = None; static NEXT_TOKEN_NUMBER: AtomicUsize = AtomicUsize::new(1); @@ -60,6 +87,42 @@ fn next_token() -> Token { Token(tok) } +trait EventExt { + type Display; + + fn display(&self) -> Self::Display; +} + +#[derive(Copy)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct EventDisplay { + token: Token, + error: bool, + writable: bool, + write_closed: bool, + readable: bool, + read_closed: bool, +} +impl EventExt for Event { + type Display = EventDisplay; + + fn display(&self) -> Self::Display { + EventDisplay { + token: self.token(), + error: self.is_error(), + writable: self.is_writable(), + write_closed: self.is_write_closed(), + readable: self.is_readable(), + read_closed: self.is_read_closed(), + } + } +} +impl Display for EventDisplay { + fn fmt(&self, f: &mut Formatter) -> FmtResult { + todo!() + } +} + #[derive(Debug)] pub struct Daemon { config_path: Arc, @@ -132,6 +195,33 @@ impl Daemon { token } + #[expect(dead_code)] + fn register_with_name(&mut self, fd: RawFd, kind: FdKind, name: Box) -> Token { + let token = next_token(); + + debug!( + "Registering new {} FdInfo for {fd} ({}) with token 
{token:?}", + name.to_string_lossy(), + kind.name_str(), + ); + + self.fd_info + .insert_unique(FdInfo::new_with_name(fd, kind, name)) + .unwrap(); + + self.tokfd + .insert_unique(TokenFd { token, fd }) + .unwrap_or_else(|e| todo!("{e}")); + + let mut source = SourceFd(&fd); + self.poller + .registry() + .register(&mut source, token, Interest::READABLE) + .unwrap_or_else(|e| unreachable!("registering {fd:?} with poller failed: {e}")); + + token + } + fn deregister(&mut self, fd: RawFd) { let info = self .fd_info @@ -279,6 +369,32 @@ const DAEMON: Token = Token(0); /// Private helpers. impl Daemon { + fn proxy_stdio(&mut self, fd: &BorrowedFd) -> Result<(), IoError> { + let info = self.fd_info.get(&fd.as_raw_fd()).unwrap(); + let label = match info.kind { + FdKind::ChildStdout => "stdout", + FdKind::ChildStderr => "stderr", + other => unreachable!("child stdio cannot have kind {other:?}"), + }; + // FIXME: don't use a new allocation every time. + let mut buffer: Vec = Vec::with_capacity(1024); + // FIXME: handle line buffering correctly. + loop { + let count = rustix::io::read(fd, spare_capacity(&mut buffer)) + .inspect_err(|e| error!("read() on child stdio fd {fd:?} failed: {e}"))?; + + if count == 0 { + break; + } + + for line in buffer.lines() { + info!("[child {label}]: {}", line.as_bstr()) + } + } + + Ok(()) + } + fn read_cmd(&mut self, fd: &BorrowedFd) -> Result<(), IoError> { // FIXME: don't use a new allocation every time. let mut cmd_buffer: Vec = Vec::with_capacity(1024); @@ -317,42 +433,82 @@ impl Daemon { } fn dispatch_cmd(&mut self, cmd: DaemonCmd) -> Result<(), IoError> { + // Write the new file... 
let (name, value) = match cmd { DaemonCmd::Append { name, value } => (name, value), }; - let mut opts = File::options(); - opts.read(true) - .write(true) - .create(false) - .custom_flags(libc::O_CLOEXEC); - let source_file = SourceFile::open_from(self.config_path.clone(), opts)?; + let source_file = crate::open_source_file(self.config_path.clone())?; let pri = crate::get_where(source_file.clone()).unwrap_or_else(|e| todo!("{e}")); let new_pri = pri - 1; - //let new_pri_line = - // crate::get_next_prio_line(source_file.clone(), Arc::from(name), Arc::from(value)); // Get next priority line. - let source_lines = source_file.lines()?; - let penultimate = source_lines.get(source_lines.len() - 2); - // FIXME: don't rely on whitespace lol - debug_assert_eq!(penultimate.map(SourceLine::text).as_deref(), Some(" ];")); - let penultimate = penultimate.unwrap(); - let new_generation = 0 - new_pri; - let new_line = SourceLine { - line: penultimate.line, - path: source_file.path(), - text: Arc::from(format!( - " {} = lib.mkOverride ({}) ({}); # DYNIX GENERATION {}", - name.to_nix_decl(), - new_pri, - value, - new_generation, - )), - }; - - drop(source_lines); + let opt_name = name.to_nix_decl(); + let new_line = crate::get_next_prio_line( + source_file.clone(), + &opt_name, + new_pri, + &value.to_nix_source(), + ) + .unwrap_or_else(|e| panic!("someone is holding a reference to source.lines(): {e}")); crate::write_next_prio(source_file, new_line).unwrap_or_else(|e| todo!("{e}")); + // Rebuild and switch. + // FIXME: allow passing additional args. 
+ //let child = Command::new(*NIXOS_REBUILD) + // .arg("switch") + // .arg("--log-format") + // .arg("raw-with-logs") + // .arg("--no-reexec") + // .arg("-v") + // .stdout(Stdio::piped()) + // .stderr(Stdio::piped()) + // .spawn() + // .inspect_err(|e| { + // error!("failed to spawn `nixos-rebuild` command: {e}"); + // })?; + + let expr = "(import { }).config.dynamicism.applyDynamicConfiguration { }"; + let child = Command::new(*NIX) + .arg("run") + .arg("--show-trace") + .arg("--log-format") + .arg("raw-with-logs") + .arg("--impure") + .arg("-E") + .arg(expr) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .inspect_err(|e| error!("failed to spawn `nix run` command: {e}"))?; + + debug!("Spawned child process {}", child.id()); + + let pid = Pid::from_child(&child); + + let stdout = child.stdout.unwrap_or_else(|| { + unreachable!("`child` is given `.stdout(Stdio::piped())`"); + }); + let stderr = child.stderr.unwrap_or_else(|| { + unreachable!("`child` is given `.stderr(Stdio::piped())`"); + }); + + let _token = self.register(stdout.into_raw_fd(), FdKind::ChildStdout); + let _token = self.register(stderr.into_raw_fd(), FdKind::ChildStderr); + + match rustix::process::pidfd_open(pid, PidfdFlags::NONBLOCK) { + Ok(pidfd) => { + debug!("Opened pidfd {pidfd:?}, for process {pid}"); + self.register(pidfd.into_raw_fd(), FdKind::Pid(pid)); + }, + Err(e) if e.kind() == IoErrorKind::NotFound => { + warn!("child {pid} not found; died before we could open it?"); + }, + Err(e) => { + error!("Error opening pidfd for child {pid}: {e}"); + return Err(e)?; + }, + } + Ok(()) } @@ -386,9 +542,9 @@ impl Daemon { loop { if tracing::enabled!(tracing::Level::DEBUG) { - trace!("Daemon loop iteration, with file descriptors: "); + debug!("Daemon loop iteration, with file descriptors: "); for info in &self.fd_info { - trace!("- {}", info.display()); + debug!("- {}", info.display()); } } @@ -441,7 +597,7 @@ impl Daemon { } fn handle_event(&mut self, event: &Event) -> Result<(), 
IoError> { - trace!("Handling event {event:?}"); + trace!("Handling event {event:#?}"); match event.token() { DAEMON => { @@ -487,16 +643,92 @@ impl Daemon { }, other_token => { // This must be a stream fd. - let stream_fd = self.fd_for_token(other_token).unwrap_or_else(|| { + let fd = self.fd_for_token(other_token).unwrap_or_else(|| { unreachable!("tried to get fd for non-existent token? {other_token:?}") }); + let Some(info) = self.fd_info.get(&fd) else { + panic!("Received an event on an unregistered fd {fd}; IO-safety violation?"); + }; + + let either_available = event.is_readable() || event.is_writable(); + if !either_available { + info!( + "File descriptor {} r:{}, w:{}", + info.display(), + event.is_readable(), + event.is_writable(), + ); + // FIXME: code duplication + if event.is_read_closed() { + self.deregister(fd); + return Ok(()); + } + } + + match info.kind { + FdKind::Pid(pid) => { + debug!("Reaping child process {pid}"); + // SAFETY: `fd` cannot have been closed yet, since that's what we do here. + let pidfd = unsafe { BorrowedFd::borrow_raw(fd) }; + let status = rustix::waitid(WaitId::PidFd(pidfd), WaitIdOptions::EXITED) + .unwrap_or_else(|e| { + todo!("waitid() can fail? on pid {pid}: {e}"); + }) + .unwrap_or_else(|| { + todo!("waitid() returned None? for pid {pid}"); + }); + + debug!("waitid() for pid {pid} returned status: {status:?}"); + let is_dead = status.exited() || status.killed() || status.dumped(); + if !is_dead { + todo!("Handle process {pid} events that aren't death: {status:?}"); + } + let Some(exit_code) = status.exit_status() else { + unreachable!("Process {pid} died with no exit code at all? {status:?}"); + }; + debug!("Child process {pid} exited with code {exit_code}"); + + // Close the pidfd. + self.deregister(fd); + + let stream = self + .fd_info + .iter() + .find_map(|info| (info.kind == FdKind::SockStream).then_some(info)); + if let Some(stream) = stream { + // SAFETY: fixme. 
+ let stream_fd = unsafe { BorrowedFd::borrow_raw(stream.fd) }; + let payload = format!("{{ \"status\": {exit_code} }}\n"); + if let Err(e) = rustix::io::write(stream_fd, payload.as_bytes()) { + error!("couldn't write reply to stream fd {stream_fd:?}: {e}"); + } + } + }, + FdKind::ChildStdout => { + warn!("got stdout"); + // SAFETY: oh boy. + let stdout = unsafe { BorrowedFd::borrow_raw(fd) }; + self.proxy_stdio(&stdout) + .unwrap_or_else(|e| error!("failed to proxy child stdout: {e}")); + }, + FdKind::ChildStderr => { + warn!("got stderr"); + // SAFETY: oh boy. + let stderr = unsafe { BorrowedFd::borrow_raw(fd) }; + self.proxy_stdio(&stderr) + .unwrap_or_else(|e| error!("failed to proxy child stderr: {e}")); + }, + FdKind::SockStream => { + // SAFETY: oh boy. + let stream_fd = unsafe { BorrowedFd::borrow_raw(fd) }; + self.read_cmd(&stream_fd).unwrap(); + }, + kind => todo!("{kind:?}"), + }; if event.is_read_closed() { - self.deregister(stream_fd); - } else { - // SAFETY: oh boy. - let stream_fd = unsafe { BorrowedFd::borrow_raw(stream_fd) }; - self.read_cmd(&stream_fd).unwrap(); + self.deregister(fd); + return Ok(()); } }, } diff --git a/src/daemon/api.rs b/src/daemon/api.rs index 077ce0f..2ab805c 100644 --- a/src/daemon/api.rs +++ b/src/daemon/api.rs @@ -47,6 +47,24 @@ impl ConvenientAttrPath { } } +#[derive(Debug, Clone, PartialEq)] +#[derive(Deserialize, Serialize)] +#[serde(untagged)] +pub enum NixLiteral { + String(String), + Number(f64), + // FIXME: add the rest =P +} + +impl NixLiteral { + pub fn to_nix_source(&self) -> String { + match self { + NixLiteral::String(s) => format!("\"{s}\""), + NixLiteral::Number(n) => n.to_string(), + } + } +} + #[derive(Debug, Clone, PartialEq)] #[derive(serde::Deserialize, serde::Serialize)] #[serde(tag = "action", content = "args", rename_all = "snake_case")] @@ -54,6 +72,6 @@ impl ConvenientAttrPath { pub enum DaemonCmd { Append { name: ConvenientAttrPath, - value: Box, + value: Box, }, } diff --git a/src/daemon_tokfd.rs 
b/src/daemon_tokfd.rs index e00f5f2..cb436c6 100644 --- a/src/daemon_tokfd.rs +++ b/src/daemon_tokfd.rs @@ -3,6 +3,7 @@ use std::{os::fd::RawFd, sync::OnceLock}; use circular_buffer::CircularBuffer; use iddqd::{BiHashItem, IdOrdItem}; use mio::Token; +use rustix::process::Pid; use crate::prelude::*; @@ -76,11 +77,11 @@ impl FdInfo { impl IdOrdItem for FdInfo { type Key<'a> = &'a RawFd; + iddqd::id_upcast!(); + fn key(&self) -> &RawFd { &self.fd } - - iddqd::id_upcast!(); } #[derive(Debug)] @@ -105,13 +106,16 @@ impl<'a> Display for FdInfoDisplay<'a> { } #[derive(Copy)] -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] #[non_exhaustive] pub enum FdKind { File, Socket, SockStream, Poller, + ChildStdout, + ChildStderr, + Pid(Pid), #[default] Unknown, } @@ -124,6 +128,9 @@ impl FdKind { Socket => "socket", SockStream => "socket stream", Poller => "poller", + ChildStdout => "child stdout", + ChildStderr => "child stderr", + Pid(_) => "pidfd", Unknown => "«unknown»", } } @@ -140,6 +147,8 @@ impl BiHashItem for TokenFd { type K1<'a> = Token; type K2<'a> = RawFd; + iddqd::bi_upcast!(); + fn key1(&self) -> Token { self.token } @@ -147,8 +156,6 @@ impl BiHashItem for TokenFd { fn key2(&self) -> RawFd { self.fd } - - iddqd::bi_upcast!(); } impl From for (Token, RawFd) { diff --git a/src/lib.rs b/src/lib.rs index b2b597a..cfa921f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -99,6 +99,22 @@ static MK_OVERRIDE_RE: LazyLock = LazyLock::new(|| { Regex::new(r"(?-u)\bmkOverride\s+\((?[\d-]+)\)").unwrap() }); +pub(crate) fn open_source_file(path: Arc) -> Result { + let mut opts = File::options(); + opts.read(true) + .write(true) + .create(false) + .custom_flags(libc::O_CLOEXEC); + + SourceFile::open_from(Arc::clone(&path), opts) + .tap_err(|e| error!("couldn't open source file at {}: {e}", path.display())) +} + +pub(crate) fn get_line_to_insert() -> SourceLine { + // + todo!(); +} + 
#[tracing::instrument(level = "debug")] pub fn do_append(args: Arc, append_args: AppendCmd) -> Result<(), BoxDynError> { let filepath = Path::new(&args.file); @@ -110,22 +126,16 @@ pub fn do_append(args: Arc, append_args: AppendCmd) -> Result<(), BoxDynEr filepath.to_path_buf() }; - let mut opts = File::options(); - opts.read(true) - .write(true) - .create(false) - .custom_flags(libc::O_CLOEXEC); - - let source_file = SourceFile::open_from(Arc::from(filepath), opts)?; + let source_file = open_source_file(Arc::from(filepath))?; let pri = get_where(source_file.clone())?; let new_pri = pri - 1; let new_pri_line = get_next_prio_line( source_file.clone(), - append_args.name, + &append_args.name, new_pri, - append_args.value, + &append_args.value, )?; debug!("new_pri_line={new_pri_line}"); @@ -210,9 +220,9 @@ pub fn get_where(dynamic_nix: SourceFile) -> Result { pub fn get_next_prio_line( source: SourceFile, - option_name: Arc, + option_name: &str, new_prio: i64, - new_value: Arc, + new_value: &str, ) -> Result { let source_lines = source.lines()?; let penultimate = source_lines.get(source_lines.len() - 2); @@ -221,6 +231,7 @@ pub fn get_next_prio_line( let penultimate = penultimate.unwrap(); let new_generation = 0 - new_prio; + info!("setting '{option_name}' to '{new_value}' for generation '{new_generation}'"); let new_line = SourceLine { line: penultimate.line, @@ -246,6 +257,8 @@ pub fn write_next_prio(mut source: SourceFile, new_line: SourceLine) -> Result<( text: Arc::from(" }"), }; + debug!("writing new source line: {new_line}"); + source.insert_lines(&[new_mod_start, new_line, new_mod_end])?; Ok(()) diff --git a/src/source.rs b/src/source.rs index 287f72e..36cd70b 100644 --- a/src/source.rs +++ b/src/source.rs @@ -6,6 +6,7 @@ use std::{ cell::{Ref, RefCell}, hash::Hash, io::{BufRead, BufReader, BufWriter}, + mem::{self, MaybeUninit}, ops::Deref, ptr, sync::{Arc, Mutex, OnceLock}, @@ -103,6 +104,28 @@ pub struct SourceFile { lines: Arc>>>, } +#[derive(Debug)] 
+#[repr(transparent)] +pub struct OpaqueDerefSourceLines<'s>(Ref<'s, [SourceLine]>); +impl<'s> Deref for OpaqueDerefSourceLines<'s> { + type Target = [SourceLine]; + + fn deref(&self) -> &[SourceLine] { + &*self.0 + } +} + +#[derive(Debug)] +#[repr(transparent)] +pub struct OpaqueDerefSourceLine<'s>(Ref<'s, SourceLine>); +impl<'s> Deref for OpaqueDerefSourceLine<'s> { + type Target = SourceLine; + + fn deref(&self) -> &SourceLine { + &*self.0 + } +} + impl SourceFile { /// Panics if `path` is a directory path instead of a file path. pub fn open_from(path: Arc, options: OpenOptions) -> Result { @@ -159,15 +182,17 @@ impl SourceFile { Ok(self._lines_slice()) } - pub fn lines(&self) -> Result + '_, IoError> { - self._lines() + pub fn lines(&self) -> Result, IoError> { + let lines = self._lines()?; + + Ok(OpaqueDerefSourceLines(lines)) } - pub fn line(&self, line: Line) -> Result + '_, IoError> { + pub fn line(&self, line: Line) -> Result, IoError> { let lines_lock = self._lines()?; let line = Ref::map(lines_lock, |lines| &lines[line.index() as usize]); - Ok(line) + Ok(OpaqueDerefSourceLine(line)) } /// `lines` but already be initialized. diff --git a/tests/default.nix b/tests/default.nix index 57b1c94..0541f6f 100644 --- a/tests/default.nix +++ b/tests/default.nix @@ -50,16 +50,11 @@ requiredBy = [ "multi-user.target" ]; after = [ "default.target" ]; script = '' - if [[ -e /etc/nixos/hardware-configuration.nix ]]; then - echo "install-dynix: configuration already copied; nothing to do" - exit 0 - fi - nix profile install -vv "${dynix.drvPath}^*" # " mkdir -vp /etc/nixos nixos-generate-config - cp -rv --dereference /run/current-system/sw/share/nixos/*.nix /etc/nixos/ + cp -rvf --dereference /run/current-system/sw/share/nixos/*.nix /etc/nixos/ if ! 
[[ -e /etc/nixos/dynix-vm-configuration.nix ]]; then echo "FAILURE" echo "FAILURE" >&2 diff --git a/tests/dynix-vm-configuration.nix b/tests/dynix-vm-configuration.nix index 2e299c3..677ef70 100644 --- a/tests/dynix-vm-configuration.nix +++ b/tests/dynix-vm-configuration.nix @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: EUPL-1.1 -{ pkgs, lib, modulesPath, ... }: +{ pkgs, lib, modulesPath, config, ... }: let moduleList = import (modulesPath + "/module-list.nix"); @@ -44,11 +44,40 @@ in }; }; + # Setup XDG base directories for me. + security.pam.services.login = { + rules.session.xdg = { + enable = true; + control = "optional"; + modulePath = "${pkgs.pam_xdg}/lib/security/pam_xdg.so"; + args = [ ]; + order = 10500; + }; + }; + environment.pathsToLink = [ "/share" ]; environment.extraOutputsToInstall = [ "modules" ]; environment.variables = { "NIXOS_CONFIG" = "/etc/nixos/configuration.nix"; }; + environment.sessionVariables = { + "NIXOS_CONFIG" = "/etc/nixos/configuration.nix"; + }; + + systemd.services.dynix-daemon = { + enable = true; + path = [ config.nix.package ]; + serviceConfig = { + Environment = [ + "RUST_LOG=trace" + ]; + ExecSearchPath = [ "/run/current-system/sw/bin" ]; + SuccessExitStatus = [ "0" "2" ]; + # `bash -l` so XDG_RUNTIME_DIR is set correctly. lol. 
+ ExecStart = "bash -l -c 'exec /root/.nix-profile/bin/dynix daemon --color=always'"; + SyslogIdentifier = "dynix-daemon"; + }; + }; environment.shellAliases = { ls = "eza --long --header --group --group-directories-first --classify --binary"; @@ -58,5 +87,8 @@ in eza fd ripgrep + netcat.nc + socat + python3 ]; } diff --git a/tests/gotosocial/test-script.py b/tests/gotosocial/test-script.py index eca3044..0f3131e 100644 --- a/tests/gotosocial/test-script.py +++ b/tests/gotosocial/test-script.py @@ -5,7 +5,7 @@ from pathlib import Path import shlex import textwrap -from typing import cast, TYPE_CHECKING +from typing import Any, cast, TYPE_CHECKING from beartype import beartype @@ -18,6 +18,13 @@ if TYPE_CHECKING: assert machine.shell is not None ls = "eza -lah --color=always --group-directories-first" +testing_client = "/root/.nix-profile/libexec/dynix-testing-client.py" + +ANSI_RESET = "\x1b[0m" +ANSI_BOLD = "\x1b[1m" +ANSI_NOBOLD = "\x1b[22m" +ANSI_RED = "\x1b[31m" +ANSI_GREEN = "\x1b[32m" @beartype def run_log(machine: Machine, *commands: str, timeout: int | None = 60) -> str: @@ -54,71 +61,108 @@ def get_config_file() -> str: config_file_path.unlink() + machine.logger.info(f"{ANSI_GREEN}INFO{ANSI_RESET}: got config file:") + machine.logger.info(textwrap.indent(data, " ")) + return data @beartype -def dynix_append(option: str, value: str): +def dynix_append_cli(option: str, value: Any): + value = f'"{value}"' if isinstance(value, str) else value machine.succeed(f''' dynix append {shlex.quote(option)} {shlex.quote(value)} '''.strip()) -@beartype -def do_apply(): expr = textwrap.dedent(""" (import { }).config.dynamicism.applyDynamicConfiguration { } """).strip() - - machine.succeed(rf""" + machine.succeed(textwrap.dedent(rf""" nix run --show-trace --log-format raw-with-logs --impure -E {shlex.quote(expr)} - """.strip()) + """).strip()) +@beartype +def dynix_append_daemon(option: str, value: Any): + import json + payload = json.dumps(dict( + action="append", + 
args=dict( + name=option, + value=value, + ), + )) + + machine.succeed(f"echo '{payload}' | {testing_client} /run/user/0/dynix.sock") + +@beartype +def run_all_tests(machine: Machine, *, use_daemon: bool): + dynix_append = dynix_append_daemon if use_daemon else dynix_append_cli + + dynix_out = machine.succeed("dynix --version") + assert "dynix" in dynix_out, f"dynix not in {dynix_out=}" + + machine.succeed("systemctl start user@0.service") + machine.wait_for_unit("user@0.service") + machine.succeed("systemctl start dynix-daemon.service") + machine.wait_for_unit("dynix-daemon.service") + + machine.log("REBUILDING configuration inside VM") + machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec --fallback") + machine.wait_for_unit("gotosocial.service") + + # Make sure the config before any dynamic changes is what we expect. + config_text = get_config_file() + lines = config_text.splitlines() + try: + application_name = next(line for line in lines if line.startswith("application-name:")) + except StopIteration: + raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") + assert "gotosocial-for-machine" in application_name, f"'gotosocial-for-machine' should be in {application_name=}" + + try: + host = next(line for line in lines if line.startswith("host:")) + except StopIteration: + raise AssertionError(f"no 'host:' found in config file: {textwrap.indent(config_text, " ")}") + assert "gotosocial-machine" in host, f"'gotosocial-machine' should be in {host=}" + + new_app_name = "yay!" 
+ dynix_append("services.gotosocial.settings.application-name", f'{new_app_name}') + + config_text = get_config_file() + lines = config_text.splitlines() + try: + application_name = next(line for line in lines if line.startswith("application-name:")) + except StopIteration: + raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") + assert new_app_name in application_name, f"'{new_app_name}' should be in {application_name=}" + + machine.log("REBUILDING configuration inside VM") + machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec --fallback") + + machine.wait_for_unit("gotosocial.service") + + config_text = get_config_file() + lines = config_text.splitlines() + try: + application_name = next(line for line in lines if line.startswith("application-name:")) + except StopIteration: + raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") + assert "gotosocial-for-machine" in application_name, f"'gotosocial-for-machine' should be in {application_name=}" + +machine.start(allow_reboot=True) machine.wait_for_unit("default.target") machine.wait_for_unit("install-dynix.service") - -dynix_out = machine.succeed("dynix --version") -assert "dynix" in dynix_out, f"dynix not in {dynix_out=}" - -machine.log("REBUILDING configuration inside VM") -machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec --fallback") -machine.wait_for_unit("gotosocial.service") - -# Make sure the config before any dynamic changes is what we expect. 
-config_text = get_config_file() -lines = config_text.splitlines() try: - application_name = next(line for line in lines if line.startswith("application-name:")) -except StopIteration: - raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") -assert "gotosocial-for-machine" in application_name, f"'gotosocial-for-machine' should be in {application_name=}" + run_all_tests(machine, use_daemon=False) +except Exception as e: + machine.logger.error(f"{ANSI_RED}ERROR{ANSI_RESET} during {ANSI_BOLD}CLI{ANSI_RESET} tests: {e}") + raise +machine.reboot() + +machine.wait_for_unit("install-dynix.service") try: - host = next(line for line in lines if line.startswith("host:")) -except StopIteration: - raise AssertionError(f"no 'host:' found in config file: {textwrap.indent(config_text, " ")}") -assert "gotosocial-machine" in host, f"'gotosocial-machine' should be in {host=}" - -new_app_name = "yay!" -dynix_append("services.gotosocial.settings.application-name", f'"{new_app_name}"') -do_apply() - -config_text = get_config_file() -lines = config_text.splitlines() -try: - application_name = next(line for line in lines if line.startswith("application-name:")) -except StopIteration: - raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") -assert new_app_name in application_name, f"'{new_app_name}' should be in {application_name=}" - -machine.log("REBUILDING configuration inside VM") -machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec --fallback") - -machine.wait_for_unit("gotosocial.service") - -config_text = get_config_file() -lines = config_text.splitlines() -try: - application_name = next(line for line in lines if line.startswith("application-name:")) -except StopIteration: - raise AssertionError(f"no 'application-name:' found in config file: {textwrap.indent(config_text, " ")}") -assert "gotosocial-for-machine" in application_name, 
f"'gotosocial-for-machine' should be in {application_name=}" + run_all_tests(machine, use_daemon=True) +except Exception as e: + machine.logger.error(f"{ANSI_RED}ERROR{ANSI_RESET} during {ANSI_BOLD}daemon{ANSI_RESET} tests: {e}") + raise diff --git a/tests/harmonia/test-script.py b/tests/harmonia/test-script.py index c40e130..83ca0db 100644 --- a/tests/harmonia/test-script.py +++ b/tests/harmonia/test-script.py @@ -4,6 +4,7 @@ import functools from pathlib import Path +import json import shlex import textwrap import tomllib @@ -20,6 +21,7 @@ if TYPE_CHECKING: assert machine.shell is not None ls = "eza -lah --color=always --group-directories-first" +testing_client = "/root/.nix-profile/libexec/dynix-testing-client.py" indent = functools.partial(textwrap.indent, prefix=' ') @@ -55,65 +57,114 @@ def get_config_file() -> dict[str, Any]: with open(config_file_path, "rb") as f: config_data = tomllib.load(f) - config_file_path.unlink() + try: + config_file_path.unlink() + except Exception as e: + machine.log(f"Couldn't unlike path {config_file_path}: {e}") + raise return config_data @beartype -def dynix_append(option: str, value: Any): +def dynix_append_daemon(option: str, value: Any): + payload = json.dumps(dict( + action="append", + args=dict( + name=option, + value=value, + ), + )) + + machine.succeed(f"echo '{payload}' | {testing_client} /run/user/0/dynix.sock") + +@beartype +def dynix_append_cli(option: str, value: Any): machine.succeed(f''' dynix append {shlex.quote(option)} {shlex.quote(str(value))} '''.strip()) -@beartype -def do_apply(): expr = textwrap.dedent(""" (import { }).config.dynamicism.applyDynamicConfiguration { } """).strip() - machine.succeed(rf""" nix run --show-trace --log-format raw-with-logs --impure -E {shlex.quote(expr)} """.strip()) -machine.wait_for_unit("default.target") -machine.wait_for_unit("install-dynix.service") +@beartype +def run_all_tests(machine: Machine, *, use_daemon: bool): + dynix_append = dynix_append_daemon if use_daemon else 
dynix_append_cli -dynix_out = machine.succeed("dynix --version") -assert "dynix" in dynix_out, f"dynix not in {dynix_out=}" + # + # Setup. + # + dynix_out = machine.succeed("dynix --version") + assert "dynix" in dynix_out, f"dynix not in {dynix_out=}" -# Config should have our initial values. -config_toml = get_config_file() -assert int(config_toml['workers']) == 4, f"{config_toml['workers']=} != 4" -assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" + machine.succeed("systemctl start user@0.service") + machine.wait_for_unit("user@0.service") -with machine.nested("must succeed: initial nixos-rebuild switch"): + run_log(machine, "systemctl start dynix-daemon.service") + machine.wait_for_unit("dynix-daemon.service") + + machine.log("Checking initial harmonia.service conditions") + + # Config should have our initial values. + config_toml = get_config_file() + assert int(config_toml['workers']) == 4, f"{config_toml['workers']=} != 4" + assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" + + with machine.nested("must succeed: initial nixos-rebuild switch"): + machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec -v --fallback") + + # Config should not have changed. + config_toml = get_config_file() + assert int(config_toml['workers']) == 4, f"{config_toml['workers']=} != 4" + assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" + + machine.log("Testing dynamic workers=20") + new_workers = 20 + dynix_append("services.harmonia.settings.workers", new_workers) + + machine.log("Testing that workers, but not max_connection_rate, changed") + # Workers, but not max connection rate, should have changed. 
+ config_toml = get_config_file() + from pprint import pformat + machine.log(pformat(config_toml)) + assert int(config_toml['workers']) == new_workers, f"{config_toml['workers']=} != {new_workers}" + assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" + + machine.log("Testing dynamic max_connection_rate=100") + new_max_connection_rate = 100 + dynix_append("services.harmonia.settings.max_connection_rate", new_max_connection_rate) + + # Max connection rate should have changed, and workers should be the same as before. + config_toml = get_config_file() + print(f"checking connection rate, {use_daemon=}") + assert int(config_toml['max_connection_rate']) == new_max_connection_rate, f"{config_toml['max_connection_rate']=} != {new_max_connection_rate}" + print(f"checking workers, {use_daemon=}") + assert int(config_toml['workers']) == new_workers, f"{config_toml['workers']=} != {new_workers}" + + machine.log("Done with tests; stopping dynix-daemon") + machine.succeed("systemctl stop dynix-daemon.service") + + # And this should set everything back. machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec -v --fallback") + machine.wait_for_unit("harmonia.service") + config_toml = get_config_file() + assert int(config_toml['max_connection_rate']) == 256, f'{config_toml["max_connection_rate"]=} != 256' + assert int(config_toml['workers']) == 4, f'{config_toml["workers"]=} != 4' -# Config should not have changed. 
-config_toml = get_config_file() -assert int(config_toml['workers']) == 4, f"{config_toml['workers']=} != 4" -assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" +machine.start(allow_reboot=True) +machine.wait_for_unit("install-dynix.service") +try: + run_all_tests(machine, use_daemon=False) +except Exception as e: + machine.log(f"ERROR running CLI tests: {e}") -new_workers = 20 -dynix_append("services.harmonia.settings.workers", new_workers) -do_apply() +machine.reboot() -# Workers, but not max connection rate, should have changed. -config_toml = get_config_file() -assert int(config_toml['workers']) == new_workers, f"{config_toml['workers']=} != {new_workers}" -assert int(config_toml['max_connection_rate']) == 256, f"{config_toml['max_connection_rate']=} != 256" - -new_max_connection_rate = 100 -dynix_append("services.harmonia.settings.max_connection_rate", new_max_connection_rate) -do_apply() - -# Max connection rate should have changed, and workers should be the same as before. -config_toml = get_config_file() -assert int(config_toml['max_connection_rate']) == new_max_connection_rate, f"{config_toml['max_connection_rate']=} != {new_max_connection_rate}" -assert int(config_toml['workers']) == new_workers, f"{config_toml['workers']=} != {new_workers}" - -# And this should set everything back. -machine.succeed("env PAGER= nixos-rebuild switch --log-format raw-with-logs --no-reexec -v --fallback") -machine.wait_for_unit("harmonia.service") -config_toml = get_config_file() -assert int(config_toml['max_connection_rate']) == 256, f'{config_toml["max_connection_rate"]=} != 256' -assert int(config_toml['workers']) == 4, f'{config_toml["workers"]=} != 4' +machine.wait_for_unit("install-dynix.service") +try: + run_all_tests(machine, use_daemon=True) +except Exception as e: + machine.log(f"ERROR running DAEMON tests: {e}") + raise