more refactoring

commit 2970d15532 (parent d7dd7ca7be)
17 changed files with 432 additions and 340 deletions
@@ -8,6 +8,7 @@ use axum::{
     },
 };
 
+/// Extractor for the Accept header
 pub struct ExtractAccept(pub HeaderValue);
 
 #[async_trait]
@@ -18,10 +19,50 @@ where
     type Rejection = (StatusCode, &'static str);
 
     async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
-        if let Some(user_agent) = parts.headers.get(ACCEPT) {
-            Ok(ExtractAccept(user_agent.clone()))
+        if let Some(accept) = parts.headers.get(ACCEPT) {
+            Ok(ExtractAccept(accept.clone()))
         } else {
-            Err((StatusCode::BAD_REQUEST, "`User-Agent` header is missing"))
+            Err((StatusCode::NOT_ACCEPTABLE, "`Accept` header is missing"))
         }
     }
 }
 
+/// Supported content types
+pub(super) enum ResponseType {
+    HTML,
+    JSON,
+}
+
+/// Parses the Accept header and returns content type to return
+pub(super) fn parse_accept(accept: &HeaderValue) -> ResponseType {
+    let bytes = accept.as_bytes();
+    if bytes.starts_with(b"application/json") {
+        return ResponseType::JSON;
+    }
+    ResponseType::HTML
+}
+
+#[cfg(test)]
+mod tests {
+    use axum::{
+        extract::FromRequest,
+        http::{HeaderValue, Request},
+    };
+
+    use super::*;
+
+    #[tokio::test]
+    async fn test_accept() {
+        let req = Request::builder()
+            .header(ACCEPT, "application/json; charset=utf-8")
+            .body(())
+            .unwrap();
+
+        let extract = ExtractAccept::from_request(req, &()).await.unwrap();
+
+        assert_eq!(
+            extract.0,
+            HeaderValue::from_static("application/json; charset=utf-8")
+        );
+    }
+}
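As a rough illustration of how the new extractor and parse_accept helper fit together, a handler could branch on the negotiated type like the sketch below. This is not part of the commit; render_status is a hypothetical function name, and ExtractAccept, parse_accept and ResponseType are assumed to be in scope from the module above.

// Hypothetical usage sketch, not part of this commit.
async fn render_status(ExtractAccept(accept): ExtractAccept) -> &'static str {
    match parse_accept(&accept) {
        ResponseType::JSON => r#"{"status":"ok"}"#,
        ResponseType::HTML => "<p>ok</p>",
    }
}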
@@ -5,10 +5,13 @@ use axum::{
 };
 use thiserror::Error;
 
+/// Result type for HTTP requests
 pub type Result<T> = core::result::Result<T, AppError>;
 
+/// Error type that can be returned from HTTP requests
 #[derive(Error, Debug)]
 pub enum AppError {
+    /// Client error (e.g. bad input, not found)
     #[error("client error: <{code}> {message}")]
     Client {
         status: StatusCode,
@@ -16,18 +19,23 @@ pub enum AppError {
         message: String,
     },
 
+    /// Server error caused by Docker API errors
     #[error("docker error: {0}")]
     DockerError(#[from] bollard::errors::Error),
 
+    /// Server error caused by file operation errors
     #[error("file error: {0}")]
     FileError(#[from] std::io::Error),
 
+    /// Server error caused by unexpected internal errors
     #[error("unexpected internal error: {0}")]
     Internal(#[from] anyhow::Error),
 
+    /// Errors caused by JSON parsing errors (both incoming and outgoing)
     #[error("incoming JSON format error: {0}")]
     JSONFormat(#[from] JsonRejection),
 
+    /// Server error caused by template rendering errors
     #[error("template error: {0}")]
     Template(#[from] askama::Error),
 }
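For context, the #[from] attributes on these variants are what let handlers bubble errors up with the ? operator. A minimal sketch, not taken from the commit (read_compose_file is a hypothetical helper), assuming the Result alias above and tokio:

// Hypothetical sketch: ? converts std::io::Error into AppError::FileError
// through the From impl that thiserror generates for #[from].
async fn read_compose_file(path: &std::path::Path) -> Result<String> {
    let contents = tokio::fs::read_to_string(path).await?;
    Ok(contents)
}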
@@ -1,4 +1,7 @@
-use super::error::{AppError, ErrorInfo};
+use super::{
+    accept::{parse_accept, ResponseType},
+    error::{AppError, ErrorInfo},
+};
 use askama::Template;
 use askama_axum::IntoResponse;
 use axum::{
@@ -32,14 +35,18 @@ pub fn reply<T: Template>(json: serde_json::Value, html: T) -> HandlerResponse {
     Ok(response)
 }
 
+/// Handles the multiple return type system of staxman.
+/// Handlers might return an Error or a Response object, this will intercept
+/// those and return them as either JSON or HTML depending on the Accept header.
 pub async fn response_interceptor<B>(
     request: Request<B>,
     next: Next<B>,
 ) -> axum::response::Response {
-    let accept_header = request
+    let accept = request
         .headers()
         .get(&ACCEPT)
-        .map(|value| value.as_ref().to_owned());
+        .map(|header| parse_accept(header))
+        .unwrap_or_else(|| ResponseType::HTML);
 
     let mut response = next.run(request).await;
 
@@ -49,9 +56,9 @@ pub async fn response_interceptor<B>(
         message,
     }) = response.extensions_mut().remove::<ErrorInfo>()
     {
-        match accept_header.as_deref() {
-            Some(b"application/json") => {
-                return (status, Json(json!({"code": code, "message": message}))).into_response()
+        match accept {
+            ResponseType::JSON => {
+                return (status, Json(json!({"code": code, "message": message}))).into_response();
             }
             _ => {
                 return (
@@ -61,15 +68,19 @@ pub async fn response_interceptor<B>(
                         message,
                     },
                 )
-                    .into_response()
+                    .into_response();
             }
         }
     }
 
     if let Some(Response { html, json }) = response.extensions_mut().remove::<Response>() {
-        match accept_header.as_deref() {
-            Some(b"application/json") => return Json(json).into_response(),
-            _ => return Html(html).into_response(),
+        match accept {
+            ResponseType::JSON => {
+                return Json(json).into_response();
+            }
+            _ => {
+                return Html(html).into_response();
+            }
         }
     }
 
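A sketch of how response_interceptor would typically be attached to a router, assuming axum 0.6-style middleware::from_fn; the route shown is hypothetical and not taken from this commit:

// Hypothetical wiring sketch: runs every response through the interceptor.
use axum::{middleware, routing::get, Router};

fn router() -> Router {
    Router::new()
        .route("/health", get(|| async { "ok" }))
        .layer(middleware::from_fn(response_interceptor))
}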
@@ -1,301 +0,0 @@
-use super::{
-    container::ContainerInfo, error::StackError, git::ThreadSafeRepository,
-    nix::parse_arion_compose,
-};
-use crate::http::error::{AppError, Result};
-use axum::http::StatusCode;
-use bollard::{container::ListContainersOptions, service::ContainerSummary, Docker};
-use futures_util::future::try_join;
-use serde::Serialize;
-use std::{
-    collections::HashMap,
-    path::{Path, PathBuf},
-    process::Command,
-};
-use tempfile::tempdir;
-use tokio::fs;
-
-const COMPOSE_FILE: &str = "arion-compose.nix";
-const PACKAGE_FILE: &str = "arion-pkgs.nix";
-const PACKAGE_CONTENTS: &str = r#"import <nixpkgs> { system = "x86_64-linux"; }
-"#;
-
-async fn is_stack(dir: &Path) -> Result<bool> {
-    Ok(fs::try_exists(dir.join(COMPOSE_FILE)).await?)
-}
-
-pub async fn get_containers(docker: &Docker, stack_name: &str) -> Result<Vec<ContainerSummary>> {
-    Ok(docker
-        .list_containers(Some(ListContainersOptions {
-            all: true,
-            limit: None,
-            size: true,
-            filters: HashMap::from([(
-                "label".to_string(),
-                vec![format!("com.docker.compose.project={}", stack_name)],
-            )]),
-        }))
-        .await?)
-}
-
-pub async fn get_compose(base_dir: &Path, stack_name: &str) -> Result<String> {
-    let dir = base_dir.join(stack_name);
-    if !is_stack(&dir).await? {
-        return Err(StackError::NotFound.into());
-    }
-
-    let contents = tokio::fs::read_to_string(dir.join(COMPOSE_FILE)).await?;
-    Ok(contents)
-}
-
-#[derive(Serialize)]
-pub struct ServiceInfo {
-    name: String,
-    container: Option<String>,
-    running: bool,
-}
-
-#[derive(Serialize)]
-pub struct StackInfo {
-    pub folder: String,
-    pub name: String,
-    pub active: bool,
-    pub services: Vec<ServiceInfo>,
-}
-
-impl StackInfo {
-    pub fn stats(&self) -> (usize, usize) {
-        let running = self.services.iter().filter(|s| s.running).count();
-        (running, self.services.len() - running)
-    }
-}
-
-#[derive(Serialize)]
-pub struct NodeInfo {
-    pub stacks: Vec<StackInfo>,
-    pub containers: Vec<ContainerInfo>,
-}
-
-fn get_service(containers: &Vec<ContainerInfo>, stack_name: &str, service: &str) -> ServiceInfo {
-    let container = containers.iter().find(|cont| {
-        let labels = cont.labels.clone().unwrap_or_default();
-        labels.get("com.docker.compose.project") == Some(&stack_name.to_string())
-            && labels.get("com.docker.compose.service") == Some(&service.to_string())
-    });
-
-    match container {
-        Some(info) => ServiceInfo {
-            name: service.to_string(),
-            container: Some(info.name.clone()),
-            running: info.running(),
-        },
-        _ => ServiceInfo {
-            name: service.to_string(),
-            container: None,
-            running: false,
-        },
-    }
-}
-
-pub async fn list(base_dir: &Path, docker: &Docker) -> Result<NodeInfo> {
-    let containers: Vec<ContainerInfo> = docker
-        .list_containers(Some(ListContainersOptions::<String> {
-            all: true,
-            limit: None,
-            ..Default::default()
-        }))
-        .await?
-        .iter()
-        .map(|c| c.clone().into())
-        .collect();
-
-    let mut dirs = fs::read_dir(base_dir).await?;
-    let mut stacks = vec![];
-    while let Some(dir) = dirs.next_entry().await? {
-        let meta = dir.metadata().await?;
-        if !meta.is_dir() {
-            continue;
-        }
-        if is_stack(&dir.path()).await? {
-            let folder = dir.file_name().to_string_lossy().to_string();
-            let compose_file = get_compose(base_dir, &folder).await?;
-            let info = parse_arion_compose(&compose_file)?;
-            let name = info.project;
-            // Check status by analyzing containers
-            let active = containers
-                .iter()
-                .any(|cont| cont.state == "running" && cont.stack() == Some(name.clone()));
-            let services = info
-                .services
-                .iter()
-                .map(|service| get_service(&containers, &name, service))
-                .collect();
-            stacks.push(StackInfo {
-                folder,
-                name,
-                active,
-                services,
-            })
-        }
-    }
-
-    Ok(NodeInfo { stacks, containers })
-}
-
-pub async fn write_compose(base_dir: &Path, stack_name: &str, contents: &str) -> Result<()> {
-    let dir = base_dir.join(stack_name);
-    if !is_stack(&dir).await? {
-        return Err(StackError::NotFound.into());
-    }
-
-    Ok(fs::write(dir.join(COMPOSE_FILE), contents).await?)
-}
-
-pub fn commit_compose(
-    repository: ThreadSafeRepository,
-    stack_name: &str,
-    message: &str,
-) -> Result<()> {
-    let compose_path = format!("{}/{}", stack_name, COMPOSE_FILE);
-    repository.commit_files(&[&PathBuf::from(compose_path)], message)?;
-    Ok(())
-}
-
-pub async fn create_new(
-    repository: ThreadSafeRepository,
-    stack_name: &str,
-    source: &str,
-) -> Result<()> {
-    // Calculate stack directory and create it
-    let stack_path = repository.path.join(stack_name);
-    fs::create_dir_all(&stack_path).await?;
-
-    // Create package file and compose file
-    try_join(
-        fs::write(stack_path.join(PACKAGE_FILE), PACKAGE_CONTENTS),
-        fs::write(stack_path.join(COMPOSE_FILE), source),
-    )
-    .await?;
-
-    // Commit everything
-    repository.commit_files(
-        &[
-            &PathBuf::from(format!("{}/{}", stack_name, PACKAGE_FILE)),
-            &PathBuf::from(format!("{}/{}", stack_name, COMPOSE_FILE)),
-        ],
-        format!("Created stack {}", stack_name).as_str(),
-    )?;
-
-    Ok(())
-}
-
-pub async fn remove(
-    base_dir: &Path,
-    arion_bin: &Path,
-    repository: ThreadSafeRepository,
-    stack_name: &str,
-) -> Result<()> {
-    // Remove all containers and resources
-    command(base_dir, stack_name, arion_bin, StackCommand::Down).await?;
-
-    // Remove from repository
-    repository.remove_folder(
-        &PathBuf::from(stack_name),
-        format!("Removed stack {}", stack_name).as_str(),
-    )?;
-
-    // Remove from disk
-    fs::remove_dir_all(repository.path.join(stack_name)).await?;
-
-    Ok(())
-}
-
-pub async fn check_compose(arion_bin: &Path, source: &str) -> Result<String> {
-    // Check that it's a valid nix tree
-    let info = parse_arion_compose(source).map_err(|err| AppError::Client {
-        status: StatusCode::NOT_ACCEPTABLE,
-        code: "failed-nix-parse",
-        message: format!("Parse error: {}", err),
-    })?;
-
-    // Create a temporary stack and check that it generates a YAML tree
-    let dir = tempdir()?;
-    let path = dir.path();
-
-    // Create package file and compose file
-    try_join(
-        fs::write(path.join(PACKAGE_FILE), PACKAGE_CONTENTS),
-        fs::write(path.join(COMPOSE_FILE), source),
-    )
-    .await?;
-
-    let cmd = arion(arion_bin, path, StackCommand::Test)?;
-
-    dir.close()?;
-
-    if let CommandStatus::Failure(_, err) = cmd {
-        Err(AppError::Client {
-            status: StatusCode::NOT_ACCEPTABLE,
-            code: "failed-arion-check",
-            message: format!("Arion {}", err),
-        })
-    } else {
-        Ok(info.project)
-    }
-}
-
-pub enum CommandStatus {
-    Success(String, String),
-    Failure(String, String),
-}
-
-fn arion(arion_bin: &Path, path: &Path, action: StackCommand) -> Result<CommandStatus> {
-    let output = Command::new(arion_bin)
-        .args(action.command())
-        .current_dir(path)
-        .output()?;
-
-    // Convert stdout and stderr to String
-    let stdout_str = String::from_utf8_lossy(&output.stdout).to_string();
-    let stderr_str = String::from_utf8_lossy(&output.stderr).to_string();
-
-    if output.status.success() {
-        Ok(CommandStatus::Success(stdout_str, stderr_str))
-    } else {
-        Ok(CommandStatus::Failure(stdout_str, stderr_str))
-    }
-}
-
-pub async fn command(
-    base_dir: &Path,
-    stack_name: &str,
-    arion_bin: &Path,
-    action: StackCommand,
-) -> Result<CommandStatus> {
-    let dir = base_dir.join(stack_name);
-    if !is_stack(&dir).await? {
-        return Err(StackError::NotFound.into());
-    }
-
-    arion(arion_bin, &dir, action)
-}
-
-pub enum StackCommand {
-    Down,
-    Start,
-    Stop,
-    Restart,
-    Test,
-}
-
-impl StackCommand {
-    fn command(&self) -> &[&str] {
-        match self {
-            StackCommand::Down => &["down"],
-            StackCommand::Start => &["up", "-d"],
-            StackCommand::Stop => &["stop"],
-            StackCommand::Restart => &["restart"],
-            StackCommand::Test => &["config"],
-        }
-    }
-}
src/node/stack/arion.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
+use anyhow::Result;
+use std::path::Path;
+use tokio::process::Command;
+
+use crate::node::error::StackError;
+
+use super::utils::is_stack;
+
+pub enum CommandStatus {
+    Success(String, String),
+    Failure(String, String),
+}
+
+pub(super) async fn run(
+    arion_bin: &Path,
+    path: &Path,
+    action: StackCommand,
+) -> Result<CommandStatus> {
+    let output = Command::new(arion_bin)
+        .args(action.command())
+        .current_dir(path)
+        .output()
+        .await?;
+
+    // Convert stdout and stderr to String
+    let stdout_str = String::from_utf8_lossy(&output.stdout).to_string();
+    let stderr_str = String::from_utf8_lossy(&output.stderr).to_string();
+
+    if output.status.success() {
+        Ok(CommandStatus::Success(stdout_str, stderr_str))
+    } else {
+        Ok(CommandStatus::Failure(stdout_str, stderr_str))
+    }
+}
+
+pub async fn command(
+    base_dir: &Path,
+    stack_name: &str,
+    arion_bin: &Path,
+    action: StackCommand,
+) -> Result<CommandStatus> {
+    let dir = base_dir.join(stack_name);
+    if !is_stack(&dir).await? {
+        return Err(StackError::NotFound.into());
+    }
+
+    run(arion_bin, &dir, action).await
+}
+
+pub enum StackCommand {
+    Down,
+    Start,
+    Stop,
+    Restart,
+    Test,
+}
+
+impl StackCommand {
+    fn command(&self) -> &[&str] {
+        match self {
+            StackCommand::Down => &["down"],
+            StackCommand::Start => &["up", "-d"],
+            StackCommand::Stop => &["stop"],
+            StackCommand::Restart => &["restart"],
+            StackCommand::Test => &["config"],
+        }
+    }
+}
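As an illustration of the new module boundary, a caller could restart a stack through arion::command roughly as sketched below. This is not part of the commit; the paths and stack name are made-up placeholders, and `use crate::node::stack::arion;` is assumed to be in scope.

// Hypothetical sketch: restart a stack via the new arion module.
use std::path::Path;

async fn restart_example() -> anyhow::Result<()> {
    let status = arion::command(
        Path::new("/var/lib/staxman/stacks"),          // assumed base directory
        "example-stack",                               // assumed stack name
        Path::new("/run/current-system/sw/bin/arion"), // assumed arion binary
        arion::StackCommand::Restart,
    )
    .await?;

    if let arion::CommandStatus::Failure(_, stderr) = status {
        eprintln!("restart failed: {stderr}");
    }
    Ok(())
}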
src/node/stack/compose.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
+use anyhow::Result;
+use axum::http::StatusCode;
+use futures_util::future::try_join;
+use std::path::{Path, PathBuf};
+use tempfile::tempdir;
+use tokio::fs;
+
+use crate::{
+    http::error::AppError,
+    node::{error::StackError, git::ThreadSafeRepository, nix::parse_arion_compose},
+};
+
+use super::{arion, utils, COMPOSE_FILE, PACKAGE_CONTENTS, PACKAGE_FILE};
+
+pub async fn get(base_dir: &Path, stack_name: &str) -> Result<String> {
+    let dir = base_dir.join(stack_name);
+    if !utils::is_stack(&dir).await? {
+        return Err(StackError::NotFound.into());
+    }
+
+    let contents = tokio::fs::read_to_string(dir.join(COMPOSE_FILE)).await?;
+    Ok(contents)
+}
+
+pub async fn write(base_dir: &Path, stack_name: &str, contents: &str) -> Result<()> {
+    let dir = base_dir.join(stack_name);
+    if !utils::is_stack(&dir).await? {
+        return Err(StackError::NotFound.into());
+    }
+
+    Ok(fs::write(dir.join(COMPOSE_FILE), contents).await?)
+}
+
+pub fn commit(repository: ThreadSafeRepository, stack_name: &str, message: &str) -> Result<()> {
+    let compose_path = format!("{}/{}", stack_name, COMPOSE_FILE);
+    repository.commit_files(&[&PathBuf::from(compose_path)], message)?;
+    Ok(())
+}
+
+pub async fn check(arion_bin: &Path, source: &str) -> crate::http::error::Result<String> {
+    // Check that it's a valid nix tree
+    let info = parse_arion_compose(source).map_err(|err| AppError::Client {
+        status: StatusCode::NOT_ACCEPTABLE,
+        code: "failed-nix-parse",
+        message: format!("Parse error: {}", err),
+    })?;
+
+    // Create a temporary stack and check that it generates a YAML tree
+    let dir = tempdir()?;
+    let path = dir.path();
+
+    // Create package file and compose file
+    try_join(
+        fs::write(path.join(PACKAGE_FILE), PACKAGE_CONTENTS),
+        fs::write(path.join(COMPOSE_FILE), source),
+    )
+    .await?;
+
+    let cmd = arion::run(arion_bin, path, arion::StackCommand::Test).await?;
+
+    dir.close()?;
+
+    if let arion::CommandStatus::Failure(_, err) = cmd {
+        Err(AppError::Client {
+            status: StatusCode::NOT_ACCEPTABLE,
+            code: "failed-arion-check",
+            message: format!("Arion {}", err),
+        })
+    } else {
+        Ok(info.project)
+    }
+}
src/node/stack/list.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
+use anyhow::Result;
+use bollard::{container::ListContainersOptions, service::ContainerSummary, Docker};
+use std::{collections::HashMap, path::Path};
+use tokio::fs;
+
+use crate::node::{container::ContainerInfo, nix::parse_arion_compose};
+
+use super::{compose, utils, NodeInfo, ServiceInfo, StackInfo};
+
+impl StackInfo {
+    pub fn stats(&self) -> (usize, usize) {
+        let running = self.services.iter().filter(|s| s.running).count();
+        (running, self.services.len() - running)
+    }
+}
+
+fn get_service(containers: &Vec<ContainerInfo>, stack_name: &str, service: &str) -> ServiceInfo {
+    let container = containers.iter().find(|cont| {
+        let labels = cont.labels.clone().unwrap_or_default();
+        labels.get("com.docker.compose.project") == Some(&stack_name.to_string())
+            && labels.get("com.docker.compose.service") == Some(&service.to_string())
+    });
+
+    match container {
+        Some(info) => ServiceInfo {
+            name: service.to_string(),
+            container: Some(info.name.clone()),
+            running: info.running(),
+        },
+        _ => ServiceInfo {
+            name: service.to_string(),
+            container: None,
+            running: false,
+        },
+    }
+}
+
+pub async fn all(base_dir: &Path, docker: &Docker) -> Result<NodeInfo> {
+    let containers: Vec<ContainerInfo> = docker
+        .list_containers(Some(ListContainersOptions::<String> {
+            all: true,
+            limit: None,
+            ..Default::default()
+        }))
+        .await?
+        .iter()
+        .map(|c| c.clone().into())
+        .collect();
+
+    let mut dirs = fs::read_dir(base_dir).await?;
+    let mut stacks = vec![];
+    while let Some(dir) = dirs.next_entry().await? {
+        let meta = dir.metadata().await?;
+        if !meta.is_dir() {
+            continue;
+        }
+        if utils::is_stack(&dir.path()).await? {
+            let folder = dir.file_name().to_string_lossy().to_string();
+            let compose_file = compose::get(base_dir, &folder).await?;
+            let info = parse_arion_compose(&compose_file)?;
+            let name = info.project;
+            // Check status by analyzing containers
+            let active = containers
+                .iter()
+                .any(|cont| cont.state == "running" && cont.stack() == Some(name.clone()));
+            let services = info
+                .services
+                .iter()
+                .map(|service| get_service(&containers, &name, service))
+                .collect();
+            stacks.push(StackInfo {
+                folder,
+                name,
+                active,
+                services,
+            })
+        }
+    }
+
+    Ok(NodeInfo { stacks, containers })
+}
+
+pub async fn containers(docker: &Docker, stack_name: &str) -> Result<Vec<ContainerSummary>> {
+    Ok(docker
+        .list_containers(Some(ListContainersOptions {
+            all: true,
+            limit: None,
+            size: true,
+            filters: HashMap::from([(
+                "label".to_string(),
+                vec![format!("com.docker.compose.project={}", stack_name)],
+            )]),
+        }))
+        .await?)
+}
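A small sketch of how the stats helper above might be consumed, for example to build a per-stack summary line; this is illustrative only and not part of the commit:

// Hypothetical sketch: summarise running vs. stopped services per stack.
fn summarise(node: &NodeInfo) -> Vec<String> {
    node.stacks
        .iter()
        .map(|stack| {
            let (running, stopped) = stack.stats();
            format!("{}: {} running, {} stopped", stack.name, running, stopped)
        })
        .collect()
}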
src/node/stack/mod.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
+use serde::Serialize;
+
+use super::container::ContainerInfo;
+
+pub mod arion;
+pub mod compose;
+pub mod list;
+pub mod operation;
+
+mod utils;
+
+#[derive(Serialize)]
+pub struct ServiceInfo {
+    name: String,
+    container: Option<String>,
+    running: bool,
+}
+
+#[derive(Serialize)]
+pub struct NodeInfo {
+    pub stacks: Vec<StackInfo>,
+    pub containers: Vec<ContainerInfo>,
+}
+
+#[derive(Serialize)]
+pub struct StackInfo {
+    pub folder: String,
+    pub name: String,
+    pub active: bool,
+    pub services: Vec<ServiceInfo>,
+}
+
+const COMPOSE_FILE: &str = "arion-compose.nix";
+const PACKAGE_FILE: &str = "arion-pkgs.nix";
+const PACKAGE_CONTENTS: &str = r#"import <nixpkgs> { system = "x86_64-linux"; }
+"#;
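The handler diffs further down use this module layout directly; a condensed, hypothetical sketch of the new call sites, assuming `use crate::node::stack;` and the same AppState fields the handlers already use (stack_dir, docker, arion_bin), with placeholder stack names:

// Hypothetical condensed sketch of the refactored call sites.
async fn example(state: &AppState) -> anyhow::Result<()> {
    let _source = stack::compose::get(&state.stack_dir, "example-stack").await?;
    let _node = stack::list::all(&state.stack_dir, &state.docker).await?;
    stack::arion::command(
        &state.stack_dir,
        "example-stack",
        &state.arion_bin,
        stack::arion::StackCommand::Start,
    )
    .await?;
    Ok(())
}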
src/node/stack/operation.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
+use anyhow::Result;
+use futures_util::future::try_join;
+use std::path::{Path, PathBuf};
+use tokio::fs;
+
+use crate::node::git::ThreadSafeRepository;
+
+use super::{arion, COMPOSE_FILE, PACKAGE_CONTENTS, PACKAGE_FILE};
+
+pub async fn create(
+    repository: ThreadSafeRepository,
+    stack_name: &str,
+    source: &str,
+) -> Result<()> {
+    // Calculate stack directory and create it
+    let stack_path = repository.path.join(stack_name);
+    fs::create_dir_all(&stack_path).await?;
+
+    // Create package file and compose file
+    try_join(
+        fs::write(stack_path.join(PACKAGE_FILE), PACKAGE_CONTENTS),
+        fs::write(stack_path.join(COMPOSE_FILE), source),
+    )
+    .await?;
+
+    // Commit everything
+    repository.commit_files(
+        &[
+            &PathBuf::from(format!("{}/{}", stack_name, PACKAGE_FILE)),
+            &PathBuf::from(format!("{}/{}", stack_name, COMPOSE_FILE)),
+        ],
+        format!("Created stack {}", stack_name).as_str(),
+    )?;
+
+    Ok(())
+}
+
+pub async fn remove(
+    base_dir: &Path,
+    arion_bin: &Path,
+    repository: ThreadSafeRepository,
+    stack_name: &str,
+) -> Result<()> {
+    // Remove all containers and resources
+    arion::command(base_dir, stack_name, arion_bin, arion::StackCommand::Down).await?;
+
+    // Remove from repository
+    repository.remove_folder(
+        &PathBuf::from(stack_name),
+        format!("Removed stack {}", stack_name).as_str(),
+    )?;
+
+    // Remove from disk
+    fs::remove_dir_all(repository.path.join(stack_name)).await?;
+
+    Ok(())
+}
src/node/stack/utils.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+use anyhow::Result;
+use std::path::Path;
+use tokio::fs;
+
+use super::COMPOSE_FILE;
+
+pub(super) async fn is_stack(dir: &Path) -> Result<bool> {
+    Ok(fs::try_exists(dir.join(COMPOSE_FILE)).await?)
+}
@@ -27,7 +27,7 @@ struct HomeTemplate {
 }
 
 async fn home(State(state): State<AppState>) -> HandlerResponse {
-    let info = list(&state.stack_dir, &state.docker)
+    let info = list::all(&state.stack_dir, &state.docker)
         .await
         .map_err(AppError::from)?;
     let system = system_info();
@@ -1,11 +1,11 @@
 use axum::{extract::State, http::StatusCode};
 
-use crate::{http::error::AppError, node::stack::check_compose, AppState};
+use crate::{http::error::AppError, node::stack::compose, AppState};
 
 pub(super) async fn check_stack_file(
     State(state): State<AppState>,
     body: String,
 ) -> Result<StatusCode, AppError> {
-    check_compose(&state.arion_bin, &body).await?;
+    compose::check(&state.arion_bin, &body).await?;
     Ok(StatusCode::NO_CONTENT)
 }
@@ -5,7 +5,7 @@ use serde::Deserialize;
 
 use crate::{
     http::error::AppError,
-    node::stack::{check_compose, create_new},
+    node::stack::{compose, operation},
     AppState,
 };
 
@@ -23,9 +23,9 @@ pub(super) async fn create_stack(
     Form(form): Form<CreateStackForm>,
 ) -> Result<Redirect, AppError> {
     // Make sure body is is ok
-    let name = check_compose(&state.arion_bin, &form.source).await?;
+    let name = compose::check(&state.arion_bin, &form.source).await?;
 
-    create_new(state.repository, &name, &form.source).await?;
+    operation::create(state.repository, &name, &form.source).await?;
 
     Ok(Redirect::to(format!("/stack/{}/", name).as_str()))
 }
@@ -5,7 +5,7 @@ use axum::{
     response::Redirect,
 };
 
-use crate::{http::error::AppError, node::stack::remove, AppState};
+use crate::{http::error::AppError, node::stack::operation, AppState};
 
 #[derive(Template)]
 #[template(path = "stack/delete-one.html")]
@@ -21,7 +21,7 @@ pub(super) async fn delete_stack(
     Path(stack_name): Path<String>,
     State(state): State<AppState>,
 ) -> Result<Redirect, AppError> {
-    remove(
+    operation::remove(
         &state.stack_dir,
         &state.arion_bin,
         state.repository,
@@ -6,11 +6,7 @@ use axum::{
 };
 use serde::Deserialize;
 
-use crate::{
-    http::error::AppError,
-    node::stack::{check_compose, command, commit_compose, write_compose, StackCommand},
-    AppState,
-};
+use crate::{http::error::AppError, node::stack::arion, node::stack::compose, AppState};
 
 #[derive(Deserialize)]
 pub(super) struct EditStackForm {
@@ -41,20 +37,20 @@ pub(super) async fn edit_stack(
     let source = form.source.replace("\r\n", "\n");
 
     // Make sure file is ok
-    check_compose(&state.arion_bin, &source).await?;
+    compose::check(&state.arion_bin, &source).await?;
 
     // Write compose file
-    write_compose(&state.stack_dir, &stack_name, &source).await?;
+    compose::write(&state.stack_dir, &stack_name, &source).await?;
 
     // Git commit
-    commit_compose(state.repository, &stack_name, &commit_message)?;
+    compose::commit(state.repository, &stack_name, &commit_message)?;
 
     // Update stack
-    command(
+    arion::command(
         &state.stack_dir,
         &stack_name,
         &state.arion_bin,
-        StackCommand::Start,
+        arion::StackCommand::Start,
     )
     .await?;
 
@@ -5,7 +5,11 @@ use axum::{
     Router,
 };
 
-use crate::{http::error::AppError, node::stack::command, node::stack::StackCommand, AppState};
+use crate::{
+    http::error::AppError,
+    node::stack::arion::{command, StackCommand},
+    AppState,
+};
 
 use axum::routing::get;
 
@@ -4,11 +4,7 @@ use serde_json::json;
 
 use crate::{
     http::response::{reply, HandlerResponse},
-    node::{
-        container::ContainerInfo,
-        nix::parse_arion_compose,
-        stack::{get_compose, get_containers},
-    },
+    node::{container::ContainerInfo, nix::parse_arion_compose, stack},
     AppState,
 };
 
@@ -25,9 +21,9 @@ pub(super) async fn get_one(
     Path(stack_name): Path<String>,
     State(state): State<AppState>,
 ) -> HandlerResponse {
-    let file_contents = get_compose(&state.stack_dir, &stack_name).await?;
+    let file_contents = stack::compose::get(&state.stack_dir, &stack_name).await?;
     let info = parse_arion_compose(&file_contents)?;
-    let containers = get_containers(&state.docker, &info.project).await?;
+    let containers = stack::list::containers(&state.docker, &info.project).await?;
 
     reply(
         json!({