commit 01
commit 7a65b34ef4
12 changed files with 4333 additions and 0 deletions
.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
/target
/run
/.idea
Caddyfile (new file, 27 lines)
@@ -0,0 +1,27 @@
{
    auto_https off

    admin unix//test
}

:80 {
    handle_path /_sscdc/* {
        reverse_proxy unix//some/socket
    }
}

http://moritzruth.de {
    handle_path /_sscdc/* {
        reverse_proxy unix//some/socket
    }

    handle {
        redir /kontakt /contact permanent
        header /assets/* Cache-Control "public, max-age=604800, immutable"
        root * /home/moritz/dev/moritzruth/sscdc/run/sites/moritzruth.de
        try_files {path} {path}/index.html /index.html
        file_server {
            precompressed br gzip
        }
    }
}
Cargo.lock (generated, new file, 3742 lines)
File diff suppressed because it is too large
Cargo.toml (new file, 25 lines)
@@ -0,0 +1,25 @@
[package]
name = "sscdc"
version = "0.1.0"
edition = "2024"

[dependencies]
reqwest = { version = "0.12.12", features = ["json", "stream"] }
toml = "0.8.19"
serde = "1.0.217"
validator = { version = "0.19.0", features = ["derive"] }
figment = { version = "0.10.19", features = ["toml"] }
color-eyre = "0.6.3"
serde_regex = "1.1.0"
regex = "1.11.1"
tokio = { version = "1.43.0", default-features = false, features = ["macros", "process", "fs", "rt"] }
log = "0.4.26"
env_logger = "0.11.6"
serde_json = "1.0.139"
camino = "1.1.9"
tide = "0.17.0-beta.1"
zip = { version = "2.2.3", default-features = false, features = ["deflate"] }
futures = "0.3.31"
hyper = { version = "1.6.0", features = ["client", "http1"] }
hyper-util = { version = "0.1.10", features = ["client", "tokio"] }
http-body-util = "0.1.2"
README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
# SSCDC

> Static Site Continuous Deployment Companion
run/config.toml (new file, 7 lines)
@@ -0,0 +1,7 @@
port = 8000
sites_directory = "./sites"
sockets_directory = "./sockets"

[scopes.moritzruth_de]
domain_pattern = "moritzruth\\.de"
secret = "Q9hqq3hq^^MY2^JnD23isbqFm3oXAqiHArtp`F@dNgJ4fYs`LqEzXoK%&5W%Yi9#cjdY3DA%UYc%kxCMqAwJ%K^LxsWCFH~nvyzp"
rust-toolchain.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[toolchain]
channel = "nightly"
src/api.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
use camino::Utf8Path;
use color_eyre::Result;
use serde::Deserialize;
use std::fs;
use std::os::unix::net::UnixListener;
use tide::Request;
use tokio::task::JoinHandle;

#[derive(Deserialize)]
struct NotifyBody {
    pub site_archive_url: String,
}

pub async fn start_api_server(socket_path: &Utf8Path) -> Result<()> {
    fs::remove_file(&socket_path)?;
    let listener = UnixListener::bind(&socket_path)?;
    let mut server = tide::new();
    server
        .at("/notify")
        .post(|req: Request<()>| async { Ok("hey") });

    server.bind(listener).await?;
    Ok(())
}
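The /notify route above is the endpoint a deployment pipeline would call once a new site archive is available; it is reached from the outside through the /_sscdc/* reverse-proxy route set up in the Caddy configuration below. The handler is still a stub in this commit, so the following is only a sketch of such a client call, assuming the route ends up at /_sscdc/notify; the domain, archive URL, and the absence of any authentication against the configured scope secret are assumptions, and only the JSON field name comes from NotifyBody above.

// Sketch of a client-side notification request; not part of the commit.
// The field name `site_archive_url` comes from NotifyBody in src/api.rs;
// the request URL and archive URL are hypothetical placeholders.
use serde_json::json;

#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), reqwest::Error> {
    reqwest::Client::new()
        .post("http://moritzruth.de/_sscdc/notify")
        .json(&json!({
            "site_archive_url": "https://ci.example.com/artifacts/site.zip"
        }))
        .send()
        .await?
        // Treat any non-2xx response as an error.
        .error_for_status()?;
    Ok(())
}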
src/caddy.rs (new file, 186 lines)
@@ -0,0 +1,186 @@
use camino::{Utf8Path, Utf8PathBuf};
use color_eyre::Result;
use color_eyre::eyre::{OptionExt, eyre};
use http_body_util::BodyExt;
use hyper::{Request, StatusCode};
use hyper_util::rt::TokioIo;
use serde_json::json;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use tokio::net::UnixStream;
use tokio::process::Command;

#[derive(Clone)]
pub struct CaddyController {
    caddy_admin_socket_path: Utf8PathBuf,
    api_socket_path: Utf8PathBuf,
}

impl CaddyController {
    pub async fn upsert_site_configuration(
        &self,
        domain: &String,
        content_path: &Utf8Path,
    ) -> Result<()> {
        let configuration_object = json!({
            "@id": domain,
            "match": [{ "host": [domain] }],
            "handle": [
                {
                    "group": "1",
                    "match": [{ "path": ["/sscdc/*"] }],
                    "handle": [
                        {
                            "handler": "rewrite",
                            "strip_path_prefix": "/_sscdc"
                        },
                        {
                            "handler": "reverse_proxy",
                            "upstreams": [{ "dial": format!("unix/{}", &self.api_socket_path) }],
                        }
                    ]
                },
                {
                    "group": "1",
                    "handle": [
                        { "handler": "vars", "root": content_path.to_string() },
                        {
                            "handler": "file_server",
                            "precompressed": {
                                "br": {},
                                "gzip": {}
                            },
                            "precompressed_order": ["br", "gzip"]
                        }
                    ]
                }
            ],
            "terminal": true
        });

        let stream = UnixStream::connect(&self.caddy_admin_socket_path).await?;
        let (mut sender, connection) =
            hyper::client::conn::http1::handshake(TokioIo::new(stream)).await?;

        tokio::task::spawn(async move { connection.await.unwrap() });

        let response = sender
            .send_request(
                Request::builder()
                    .uri(&format!("/id/{}", domain))
                    .body(configuration_object.to_string().boxed())?,
            )
            .await?;

        match response.status() {
            StatusCode::OK => {}
            StatusCode::NOT_FOUND => {
                // The site does not yet exist.
                let response = sender
                    .send_request(
                        Request::builder()
                            .uri("/config/apps/http/servers/srv0/routes")
                            .body(configuration_object.to_string().boxed())?,
                    )
                    .await?;

                let status = response.status();

                if !status.is_success() {
                    return Err(eyre!(
                        "The configuration update request to Caddy failed with status code {}",
                        status
                    ));
                }
            }
            c => {
                return Err(eyre!(
                    "The configuration update request to Caddy failed with status code {}",
                    c
                ));
            }
        }

        Ok(())
    }
}

fn get_initial_caddy_configuration_object(
    admin_api_socket_path: &Utf8Path,
    api_socket_path: &Utf8Path,
) -> serde_json::Value {
    json!({
        "admin": {
            "listen": format!("unix/{}", admin_api_socket_path),
            "config": { "persist": false }
        },
        "logging": {
            "sink": {
                "writer": { "output": "discard" }
            },
            "logs": {
                "": {
                    "writer": { "output": "discard" }
                }
            }
        },
        "storage": {
            "module": "file_system",
            "root": "/tmp/sscdc-caddy"
        },
        "apps": {
            "http": {
                "servers": {
                    "srv0": {
                        "listen": [":80"],
                        "routes": [{
                            "match": [{ "path": ["/sscdc/*"] }],
                            "handle": [
                                {
                                    "handler": "rewrite",
                                    "strip_path_prefix": "/_sscdc"
                                },
                                {
                                    "handler": "reverse_proxy",
                                    "upstreams": [{ "dial": format!("unix/{}", api_socket_path) }],
                                }
                            ]
                        }],
                        "automatic_https": { "disable": true }
                    }
                }
            }
        }
    })
}

pub async fn start_caddy(
    api_socket_path: &Utf8Path,
    sockets_directory_path: &Utf8Path,
) -> Result<CaddyController> {
    let caddy_admin_socket_path = sockets_directory_path.join("caddy-admin-api.sock");
    let process = Command::new("caddy")
        .args(&["run", "--config", "-"])
        .stdin(Stdio::piped())
        .spawn()?;

    let initial_configuration_object =
        get_initial_caddy_configuration_object(&caddy_admin_socket_path, api_socket_path);

    let initial_configuration_string = initial_configuration_object.to_string();
    log::debug!(
        "Initial caddy configuration: {}",
        initial_configuration_string
    );

    process
        .stdin
        .ok_or_eyre("The caddy process stdin is not open.")?
        .write_all(initial_configuration_string.as_ref())
        .await?;

    Ok(CaddyController {
        caddy_admin_socket_path,
        api_socket_path: api_socket_path.to_path_buf(),
    })
}
src/config.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use color_eyre::Result;
use figment::providers::{Format, Toml};
use figment::Figment;
use serde::Deserialize;
use std::collections::HashMap;
use color_eyre::eyre::WrapErr;
use regex::Regex;
use validator::Validate;

#[derive(Deserialize, Debug, Validate)]
pub struct Config {
    pub port: u16,
    pub sites_directory: String,
    pub sockets_directory: String,
    #[validate(nested)]
    pub scopes: HashMap<String, ConfigScope>
}

#[derive(Deserialize, Debug, Validate)]
pub struct ConfigScope {
    #[serde(with = "serde_regex")]
    pub domain_pattern: Regex,
    #[validate(length(equal = 100))]
    pub secret: String
}

pub fn load_config() -> Result<Config> {
    let config: Config = Figment::new()
        .merge(Toml::file("./config.toml"))
        .extract()
        .wrap_err("Failed to load the configuration.")?;

    Ok(config)
}
src/main.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
#![feature(never_type)]

use crate::api::start_api_server;
use crate::caddy::start_caddy;
use crate::config::load_config;
use crate::sites::start_sites_worker;
use camino::Utf8Path;
use color_eyre::Result;
use color_eyre::eyre::WrapErr;
use log::LevelFilter;
use std::fs;

mod api;
mod caddy;
mod config;
mod sites;

#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<()> {
    color_eyre::install()?;
    env_logger::Builder::new()
        .filter_module("sscdc", LevelFilter::Info)
        .parse_default_env()
        .init();

    log::info!("Loading configuration…");
    let config = load_config()?;

    let sockets_directory_path = Utf8Path::new(&config.sockets_directory);
    fs::create_dir_all(&sockets_directory_path)
        .wrap_err("Failed to access or create the sockets directory.")?;
    let sockets_directory_path = sockets_directory_path.canonicalize_utf8().unwrap();

    log::info!("Starting internal API server…");
    let api_socket_path = sockets_directory_path.join("api.sock");
    start_api_server(&api_socket_path).await?;
    log::debug!("The internal API server is listening at {api_socket_path}");

    log::info!("Starting the reverse proxy…");
    let caddy_controller = start_caddy(&api_socket_path, &sockets_directory_path).await?;

    let sites_worker = start_sites_worker(config.sites_directory.to_string().into(), caddy_controller).await?;

    log::info!("Startup complete.");

    sites_worker.join_handle.await??;
}
src/sites.rs (new file, 233 lines)
@@ -0,0 +1,233 @@
use crate::caddy::CaddyController;
use camino::{Utf8Path, Utf8PathBuf};
use color_eyre::Result;
use color_eyre::eyre::WrapErr;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::{Arc, RwLockReadGuard, RwLockWriteGuard};
use tokio::fs;
use tokio::io::{AsyncWriteExt, BufWriter};
use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender};
use tokio::sync::{Mutex, RwLock};
use tokio::task::JoinHandle;
use futures::StreamExt;

#[derive(Serialize, Deserialize)]
pub struct SiteState {
    active_version: Option<SiteStateVersion>,
    current_version: Option<SiteStateVersion>,
}

impl SiteState {
    async fn read_from_site_directory(site_directory_path: &Utf8Path) -> Result<Self> {
        let string = fs::read_to_string(site_directory_path.join("site.toml")).await?;
        Ok(toml::from_str::<SiteState>(&string)?)
    }

    async fn write_to_site_directory(&self, site_directory_path: &Utf8Path) -> Result<()> {
        fs::write(
            site_directory_path.join("site.toml"),
            toml::to_string(&self).unwrap(),
        )
        .await?;
        Ok(())
    }

    fn is_outdated(&self) -> bool {
        self.get_current_version_if_outdated().is_some()
    }

    fn get_current_version_if_outdated(&self) -> Option<&SiteStateVersion> {
        if let Some(current_version) = &self.current_version {
            if let Some(active_version) = &self.active_version {
                if current_version.id != active_version.id {
                    return Some(&active_version)
                }
            }
        }

        None
    }
}

#[derive(Serialize, Deserialize, Clone)]
pub struct SiteStateVersion {
    id: String,
    download_url: String,
}

pub struct Site {
    pub domain: String,
    pub path: Utf8PathBuf,
    pub state: RwLock<SiteState>,
}

type Sites = RwLock<HashMap<String, Arc<Site>>>;

pub struct SitesWorker {
    sites: Arc<Sites>,
    sites_directory_path: Utf8PathBuf,
    download_tasks_sender: UnboundedSender<String>,
    pub join_handle: JoinHandle<Result<!>>,
}

impl SitesWorker {
    pub async fn set_current_version(
        &self,
        domain: String,
        id: String,
        download_url: String,
    ) -> Result<()> {
        let mut sites = self.sites.write().await;
        let site = sites.entry(domain.clone()).or_insert_with(|| Arc::new(Site {
            domain: domain.clone(),
            path: self.sites_directory_path.join(&domain),
            state: RwLock::new(SiteState {
                current_version: None,
                active_version: None,
            }),
        }));

        let mut state = site.state.write().await;
        state.current_version = Some(SiteStateVersion { id, download_url });
        state.write_to_site_directory(&site.path).await?;
        self.download_tasks_sender.send(domain).unwrap();

        Ok(())
    }
}

pub async fn load_sites(sites_directory_path: &Utf8Path) -> Result<Sites> {
    let mut sites: HashMap<String, Arc<Site>> = HashMap::new();

    if fs::try_exists(&sites_directory_path).await.unwrap_or(false) {
        let mut dir = fs::read_dir(&sites_directory_path).await?;
        loop {
            if let Some(entry) = dir.next_entry().await? {
                if entry.file_type().await?.is_dir() {
                    let path = Utf8PathBuf::from_path_buf(entry.path()).unwrap();
                    let domain = path.file_name().unwrap().to_string();
                    let state = SiteState::read_from_site_directory(&path).await?;

                    sites.insert(
                        domain.clone(),
                        Arc::new(Site {
                            path,
                            domain,
                            state: RwLock::new(state),
                        }),
                    );
                }
            } else {
                break;
            }
        }
    } else {
        fs::create_dir_all(&sites_directory_path)
            .await
            .wrap_err("Failed to create the sites directory (and its parents)")?;
    }

    Ok(RwLock::new(sites))
}

async fn handle_download(sites: &Sites, caddy_controller: &CaddyController, domain: String) -> Result<()> {
    let site = {
        let sites = sites.read().await;
        match sites.get(&domain) {
            None => return Ok(()),
            Some(a) => Arc::clone(a)
        }
    };

    let mut state = site.state.write().await;
    let current_version = match state.get_current_version_if_outdated() {
        None => return Ok(()),
        Some(v) => v
    };

    // Download
    let mut archive_file = fs::File::create(site.path.join(format!("{}.zip", current_version.id))).await?;
    let mut file_writer = BufWriter::new(&mut archive_file);

    let mut response = reqwest::get(&current_version.download_url).await?;

    while let Some(chunk) = response.chunk().await? {
        file_writer.write_all(&chunk).await?;
    }

    let extraction_directory_path = site.path.join(&current_version.id);
    let _ = fs::remove_dir_all(&extraction_directory_path).await;
    fs::create_dir_all(&extraction_directory_path).await?;

    // Unpack to temp dir
    tokio::task::spawn_blocking({
        let archive_file = archive_file.into_std().await;
        let extraction_directory_path = extraction_directory_path.clone();

        move || -> Result<()> {
            let mut archive = zip::ZipArchive::new(archive_file)?;
            archive.extract(&extraction_directory_path)?;

            Ok(())
        }
    }).await??;

    // Update and write state
    state.active_version = Some(current_version.clone());
    state.write_to_site_directory(&site.path).await?;

    // Update Caddy configuration
    caddy_controller.upsert_site_configuration(&domain, &extraction_directory_path).await?;

    Ok(())
}

async fn sites_worker(
    mut download_tasks_receiver: UnboundedReceiver<String>,
    sites: Arc<Sites>,
    caddy_controller: CaddyController,
) -> Result<!> {
    loop {
        let domain = download_tasks_receiver.recv().await.unwrap();
        handle_download(&sites, &caddy_controller, domain).await?;
    }
}

pub async fn start_sites_worker(
    sites_directory_path: Utf8PathBuf,
    caddy_controller: CaddyController,
) -> Result<SitesWorker> {
    let (download_tasks_sender, download_tasks_receiver) = tokio::sync::mpsc::unbounded_channel();
    let sites = Arc::new(load_sites(&sites_directory_path).await?);

    for (_, site) in sites.read().await.iter() {
        let state = site.state.read().await;
        if let Some(current_version) = &state.current_version {
            if let Some(active_version) = &state.active_version {
                if current_version.id != active_version.id {
                    download_tasks_sender.send(site.domain.clone()).unwrap()
                }

                caddy_controller.upsert_site_configuration(&site.domain, &site.path.join(&active_version.id)).await?;
            }
        }

        if state.is_outdated() {
            download_tasks_sender.send(site.domain.clone()).unwrap()
        }
    }

    let join_handle = tokio::spawn(sites_worker(
        download_tasks_receiver,
        Arc::clone(&sites),
        caddy_controller,
    ));

    Ok(SitesWorker {
        sites,
        sites_directory_path,
        download_tasks_sender,
        join_handle,
    })
}