commit 117cb6bc5e
init

Cargo.lock | 2076 lines (generated, new file)
File diff suppressed because it is too large.
Cargo.toml | 27 lines (new file)
@@ -0,0 +1,27 @@
[package]
name = "ssh-cert-dist"
version = "0.1.0"
authors = ["shimun <shimun@shimun.net>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.66"
async-trait = "0.1.59"
axum = { version = "0.6.1", features = ["http2"] }
axum-extra = { version = "0.4.1", features = ["typed-routing"] }
clap = { version = "4.0.29", features = ["env", "derive"] }
reqwest = "0.11.13"
serde = { version = "1.0.148", features = ["derive"] }
ssh-key = { version = "0.5.1", features = ["ed25519", "p256", "p384", "rsa", "signature"] }
tokio = { version = "1.22.0", features = ["io-std", "test-util", "tracing", "macros", "fs"] }
tower = { version = "0.4.13", features = ["util"] }
tower-http = { version = "0.3.4", features = ["map-request-body"] }
tracing = "0.1.37"
tracing-subscriber = "0.3.16"
url = "2.3.1"

[patch.crates-io]
ssh-key = { git = "https://github.com/a-dma/SSH.git", branch = "u2f_signatures" }
flake.lock | 62 lines (generated, new file)
@@ -0,0 +1,62 @@
{
  "nodes": {
    "naersk": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1662220400,
        "narHash": "sha256-9o2OGQqu4xyLZP9K6kNe1pTHnyPz0Wr3raGYnr9AIgY=",
        "owner": "nmattia",
        "repo": "naersk",
        "rev": "6944160c19cb591eb85bbf9b2f2768a935623ed3",
        "type": "github"
      },
      "original": {
        "owner": "nmattia",
        "repo": "naersk",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1669411043,
        "narHash": "sha256-LfPd3+EY+jaIHTRIEOUtHXuanxm59YKgUacmSzaqMLc=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "5dc7114b7b256d217fe7752f1614be2514e61bb8",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "naersk": "naersk",
        "nixpkgs": "nixpkgs",
        "utils": "utils"
      }
    },
    "utils": {
      "locked": {
        "lastModified": 1667395993,
        "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix | 103 lines (new file)
@@ -0,0 +1,103 @@
{
  description = "A naersk based rust flake";

  inputs = {
    utils.url = "github:numtide/flake-utils";
    naersk = {
      url = "github:nmattia/naersk";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    # fenix.url = "github:nix-community/fenix";
  };

  outputs = inputs @ { self, nixpkgs, utils, naersk, ... }:
    let
      root = inputs.source or self;
      pname = (builtins.fromTOML (builtins.readFile (root + "/Cargo.toml"))).package.name;
      # toolchains: stable, beta, default(nightly)
      toolchain = pkgs: if inputs ? fenix then inputs.fenix.packages."${pkgs.system}".complete.toolchain
        else with pkgs; symlinkJoin { name = "rust-toolchain"; paths = [ rustc cargo ]; };
      forSystem = system:
        let
          pkgs = nixpkgs.legacyPackages."${system}";
        in
        rec {
          # `nix build`
          packages.${pname} = (self.overlay pkgs pkgs).${pname};

          packages.dockerImage = pkgs.runCommandLocal "docker-${pname}.tar.gz" {} "${apps.streamDockerImage.program} | gzip --fast > $out";

          packages.default = packages.${pname};

          # `nix run`
          apps.${pname} = utils.lib.mkApp {
            drv = packages.${pname};
          };

          # `nix run .#streamDockerImage | docker load`
          apps.streamDockerImage = utils.lib.mkApp {
            drv = with pkgs; dockerTools.streamLayeredImage {
              name = pname;
              tag = self.shortRev or "latest";
              config = {
                Entrypoint = apps.default.program;
              };
            };
            exePath = "";
          };
          apps.default = apps.${pname};

          # `nix flake check`
          checks = {
            fmt = with pkgs; runCommandLocal "${pname}-fmt" { buildInputs = [ cargo rustfmt nixpkgs-fmt ]; } ''
              cd ${root}
              cargo fmt -- --check
              nixpkgs-fmt --check *.nix
              touch $out
            '';
          };

          hydraJobs = checks // packages;

          # `nix develop`
          devShell = pkgs.mkShell rec {
            RUST_SRC_PATH = "${if inputs ? fenix then "${toolchain pkgs}/lib/rustlib" else pkgs.rustPlatform.rustLibSrc}";
            RUST_LOG = "debug";
            nativeBuildInputs = with pkgs; [ (toolchain pkgs) cargo-watch rustfmt nixpkgs-fmt ] ++ packages.default.nativeBuildInputs;
            inherit (packages.default) buildInputs;
            shellHook = ''
              printf "Rust version:"
              rustc --version
              printf "\nbuild inputs: ${pkgs.lib.concatStringsSep ", " (map (bi: bi.name) (buildInputs ++ nativeBuildInputs))}"
            '';
          };

        };
    in
    (utils.lib.eachDefaultSystem forSystem) // {
      overlays.pinned = final: prev: (self.overlay final (import nixpkgs {
        inherit (final) localSystem;
      })).packages;
      overlay = final: prev:
        let
          naersk-lib = naersk.lib."${final.system}".override {
            rustc = toolchain prev;
            cargo = toolchain prev;
          };
          buildInputs = with prev; [
            openssl
          ];
          nativeBuildInputs = with prev; [
            pkg-config
          ];
        in
        {
          "${pname}" =
            naersk-lib.buildPackage {
              inherit pname root buildInputs nativeBuildInputs;
            };
        };

    };

}
src/api.rs | 205 lines (new file)
@@ -0,0 +1,205 @@
mod extract;

use std::collections::HashMap;
use std::net::SocketAddr;
use std::path::{self, PathBuf};
use std::sync::Arc;

use crate::certs::{load_cert, read_certs, read_pubkey, store_cert};
use anyhow::Context;
use axum::body::{self};
use axum::extract::{Path, State};
use axum::routing::{post, put};
use axum::{http::StatusCode, response::IntoResponse, Router};
use axum_extra::routing::{
    RouterExt, // for `Router::typed_*`
    TypedPath,
};
use clap::Parser;
use serde::Deserialize;
use ssh_key::{Certificate, PublicKey};
use tokio::sync::Mutex;
use tower::ServiceBuilder;
use tower_http::ServiceBuilderExt;

use self::extract::CertificateBody;

#[derive(Parser)]
pub struct ApiArgs {
    #[clap(short = 'a', long = "address")]
    address: SocketAddr,
    #[clap(short = 'c', long = "cert-store")]
    cert_dir: PathBuf,
    /// CA public key
    #[clap(long = "ca")]
    ca: PathBuf,
}

impl Default for ApiArgs {
    fn default() -> Self {
        Self {
            address: SocketAddr::from(([127, 0, 0, 1], 3000)),
            cert_dir: "certs".into(),
            ca: "certs/ca.pub".into(),
        }
    }
}

#[derive(Debug, Clone)]
struct ApiState {
    certs: Arc<Mutex<HashMap<String, Certificate>>>,
    cert_dir: PathBuf,
    ca: PublicKey,
}

impl ApiState {
    async fn new(
        cert_dir: impl AsRef<path::Path>,
        ca_file: impl AsRef<path::Path>,
    ) -> anyhow::Result<Self> {
        let ca = read_pubkey(ca_file.as_ref()).await?;
        let certs = read_certs(&ca, cert_dir.as_ref()).await?;
        Ok(Self {
            certs: Arc::new(Mutex::new(
                certs
                    .into_iter()
                    .map(|cert| (cert.key_id().to_string(), cert))
                    .collect(),
            )),
            cert_dir: cert_dir.as_ref().into(),
            ca,
        })
    }
}

pub async fn run(
    ApiArgs {
        address,
        cert_dir,
        ca,
    }: ApiArgs,
) -> anyhow::Result<()> {
    let state = ApiState::new(&cert_dir, &ca).await?;

    #[cfg(feature = "reload")]
    {
        let state = state.clone();

        tokio::spawn(async move {
            loop {
                tokio::time::sleep(std::time::Duration::from_secs(30)).await;
                if let Ok(certs) = read_certs(&state.ca, &state.cert_dir).await {
                    *state.certs.lock().await = certs
                        .into_iter()
                        .map(|cert| (cert.key_id().to_string(), cert))
                        .collect();
                    tracing::debug!("reloaded certs");
                }
            }
        });
    }

    let app = Router::new()
        .typed_get(get_certs_identifier)
        .route("/certs/:identifier", post(post_certs_identifier))
        .route("/certs/:identifier", put(put_certs_identifier_update))
        .layer(ServiceBuilder::new().map_request_body(body::boxed))
        .with_state(state);

    // run our app with hyper
    // `axum::Server` is a re-export of `hyper::Server`
    tracing::debug!("listening on {}", address);
    axum::Server::bind(&address)
        .serve(app.into_make_service())
        .await
        .unwrap();
    Ok(())
}

pub enum ApiError {
    Internal,
    NotFound,
    Invalid,
    LowSerial(u64, u64),
}

type ApiResult<T> = Result<T, ApiError>;

impl IntoResponse for ApiError {
    fn into_response(self) -> axum::response::Response {
        match self {
            Self::NotFound => (StatusCode::NOT_FOUND, "not here").into_response(),
            Self::LowSerial(prev, next) => (
                StatusCode::BAD_REQUEST,
                format!(
                    "new certificate serial must be greater than {}, got {}",
                    prev, next
                ),
            )
                .into_response(),
            _ => (StatusCode::INTERNAL_SERVER_ERROR, "Oops").into_response(),
        }
    }
}

impl From<anyhow::Error> for ApiError {
    fn from(_: anyhow::Error) -> Self {
        ApiError::Internal
    }
}

#[derive(TypedPath, Deserialize)]
#[typed_path("/certs/:identifier")]
pub struct GetCert {
    pub identifier: String,
}

/// Retrieve a certificate for an identifier.
/// TODO: add an option to require auth:
/// return Unauthorized with a challenge,
/// upon which the client signs the challenge with ssh-keysign
/// and issues a POST request.
async fn get_certs_identifier(
    GetCert { identifier }: GetCert,
    State(ApiState { certs, .. }): State<ApiState>,
) -> ApiResult<String> {
    let certs = certs.lock().await;
    let cert = certs.get(&identifier).ok_or(ApiError::NotFound)?;
    Ok(cert.to_openssh().context("to openssh")?)
}

/// POST with a signed challenge
async fn post_certs_identifier(
    State(ApiState { .. }): State<ApiState>,
    Path(_identifier): Path<String>,
) -> ApiResult<String> {
    unimplemented!()
}

/// Upload a cert with a higher serial than the previous one
async fn put_certs_identifier_update(
    State(ApiState {
        ca,
        cert_dir,
        certs,
        ..
    }): State<ApiState>,
    Path(identifier): Path<String>,
    CertificateBody(cert): CertificateBody,
) -> ApiResult<String> {
    cert.validate(&[ca.fingerprint(Default::default())])
        .map_err(|_| ApiError::Invalid)?;
    let _string_repr = cert.to_openssh();
    let prev = load_cert(&cert_dir, &ca, &identifier).await?;
    let mut prev_serial = 0;
    let serial = cert.serial();
    if let Some(prev) = prev {
        prev_serial = prev.serial();
        if prev.serial() >= cert.serial() {
            return Err(ApiError::LowSerial(prev_serial, serial));
        }
    }
    store_cert(&cert_dir, &ca, &cert).await?;
    certs.lock().await.insert(cert.key_id().to_string(), cert);
    Ok(format!("{} -> {}", prev_serial, serial))
}
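Not part of the commit, but a minimal sketch of how a caller might exercise the `PUT /certs/:identifier` route above using the `reqwest` dependency. The listen address (`127.0.0.1:3000`, matching `ApiArgs::default`) and the `new-cert.pub` file name are assumptions, not values taken from the source.

use ssh_key::Certificate;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Placeholder path to a freshly signed certificate; parse it first to fail early.
    let text = tokio::fs::read_to_string("new-cert.pub").await?;
    let cert = Certificate::from_openssh(&text)?;

    // The handler re-parses the raw OpenSSH text from the request body.
    let url = format!("http://127.0.0.1:3000/certs/{}", cert.key_id());
    let resp = reqwest::Client::new()
        .put(url)
        .body(cert.to_openssh()?)
        .send()
        .await?;

    // On success the handler answers with "old_serial -> new_serial";
    // 400 means the serial did not increase, 500 an internal error.
    println!("{}: {}", resp.status(), resp.text().await?);
    Ok(())
}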
src/api/extract.rs | 31 lines (new file)
@@ -0,0 +1,31 @@
use anyhow::Context;
use axum::{
    async_trait, body::BoxBody, extract::FromRequest, http::Request,
};
use ssh_key::Certificate;

use super::ApiError;

#[derive(Debug, Clone)]
pub struct CertificateBody(pub Certificate);

// we must implement `FromRequest` (and not `FromRequestParts`) to consume the body
#[async_trait]
impl<S> FromRequest<S, BoxBody> for CertificateBody
where
    S: Send + Sync,
{
    type Rejection = ApiError;

    async fn from_request(req: Request<BoxBody>, state: &S) -> Result<Self, Self::Rejection> {
        let body = String::from_request(req, state)
            .await
            .map_err(|_| ApiError::Invalid)?; // reject unreadable bodies instead of panicking

        let cert = Certificate::from_openssh(&body)
            .with_context(|| format!("failed to parse '{}'", body))?;

        Ok(Self(cert))
    }
}
src/certs.rs | 96 lines (new file)
@@ -0,0 +1,96 @@
use anyhow::Context;
use ssh_key::{Certificate, PublicKey};
use std::path::{Path, PathBuf};
use tokio::fs;
use tracing::trace;

pub async fn read_certs(
    ca: &PublicKey,
    path: impl AsRef<Path>,
) -> anyhow::Result<Vec<Certificate>> {
    read_dir(path.as_ref().join(ca_dir(ca))).await
}

pub async fn read_dir(path: impl AsRef<Path>) -> anyhow::Result<Vec<Certificate>> {
    let mut dir = fs::read_dir(path.as_ref())
        .await
        .context("read certs dir")?;
    let mut certs = Vec::new();
    while let Some(entry) = dir.next_entry().await? {
        //TODO: investigate why path().ends_with doesn't work
        if !entry
            .file_name()
            .into_string()
            .unwrap()
            .ends_with("-cert.pub")
        {
            trace!(
                "skipped {:?} due to missing '-cert.pub' extension",
                entry.path()
            );
            continue;
        }
        let contents = fs::read(&entry.path())
            .await
            .with_context(|| format!("read {:?}", entry.path()))?;
        let string_repr = parse_utf8(contents)?;
        let cert = Certificate::from_openssh(&string_repr)
            .with_context(|| format!("parse {:?} as openssh certificate", entry.path()))?;
        certs.push(cert);
    }
    Ok(certs)
}

fn parse_utf8(bytes: Vec<u8>) -> anyhow::Result<String> {
    String::from_utf8(bytes).context("invalid utf-8")
}

pub async fn read_pubkey(path: impl AsRef<Path>) -> anyhow::Result<PublicKey> {
    let contents = fs::read(&path)
        .await
        .with_context(|| format!("read {:?}", path.as_ref()))?;
    let string_repr = parse_utf8(contents)?;
    PublicKey::from_openssh(&string_repr)
        .with_context(|| format!("parse '{}' as public key", string_repr))
}

fn ca_dir(ca: &PublicKey) -> String {
    ca.comment().to_string()
}

fn cert_path(ca: &PublicKey, identifier: &str) -> String {
    let _ca_fingerprint = ca.fingerprint(Default::default());
    format!("{}/{}-cert.pub", ca_dir(ca), identifier)
}

pub async fn store_cert(
    cert_dir: impl AsRef<Path>,
    ca: &PublicKey,
    cert: &Certificate,
) -> anyhow::Result<PathBuf> {
    // TODO: proper store
    let path = cert_dir.as_ref().join(cert_path(&ca, cert.key_id()));
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent).await?;
    }
    fs::write(&path, cert.to_openssh().context("encode cert")?).await?;
    Ok(path)
}

pub async fn load_cert(
    cert_dir: impl AsRef<Path>,
    ca: &PublicKey,
    identifier: &str,
) -> anyhow::Result<Option<Certificate>> {
    let path = cert_dir.as_ref().join(cert_path(ca, identifier));
    if !path.exists() {
        return Ok(None);
    }
    let contents = fs::read(&path)
        .await
        .with_context(|| format!("read {:?}", &path))?;
    let string_repr = parse_utf8(contents)?;
    Ok(Some(Certificate::from_openssh(&string_repr).with_context(
        || format!("parse {:?} as openssh certificate", &path),
    )?))
}
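For illustration (not part of the commit): `store_cert` and `load_cert` both resolve to `<cert_dir>/<CA comment>/<key_id>-cert.pub` via `cert_path`, so a round trip through this module looks roughly like the sketch below. The `ca.pub` and `host1-cert.pub` file names and the `certs` directory are placeholders, and the helpers are called as if from inside src/certs.rs.

use ssh_key::Certificate;

async fn example() -> anyhow::Result<()> {
    // Placeholder inputs: any CA public key and a certificate it issued.
    let ca = read_pubkey("ca.pub").await?;
    let cert_text = tokio::fs::read_to_string("host1-cert.pub").await?;
    let cert = Certificate::from_openssh(&cert_text)?;

    // Writes certs/<CA comment>/<key_id>-cert.pub, creating the per-CA directory.
    let path = store_cert("certs", &ca, &cert).await?;
    println!("stored at {:?}", path);

    // Reads the same location back; Ok(None) would mean nothing is stored yet.
    let reloaded = load_cert("certs", &ca, cert.key_id()).await?;
    assert_eq!(reloaded.map(|c| c.serial()), Some(cert.serial()));
    Ok(())
}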
src/client.rs | 90 lines (new file)
@@ -0,0 +1,90 @@
use axum_extra::routing::TypedPath;
use clap::{Args, Parser, Subcommand};
use reqwest::StatusCode;
use ssh_key::Certificate;
use std::path::PathBuf;
use tokio::fs;
use tracing::{debug, info};
use url::Url;

use crate::{api::GetCert, certs::read_dir};

#[derive(Parser)]
pub struct ClientArgs {
    /// URL of the API endpoint
    #[clap(short = 'a', long = "api-endpoint")]
    api: Url,
}

#[derive(Parser)]
pub struct FetchArgs {
    #[clap(flatten)]
    args: ClientArgs,
    #[clap(short = 'c', long = "cert-dir", default_value = "~/.ssh")]
    cert_dir: PathBuf,
    /// CA public key
    #[clap(long = "ca")]
    ca: PathBuf,
}

#[derive(Args)]
pub struct ClientCommand {
    #[clap(subcommand)]
    cmd: ClientCommands,
}

#[derive(Subcommand)]
pub enum ClientCommands {
    Fetch(FetchArgs),
    Upload,
}

pub async fn run(ClientCommand { cmd }: ClientCommand) -> anyhow::Result<()> {
    match cmd {
        ClientCommands::Fetch(args) => fetch(args).await,
        ClientCommands::Upload => unimplemented!(),
    }
}

async fn fetch(
    FetchArgs {
        cert_dir,
        ca: _,
        args: ClientArgs { api },
    }: FetchArgs,
) -> anyhow::Result<()> {
    let certs = read_dir(&cert_dir).await?;
    let client = reqwest::Client::new();
    for cert in certs {
        let path = GetCert {
            identifier: cert.key_id().to_string(),
        };
        debug!("checking {}", cert.key_id());
        let url = api.join(path.to_uri().path())?;
        let resp = client.get(url.clone()).send().await?;
        if resp.status() != StatusCode::OK {
            continue;
        }
        let string_repr = resp.text().await?;
        let remote_cert = Certificate::from_openssh(&string_repr)?;
        if remote_cert
            .validate(&[cert.signature_key().fingerprint(Default::default())])
            .is_err()
        {
            info!("invalid signature {}, skipping", &url);
            continue;
        }
        if cert.serial() >= remote_cert.serial() {
            debug!("{} is not newer than local version", &url);
            continue;
        }
        fs::write(cert_dir.join(cert.key_id()), remote_cert.to_openssh()?).await?;
        info!(
            "updated {}: {} -> {}",
            cert.key_id(),
            cert.serial(),
            remote_cert.serial()
        );
    }
    Ok(())
}
src/main.rs | 29 lines (new file)
@@ -0,0 +1,29 @@
use api::ApiArgs;
use clap::Parser;
#[cfg(feature = "client")]
use client::ClientCommand;
use tracing_subscriber;

mod api;
mod certs;
#[cfg(feature = "client")]
mod client;

#[derive(Parser)]
enum Command {
    Server(ApiArgs),
    #[cfg(feature = "client")]
    Client(ClientCommand),
}

#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();

    match Command::parse() {
        Command::Server(args) => api::run(args).await?,
        #[cfg(feature = "client")]
        Command::Client(args) => client::run(args).await?,
    }
    Ok(())
}
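For orientation (not part of the commit): with the derives above, clap should expose a `server` subcommand and, when the binary is built with the `client` feature, a `client fetch` subcommand, so invocations would look roughly like `ssh-cert-dist server --address 127.0.0.1:3000 --cert-store certs --ca certs/ca.pub` and `ssh-cert-dist client fetch --api-endpoint https://certs.example --cert-dir ~/.ssh --ca ca.pub`, where the address and URL are placeholders. Note that the `client` and `reload` features referenced via `#[cfg(feature = ...)]` are not declared in the Cargo.toml of this commit, so a `[features]` table would be needed before those code paths can be enabled.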