Compare commits

1 commit

Commit 419049068a: skeleton [CI SKIP] (2019-09-10 16:00:10 +02:00)
14 changed files with 1551 additions and 1124 deletions


@@ -3,11 +3,12 @@ name: default
steps:
- name: build
image: rust:1.41.0-buster
image: rust:1.37.0-buster
commands:
- apt update && apt install git clang make pkg-config nettle-dev libssl-dev capnproto libsqlite3-dev -y
- cargo test
- cargo install --path . --root .
- rustup target add x86_64-unknown-linux-musl
- cargo build --release --target x86_64-unknown-linux-musl
- name: dockerize
image: plugins/docker
settings:

Cargo.lock (generated, 2140 changed lines): file diff suppressed because it is too large.


@@ -1,23 +1,22 @@
[package]
name = "brownpaper"
version = "0.4.0"
authors = ["shimun <shimun@shimun.net>"]
version = "0.1.0"
authors = ["shim_ <shimun@shimun.net>"]
edition = "2018"
[profile.release]
opt-level = 'z'
panic = 'abort'
[dependencies]
snap = "0.1"
rustc-serialize = "0.3.19"
iron = "0.6.0"
rand = "0.4.2"
byteorder = "1.3.2"
chrono = "0.4.9"
sequoia-openpgp = "0.12.0"
lazy_static = "1.4.0"
c2-chacha = "0.3.3"
sequoia-openpgp = "0.9.0"
c2-chacha = "0.2.2"
sha2 = "0.8.0"
hex = "0.3.2"
tower-web = "0.3.7"
tokio = "0.1.22"


@@ -1,21 +1,9 @@
FROM rust:1.41.0 as builder
COPY . /bp
RUN apt update
RUN apt install clang nettle-dev -y
RUN cargo install --path /bp --root /usr
FROM debian:buster-slim
VOLUME /snips
EXPOSE 3000
COPY --from=builder /usr/bin/brownpaper /bin/
WORKDIR /
COPY target/release/brownpaper /bin/
ENTRYPOINT [ "/bin/brownpaper" ]

bp.sh (9 changed lines)

@@ -1,9 +0,0 @@
#!/usr/bin/env bash
BP_ENDPOINT=${BROWNPAPER_ENDPOINT:-https://shimun.net/bp}
CURL_ARGS="--write-out %{url_effective}\\n --silent -o /dev/null"
GPG_ARGS="$([ ! -z "$BROWNPAPER_KEY" ] && echo "--local-user $BROWNPAPER_KEY")"
if [ ! -z "$1" ]; then
printf "brownpaper$1" | gpg --sign -a $GPG_ARGS | curl -s --data @- -X POST $BP_ENDPOINT/new -Ls $CURL_ARGS
else
(printf "brownpaper"; cat) | gpg --sign -a $GPG_ARGS | curl -s --data @- -X POST $BP_ENDPOINT/new -Ls $CURL_ARGS
fi


@@ -1,31 +0,0 @@
{ pkgs ? (import <nixpkgs> { })
, callPackage ? pkgs.callPackage
, stdenv ? pkgs.stdenv
, fetchgit ? pkgs.fetchgit
, name ? "brownpaper"
, src ? ./.
}:
let
filteredSrc = src; # flake only includes files tracked by git: builtins.filterSource pkgs.lib.cleanSourceFilter src;
crate2nix_tools = callPackage (pkgs.crate2nix.src + "/tools.nix") { };
overrides = with pkgs.llvmPackages_10;
pkgs.defaultCrateOverrides // rec {
nettle-sys = attrs: rec {
nativeBuildInputs = [ clang pkgs.pkg-config ];
buildInputs = [ pkgs.nettle ];
LIBCLANG_PATH = "${libclang.lib}/lib";
};
brownpaper = attrs: {
buildInputs = [ pkgs.gmp ];
};
};
client = with pkgs; runCommandLocal "brownpaper"
{
script = "${filteredSrc}/bp.sh";
nativeBuildInputs = [ makeWrapper ];
} ''
makeWrapper $script $out/bin/brownpaper \
--prefix PATH : ${with pkgs; lib.makeBinPath [ bash curl gnupg ]}
'';
in
{ server = (callPackage (crate2nix_tools.generatedCargoNix { inherit name; src = filteredSrc; }) { inherit pkgs; defaultCrateOverrides = overrides; }).rootCrate.build; inherit client; }

flake.lock (generated, 42 changed lines)

@@ -1,42 +0,0 @@
{
"nodes": {
"flake-utils": {
"locked": {
"lastModified": 1642700792,
"narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1642847654,
"narHash": "sha256-0rwMCG/xGoVlV01qMZSQu/1GdhKNpWPWyu2Xr5CNhg4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "7534942c6a009e8035d24c273b77a0b275db9bf1",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}


@@ -1,64 +0,0 @@
{
description = "Brownpaper pastebin";
inputs.flake-utils.url = "github:numtide/flake-utils";
inputs.nixpkgs.url = "github:NixOS/nixpkgs";
inputs.naersk.url = "github:nix-community/naersk";
outputs = { self, nixpkgs, flake-utils, naersk }:
(flake-utils.lib.eachSystem [ "x86_64-linux" ]
(system:
let
pkgs = import nixpkgs { inherit system; };
naersk-lib = naersk.lib."${system}";
bp = pkgs.callPackage ./default.nix { inherit pkgs; src = ./.; };
in
rec {
apps = builtins.mapAttrs (_n: pkg: flake-utils.lib.mkApp { drv = pkg; }) packages;
defaultApp = apps.brownpaper;
packages = rec {
brownpaper = bp.client;
brownpaper-server = bp.server;
brownpaper-server-nk = naersk-lib.buildPackage (with pkgs; with llvmPackages_10; {
pname = "brownpaper";
root = self;
nativeBuildInputs = [ clang pkg-config ];
buildInputs = [ gmp nettle ];
LIBCLANG_PATH = "${libclang.lib}/lib";
});
brownpaper-server-docker = pkgs.dockerTools.buildLayeredImage {
name = "brownpaper";
config = {
Volume = "/snips";
Port = "3000";
Cmd = [ "${brownpaper-server}/bin/brownpaper" ];
};
};
};
defaultPackage = packages.brownpaper;
devShell = with pkgs; mkShell {
buildInputs = [ cargo rustfmt nettle llvmPackages_10.clang llvmPackages_10.libclang pkgconfig ];
LIBCLANG_PATH = "${llvmPackages_10.libclang}/lib";
};
hydraJobs.build = defaultPackage;
}
) // rec {
nixosModules.brownpaper = import ./mod.nix;
nixosModule = nixosModules.brownpaper;
}) // rec {
overlay = final: prev:
let
bp = final.callPackage ./default.nix { pkgs = final; src = self; };
in
{
brownpaper = bp.client;
brownpaper-server = bp.server;
};
overlays = {
pinned = final: prev: (overlay (import nixpkgs { inherit (final) system; }) { });
};
};
}

mod.nix (102 changed lines)

@@ -1,102 +0,0 @@
{ lib, pkgs, config, ... }:
with lib;
let
cfg = config.services.brownpaper;
cfgc = config.programs.brownpaper;
package = if pkgs ? brownpaper-server && pkgs ? brownpaper then { server = pkgs.brownpaper-server; client = pkgs.brownpaper; } else (pkgs.callPackage ./. { inherit pkgs; src = ./.; });
keyDir = pkgs.runCommand "brownpaper-keys" { } ''
mkdir -p $out
${concatStringsSep " && " (builtins.map (key: "cp ${key} $out/") cfg.pgpKeys)}
'';
in
{
options.services.brownpaper = {
enable = mkEnableOption "brownpaper service";
listen = mkOption {
type = types.str;
default = "127.0.0.1";
};
port = mkOption {
type = types.int;
default = 3000;
};
dataDir = mkOption {
type = types.path;
default = "/var/lib/brownpaper";
};
user = mkOption {
type = types.str;
default = "brownpaper";
};
pgpKeys = mkOption {
type = with types; listOf path;
default = [ ];
};
gc = {
enable = mkEnableOption "delete old snippets";
dates = mkOption {
type = types.str;
default = "00:00";
description = ''
Specification (in the format described by
<citerefentry><refentrytitle>systemd.time</refentrytitle>
<manvolnum>7</manvolnum></citerefentry>) of the time at
which the garbage collector will run.
'';
};
maxAge = mkOption {
type = types.ints.positive;
default = 60 * 24 * 30;
description = "maximum age in minutes after which snippets will be garbage collected. Defaults to 30 days";
};
};
};
options.programs.brownpaper = {
enable = mkEnableOption "brownpaper client";
endpoint = mkOption {
type = types.str;
default = "http://${cfg.listen}:${toString cfg.port}";
};
};
config = {
users.users = mkIf (cfg.enable && cfg.user == "brownpaper") { ${cfg.user} = { isSystemUser = true; group = "brownpaper"; }; };
systemd.services = mkIf cfg.enable {
brownpaper-init.script = ''
mkdir -p '${cfg.dataDir}'
chown ${cfg.user} -R '${cfg.dataDir}'
'' + (optionalString (cfg.pgpKeys != [ ]) ''
DATADIR='${toString cfg.dataDir}'
([ ! -s "$DATADIR/keys" ] && [ -d "$DATADIR/keys" ]) && mv "$DATADIR/keys" "$DATADIR/keys.bak"
[ -s "$DATADIR/keys" ] && rm "$DATADIR/keys"
ln -s ${keyDir} "$DATADIR/keys"
'');
brownpaper = {
wantedBy = [ "multi-user.target" ];
wants = [ "brownpaper-init.service" ];
after = [ "brownpaper-init.service" "network-online.target" ];
path = [ pkgs.coreutils ];
environment.BROWNPAPER_STORAGE_DIR = "${toString cfg.dataDir}";
confinement = {
enable = true;
packages = with pkgs; [ bash coreutils findutils tzdata keyDir ];
};
script = ''
${package.server}/bin/brownpaper ${cfg.listen}:${toString cfg.port}
'';
serviceConfig = {
BindPaths = [ cfg.dataDir ] ++ (optional (cfg.pgpKeys != [ ]) keyDir);
User = cfg.user;
};
};
brownpaper-gc = mkIf cfg.gc.enable {
startAt = cfg.gc.dates;
script = "${pkgs.findutils}/bin/find ${cfg.dataDir} -maxdepth 1 -type f -mmin +${toString cfg.gc.maxAge} -delete";
};
};
environment.systemPackages = optionals cfgc.enable [
(pkgs.writeShellScriptBin "brownpaper" ''
BROWNPAPER_ENDPOINT='${cfgc.endpoint}' ${package.client}/bin/brownpaper "$@"
'')
];
};
}


@@ -1,12 +1,11 @@
use c2_chacha::stream_cipher::{NewStreamCipher, SyncStreamCipher, SyncStreamCipherSeek};
use c2_chacha::{ChaCha12, ChaCha20};
use std::convert::TryInto;
use c2_chacha::stream_cipher::{NewStreamCipher, SyncStreamCipher};
use c2_chacha::ChaCha20;
use std::io::{Read, Result, Write};
pub struct ChaChaReader<'a>(ChaCha20, &'a mut Read);
pub struct ChaChaReader<'a>(ChaCha20, &'a mut dyn Read);
impl<'a> ChaChaReader<'a> {
pub fn new(key: &[u8], nonce: &[u8], source: &'a mut Read) -> ChaChaReader<'a> {
pub fn new(key: &[u8], nonce: &[u8], source: &'a mut dyn Read) -> ChaChaReader<'a> {
ChaChaReader(ChaCha20::new_var(key, nonce).unwrap(), source)
}
}
@@ -19,10 +18,10 @@ impl<'a> Read for ChaChaReader<'a> {
}
}
pub struct ChaChaWriter<'a>(ChaCha20, &'a mut Write);
pub struct ChaChaWriter<'a>(ChaCha20, &'a mut dyn Write);
impl<'a> ChaChaWriter<'a> {
pub fn new(key: &[u8], nonce: &[u8], sink: &'a mut Write) -> ChaChaWriter<'a> {
pub fn new(key: &[u8], nonce: &[u8], sink: &'a mut dyn Write) -> ChaChaWriter<'a> {
ChaChaWriter(ChaCha20::new_var(key, nonce).unwrap(), sink)
}
}
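
The hunks above only show the struct definitions and constructors moving to dyn trait objects; the bodies of the Read and Write impls are elided. As a rough sketch of how such a wrapper usually looks with c2-chacha (an assumption, not necessarily the exact code in this file), the reader delegates to the inner source and then applies the keystream over the bytes it actually received:

use std::io::{Read, Result};

impl<'a> Read for ChaChaReader<'a> {
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        // Read ciphertext (or plaintext) from the wrapped reader first...
        let n = self.1.read(buf)?;
        // ...then XOR the ChaCha20 keystream in place over exactly those n bytes.
        self.0.apply_keystream(&mut buf[..n]);
        Ok(n)
    }
}

The writer side would mirror this: apply the keystream to the outgoing bytes, then forward them to the wrapped Write.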


@@ -1,140 +1,13 @@
#[macro_use]
extern crate tower_web;
#[macro_use]
extern crate lazy_static;
extern crate chrono;
extern crate iron;
extern crate rand;
extern crate sequoia_openpgp as openpgp;
extern crate snap;
mod chacha_io;
mod pgp;
mod server;
mod snippet;
use crate::pgp::KnownKeys;
use crate::snippet::*;
use iron::method::Method;
use iron::modifiers::Redirect;
use iron::prelude::*;
use iron::url::Url;
use iron::mime::Mime;
use sha2::Digest;
use std::env::{self, args};
use std::io;
use std::io::prelude::*;
use std::iter::Iterator;
use std::net::SocketAddr;
use std::path::Path;
use std::sync::Arc;
use std::sync::Mutex;
lazy_static! {
static ref STORAGE_DIR: String =
env::var("BROWNPAPER_STORAGE_DIR").unwrap_or("/snips".to_string());
static ref KNOWN_KEYS: Arc<Mutex<KnownKeys>> = Arc::new(Mutex::new(
KnownKeys::load_dir([&*STORAGE_DIR, "keys"].join("/")).expect("Failed to load pubkeys")
));
}
const VERSION: &str = env!("CARGO_PKG_VERSION");
fn handle(req: &mut Request) -> IronResult<Response> {
println!("{}", req.url);
let storage = SnippetStorage::new(&Path::new(&*STORAGE_DIR));
let segments: Vec<&str> = req.url.path();
match (&req.method, segments.first()) {
(Method::Get, Some(&"version")) => Ok(Response::with((iron::status::Ok, VERSION))),
(Method::Post, Some(path)) => {
if path == &"new" {
let snip = {
let pgp_text: String = {
let bytes = ((&mut req.body).bytes().take(1024 * 512).collect::<Result<
Vec<u8>,
io::Error,
>>(
))
.map_err(|err| IronError::new(err, ""))?;
String::from_utf8(bytes)
.map_err(|err| IronError::new(err, "Invalid utf8"))?
};
let b_text = KNOWN_KEYS
.lock()
.unwrap() //.map_err(|_| IronError::new(std::error::Error::from("Mutex Err"), "PGP Context unavailable"))?
.verify(pgp_text.as_bytes())
.map_err(|err| IronError::new(err, "Untrusted signature"))?;
let text = String::from_utf8(b_text).unwrap();
Snippet::random(&storage).write(&*text).map_err(|err| {
let msg = format!("Failed to save snippet: {:?}", &err);
IronError::new(err, msg)
})
};
snip.map(|snip| {
let mut snip_url: Url = req.url.clone().into();
snip_url.set_path(&*("/".to_string() + &*snip.id));
Response::with((
iron::status::TemporaryRedirect,
Redirect(iron::Url::from_generic_url(snip_url).unwrap()),
))
/*Response::with((
iron::status::Ok,
format!(
"<meta http-equiv=\"refresh\" content=\"0; url={}/\" />",
snip_url
),
))*/
})
} else {
Ok(Response::with((
iron::status::BadRequest,
"Post to /new or die",
)))
}
}
(Method::Get, Some(path)) => {
let (id, mime) = {
let mut parts = path.split(".");
(
parts.next().unwrap().to_string(),
Some(parts.collect::<Vec<_>>().join("/"))
.filter(|s| s.len() > 0)
.and_then(|format| format.parse::<Mime>().ok()),
)
};
let mime = mime.unwrap_or("text/plain".parse().unwrap());
let att = storage.open(&id).map(|snip| snip.contents()).map(|res| {
Response::with(
match res.map(|text| (iron::status::Ok, text)).map_err(|err| {
let msg = format!("Failed to load snippet: {:?}", &err);
msg
}) {
Ok(res) => res,
Err(e) => (iron::status::InternalServerError, e),
},
)
.set(mime)
});
Ok(att.unwrap_or(Response::with((iron::status::NotFound, "Not here sry"))))
}
(Method::Get, _) => Ok(Response::with((iron::status::NotFound, "Wrong path pal"))),
_ => Ok(Response::with((iron::status::BadRequest, "Give or take"))),
}
}
fn main() {
let chain = Chain::new(handle);
println!("Starting brownpaper: {}", &*STORAGE_DIR);
Iron::new(chain).http(
args()
.skip(1)
.next()
.map(|ip| {
ip.parse::<SocketAddr>()
.expect("can't parse socket address")
})
.unwrap_or("0.0.0.0:3000".parse::<SocketAddr>().unwrap())
.to_string()
.as_str(),
);
server::run();
}


@@ -1,12 +1,13 @@
use crate::snippet::*;
use openpgp::parse::stream::*;
use openpgp::parse::Parse;
use openpgp::*;
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
#[derive(Debug, Clone)]
pub struct KnownKeys {
keys: Vec<openpgp::TPK>,
}
@@ -40,29 +41,6 @@ impl KnownKeys {
let mut v = Verifier::<&KnownKeys>::from_reader(r, helper, None).map_err(|e| {
io::Error::new(io::ErrorKind::InvalidData, "Failed to verify signature")
})?;
let mut buf = [0u8; 512];
let bp = "brownpaper".as_bytes();
loop {
match v.read(&mut buf)? {
0 => break,
read => {
// first buffer read
if content.len() == 0 {
if !(buf.len() > bp.len() && bp == &buf[0..bp.len()]) {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"Failed to verify signature(prefix)",
));
} else {
// remove prefix
content.extend_from_slice(&buf[bp.len()..read])
}
} else {
content.extend_from_slice(&buf[0..read]);
}
}
}
}
if v.read_to_end(&mut content).is_err() {
return Err(io::Error::new(
io::ErrorKind::InvalidData,

src/server.rs (new file, 53 lines)

@@ -0,0 +1,53 @@
use crate::pgp::KnownKeys;
use crate::snippet::*;
use std::path::Path;
use std::sync::Mutex;
use tokio::prelude::*;
use tower_web::ServiceBuilder;
use tower_web::{derive_resource_impl, impl_web};
#[derive(Debug)]
struct Brownpaper {
storage: SnippetStorage,
known_keys: Mutex<KnownKeys>,
}
impl_web! {
impl Brownpaper {
#[get("/")]
async fn root(&self) -> &'static str {
"Post /new, Get /<id>"
}
#[post("/new")]
async fn new(&self) -> Result<> {}
#[get("/:id")]
async fn snipptet(&self, id: String) -> Result<> {}
#[get("/file/:file_id")]
async fn snipptet_file(&self, file_id: String) -> Result<> {}
}
}
#[cfg(not(debug_assertions))]
const STORAGE_DIR: &str = "/snips";
#[cfg(debug_assertions)]
const STORAGE_DIR: &str = "/tmp";
pub fn run() {
let addr = "0.0.0.0:3000".parse().expect("Invalid address");
println!("Listening on http://{}", addr);
ServiceBuilder::new()
.resource(Brownpaper {
storage: SnippetStorage::new(Path::new(STORAGE_DIR)),
known_keys: Mutex::new(
KnownKeys::load_dir([STORAGE_DIR, "keys"].join("/"))
.expect("Failed to load PGP Keys"),
),
})
.run(&addr)
.unwrap();
}
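
The three routes above are intentionally empty placeholders in this skeleton commit; the bare Result<> return types do not compile yet. A rough sketch of the logic the /new handler will presumably need, modelled on the Iron handler removed from main.rs in this same diff (the exact error types and the return value of write are assumptions):

use std::io;

// Hypothetical helper mirroring the old Iron handler: verify the PGP-signed
// body against the known keys, persist the snippet, and return its id.
fn create_snippet(
    storage: &SnippetStorage,
    keys: &KnownKeys,
    signed_body: &[u8],
) -> io::Result<String> {
    // The old handler mapped a verification failure to "Untrusted signature".
    let verified = keys
        .verify(signed_body)
        .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "untrusted signature"))?;
    let text = String::from_utf8(verified)
        .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid utf8"))?;
    // Snippet::random + write is how the old handler stored content.
    let snip = Snippet::random(storage)
        .write(&text)
        .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{:?}", e)))?;
    Ok(snip.id)
}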


@@ -1,31 +1,31 @@
use crate::chacha_io::{ChaChaReader, ChaChaWriter};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use chrono::*;
use rand::Rng;
use sha2::{Digest, Sha256};
use std::convert::TryInto;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::iter::Iterator;
use std::path::{Path, PathBuf};
#[derive(Debug)]
pub struct Snippet<'a> {
pub id: String,
pub storage: &'a SnippetStorage<'a>,
pub storage: &'a SnippetStorage,
}
#[allow(dead_code)]
#[derive(Debug)]
pub struct SnippetMeta {
created: DateTime<Utc>,
compression: Option<String>,
}
pub struct SnippetStorage<'a> {
directory: &'a Path,
#[derive(Debug)]
pub struct SnippetStorage {
directory: PathBuf,
}
impl<'a> Snippet<'a> {
@@ -80,7 +80,7 @@ impl<'a> Snippet<'a> {
hdl.read_exact(&mut comp)?;
let comp = String::from_utf8(comp).unwrap();
Ok(SnippetMeta {
created: Utc.timestamp(timestamp, 0),
created: Utc.timestamp_millis(timestamp),
compression: Some(comp).filter(|_| comp_len > 0),
})
}
@@ -128,10 +128,10 @@ impl<'a> Snippet<'a> {
}
}
impl<'a> SnippetStorage<'a> {
pub fn new(directory: &'a Path) -> SnippetStorage<'a> {
impl SnippetStorage {
pub fn new(directory: impl Into<PathBuf>) -> SnippetStorage {
SnippetStorage {
directory: directory,
directory: directory.into(),
}
}
pub fn file_id(id: &str) -> String {
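
One practical consequence of the storage refactor above: SnippetStorage now owns its directory as a PathBuf and new accepts anything convertible into one, so callers no longer have to keep a borrowed Path alive for the storage's lifetime. A minimal usage sketch, assuming the surrounding crate types:

use std::path::PathBuf;

// Any Into<PathBuf> works now: &str, String, &Path or PathBuf.
fn open_default_storage() -> SnippetStorage {
    SnippetStorage::new("/snips")
}

fn open_storage_at(dir: PathBuf) -> SnippetStorage {
    SnippetStorage::new(dir)
}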