diff --git a/build.rs b/build.rs
index e2b94f4..ad2f59e 100644
--- a/build.rs
+++ b/build.rs
@@ -1,4 +1,4 @@
-use vergen::{Config, vergen};
+use vergen::{vergen, Config};
 
 fn main() {
     vergen(Config::default()).unwrap();
diff --git a/gitarena-common/src/database.rs b/gitarena-common/src/database.rs
index d336c6a..97756e2 100644
--- a/gitarena-common/src/database.rs
+++ b/gitarena-common/src/database.rs
@@ -30,7 +30,12 @@ pub async fn create_postgres_pool(module: &'static str, max_conns: Option<u32>)
         .after_connect(move |connection| {
             Box::pin(async move {
                 // If setting the app name fails it's not a big deal if the connection is still fine so let's ignore the error
-                let _ = connection.execute(ONCE.get_or_init(|| format!("set application_name = '{}';", module)).as_str()).await;
+                let _ = connection
+                    .execute(
+                        ONCE.get_or_init(|| format!("set application_name = '{}';", module))
+                            .as_str(),
+                    )
+                    .await;
                 Ok(())
             })
         })
@@ -57,8 +62,11 @@ async fn read_database_config() -> Result {
             let password = fs::read_to_string(file).await?;
             options = options.password(password.as_str());
         }
-        Err(VarError::NotUnicode(_)) => bail!("`DATABASE_PASSWORD_FILE` environment variable is not valid unicode"),
-        Err(VarError::NotPresent) => { /* No password auth required, or it was already set in the connection string; safe to ignore */ }
+        Err(VarError::NotUnicode(_)) => {
+            bail!("`DATABASE_PASSWORD_FILE` environment variable is not valid unicode")
+        }
+        Err(VarError::NotPresent) => { /* No password auth required, or it was already set in the connection string; safe to ignore */
+        }
     }
 
     Ok(options)
@@ -66,8 +74,12 @@ async fn read_database_config() -> Result {
 
 fn get_max_connections() -> Result<u32> {
     Ok(match env::var("MAX_POOL_CONNECTIONS") {
-        Ok(env_str) => env_str.parse::<u32>().context("Unable to parse MAX_POOL_CONNECTIONS environment variable into a u32")?,
+        Ok(env_str) => env_str
+            .parse::<u32>()
+            .context("Unable to parse MAX_POOL_CONNECTIONS environment variable into a u32")?,
         Err(VarError::NotPresent) => num_cpus::get() as u32,
-        Err(VarError::NotUnicode(_)) => bail!("MAX_POOL_CONNECTIONS environment variable is not a valid unicode string")
+        Err(VarError::NotUnicode(_)) => {
+            bail!("MAX_POOL_CONNECTIONS environment variable is not a valid unicode string")
+        }
     })
 }
diff --git a/gitarena-common/src/database/models.rs b/gitarena-common/src/database/models.rs
index 8415f35..9bb2d71 100644
--- a/gitarena-common/src/database/models.rs
+++ b/gitarena-common/src/database/models.rs
@@ -1,5 +1,5 @@
-use std::fmt::{Display, Formatter};
 use std::fmt;
+use std::fmt::{Display, Formatter};
 
 use anyhow::{bail, Error};
 use serde::{Deserialize, Serialize};
@@ -12,7 +12,7 @@ pub enum KeyType {
     EcdsaSha2Nistp256,
     EcdsaSha2Nistp384,
     EcdsaSha2Nistp521,
-    SshEd25519
+    SshEd25519,
 }
 
 impl Display for KeyType {
@@ -24,7 +24,7 @@ impl Display for KeyType {
             EcdsaSha2Nistp256 => "ecdsa-sha2-nistp256",
             EcdsaSha2Nistp384 => "ecdsa-sha2-nistp384",
             EcdsaSha2Nistp521 => "ecdsa-sha2-nistp521",
-            SshEd25519 => "ssh-ed25519"
+            SshEd25519 => "ssh-ed25519",
         })
     }
 }
@@ -41,7 +41,7 @@ impl TryFrom<&str> for KeyType {
             "ecdsa-sha2-nistp384" => EcdsaSha2Nistp384,
             "ecdsa-sha2-nistp521" => EcdsaSha2Nistp521,
             "ssh-ed25519" => SshEd25519,
-            _ => bail!("Unknown key type: {}", value)
+            _ => bail!("Unknown key type: {}", value),
         })
     }
 }
diff --git a/gitarena-common/src/ipc.rs b/gitarena-common/src/ipc.rs
index 8fa474f..f374921 100644
--- a/gitarena-common/src/ipc.rs
+++ b/gitarena-common/src/ipc.rs
@@ -1,32 +1,43 @@
+use std::fmt::{Debug, Display, Formatter};
 use 
std::io::{Read, Write}; use std::{fmt, fs, mem}; -use std::fmt::{Debug, Display, Formatter}; use anyhow::{Context, Result}; -use bincode::config::{AllowTrailing, Bounded, LittleEndian, VarintEncoding, WithOtherEndian, WithOtherIntEncoding, WithOtherLimit, WithOtherTrailing}; +use bincode::config::{ + AllowTrailing, Bounded, LittleEndian, VarintEncoding, WithOtherEndian, WithOtherIntEncoding, + WithOtherLimit, WithOtherTrailing, +}; use bincode::{DefaultOptions, Options as _}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; // World's longest type, thank you -pub type BincodeType = WithOtherTrailing, LittleEndian>, VarintEncoding>, AllowTrailing>; +pub type BincodeType = WithOtherTrailing< + WithOtherIntEncoding< + WithOtherEndian, LittleEndian>, + VarintEncoding, + >, + AllowTrailing, +>; /// [Type-length-value](https://en.wikipedia.org/wiki/Type%E2%80%93length%E2%80%93value) packet to be used for GitArena IPC #[derive(Deserialize, Serialize)] pub struct IpcPacket { id: u64, length: u64, - data: T + data: T, } impl IpcPacket { pub fn new(data: T) -> Self { - let size = Self::bincode().serialized_size(&data).unwrap_or(mem::size_of::() as u64); + let size = Self::bincode() + .serialized_size(&data) + .unwrap_or(mem::size_of::() as u64); IpcPacket { id: data.id(), length: size, - data + data, } } } diff --git a/gitarena-common/src/log.rs b/gitarena-common/src/log.rs index cab95e8..73ebf49 100644 --- a/gitarena-common/src/log.rs +++ b/gitarena-common/src/log.rs @@ -14,14 +14,15 @@ use tracing_subscriber::fmt::Layer; use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::registry::LookupSpan; use tracing_subscriber::util::SubscriberInitExt; -use tracing_subscriber::{EnvFilter, layer, Registry}; +use tracing_subscriber::{layer, EnvFilter, Registry}; use tracing_unwrap::ResultExt; // Keep in sync with `gitarena::init_logger` pub fn init_logger(module: &str, directives: &'static [&str]) -> Result> { let mut guards = Vec::new(); - let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|err| default_env(err, directives)); + let env_filter = + EnvFilter::try_from_default_env().unwrap_or_else(|err| default_env(err, directives)); let stdout_layer = stdout().map(|(layer, guard)| { guards.push(guard); @@ -49,21 +50,22 @@ pub fn init_logger(module: &str, directives: &'static [&str]) -> Result LookupSpan<'a>>() -> Option<(impl layer::Layer, WorkerGuard)> { +pub fn stdout LookupSpan<'a>>( +) -> Option<(impl layer::Layer, WorkerGuard)> { if env::var_os("NO_STDOUT_LOG").is_some() { return None; } let (writer, guard) = tracing_appender::non_blocking(io::stdout()); - let layer = Layer::new() - .with_thread_ids(true) - .with_writer(writer); + let layer = Layer::new().with_thread_ids(true).with_writer(writer); Some((layer, guard)) } -pub fn log_file LookupSpan<'a>>(module: &str) -> Result, WorkerGuard)>> { +pub fn log_file LookupSpan<'a>>( + module: &str, +) -> Result, WorkerGuard)>> { if cfg!(debug_assertions) || env::var_os("DEBUG_FILE_LOG").is_none() { return Ok(None); } @@ -85,7 +87,9 @@ pub fn log_file LookupSpan<'a>>(module: &str) -> Result< Ok(Some((layer, guard))) } -pub fn tokio_console LookupSpan<'a>>(filter: EnvFilter) -> (EnvFilter, Option>) { +pub fn tokio_console LookupSpan<'a>>( + filter: EnvFilter, +) -> (EnvFilter, Option>) { if !cfg!(tokio_unstable) { return (filter, None); } @@ -100,12 +104,20 @@ pub fn tokio_console LookupSpan<'a>>(filter: EnvFilter) } pub fn default_env(err: FromEnvError, directives: &[&str]) -> EnvFilter { - let not_found = 
err.source() - .map(|o| o.downcast_ref::().map_or_else(|| false, |err| matches!(err, VarError::NotPresent))) + let not_found = err + .source() + .map(|o| { + o.downcast_ref::() + .map_or_else(|| false, |err| matches!(err, VarError::NotPresent)) + }) .unwrap_or(false); if !not_found { - eprintln!("Warning: Unable to parse `{}` environment variable, using default values: {}", EnvFilter::DEFAULT_ENV, err); + eprintln!( + "Warning: Unable to parse `{}` environment variable, using default values: {}", + EnvFilter::DEFAULT_ENV, + err + ); } let level = if cfg!(debug_assertions) { diff --git a/gitarena-common/src/packets/git.rs b/gitarena-common/src/packets/git.rs index 38817b9..b88219d 100644 --- a/gitarena-common/src/packets/git.rs +++ b/gitarena-common/src/packets/git.rs @@ -1,10 +1,10 @@ -use serde::{Deserialize, Serialize}; use gitarena_macros::IpcPacket; +use serde::{Deserialize, Serialize}; #[derive(Deserialize, Serialize, Debug, Default, IpcPacket)] #[ipc(packet = "Git", id = 1)] // = 1001 pub struct GitImport { pub url: String, pub username: Option, - pub password: Option + pub password: Option, } diff --git a/gitarena-common/src/packets/mod.rs b/gitarena-common/src/packets/mod.rs index a10a967..2bf6d7f 100644 --- a/gitarena-common/src/packets/mod.rs +++ b/gitarena-common/src/packets/mod.rs @@ -4,7 +4,7 @@ pub mod git; // 1xxx #[repr(u64)] pub enum PacketCategory { - Git = 1000 + Git = 1000, } // TODO: Find a way to automatically generate this @@ -13,5 +13,5 @@ pub enum PacketCategory { #[repr(u64)] #[derive(FromPrimitive, ToPrimitive)] pub enum PacketId { - GitImport = 1001 + GitImport = 1001, } diff --git a/gitarena-macros/src/config.rs b/gitarena-macros/src/config.rs index 9d006f5..daaafcc 100644 --- a/gitarena-macros/src/config.rs +++ b/gitarena-macros/src/config.rs @@ -1,11 +1,11 @@ use std::fmt::{Debug, Formatter}; -use proc_macro2::TokenStream; use proc_macro::TokenStream as ProcMacroTS; +use proc_macro2::TokenStream; use quote::{quote, ToTokens}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; -use syn::{Ident, LitStr, parse_macro_input, Token, Type}; +use syn::{parse_macro_input, Ident, LitStr, Token, Type}; pub(crate) fn from_config(input: ProcMacroTS) -> ProcMacroTS { let settings = parse_macro_input!(input as SettingsList); @@ -42,7 +42,7 @@ pub(crate) fn from_optional_config(input: ProcMacroTS) -> ProcMacroTS { #[derive(Debug)] struct SettingsList { - settings: Vec + settings: Vec, } impl SettingsList { @@ -52,7 +52,12 @@ impl SettingsList { fn as_optional(&self) -> OptionalSettingsList { OptionalSettingsList { - settings: self.settings.iter().cloned().map(|s| s.as_optional()).collect() + settings: self + .settings + .iter() + .cloned() + .map(|s| s.as_optional()) + .collect(), } } } @@ -62,7 +67,7 @@ impl Parse for SettingsList { let punctuated = Punctuated::::parse_terminated(input)?; Ok(SettingsList { - settings: punctuated.iter().cloned().collect::>() + settings: punctuated.iter().cloned().collect::>(), }) } } @@ -83,7 +88,7 @@ impl ToTokens for SettingsList { struct Setting { identifier: Ident, key: String, - ty: Type + ty: Type, } impl Debug for Setting { @@ -98,7 +103,7 @@ impl Debug for Setting { impl Setting { fn as_optional(&self) -> OptionalSetting { OptionalSetting { - original: self.clone() + original: self.clone(), } } } @@ -114,7 +119,7 @@ impl Parse for Setting { Ok(Setting { identifier: ident, key: key_str, - ty + ty, }) } } @@ -137,7 +142,7 @@ impl ToTokens for Setting { #[derive(Debug)] struct OptionalSettingsList { - settings: Vec + 
settings: Vec, } impl ToTokens for OptionalSettingsList { @@ -154,7 +159,7 @@ impl ToTokens for OptionalSettingsList { #[derive(Debug)] struct OptionalSetting { - original: Setting + original: Setting, } impl ToTokens for OptionalSetting { diff --git a/gitarena-macros/src/ipc_packet.rs b/gitarena-macros/src/ipc_packet.rs index fc8f168..31d45ce 100644 --- a/gitarena-macros/src/ipc_packet.rs +++ b/gitarena-macros/src/ipc_packet.rs @@ -1,12 +1,12 @@ -use proc_macro2::{Ident, Span}; use proc_macro::TokenStream; +use proc_macro2::{Ident, Span}; use proc_macro_error::{emit_call_site_error, emit_error}; use quote::quote; use syn::spanned::Spanned; -use syn::{DeriveInput, Lit, Meta, NestedMeta, parse_macro_input}; +use syn::{parse_macro_input, DeriveInput, Lit, Meta, NestedMeta}; pub(crate) fn ipc_packet(input: TokenStream) -> TokenStream { - let mut input = parse_macro_input!(input as DeriveInput); + let mut input = parse_macro_input!(input as DeriveInput); let identifier = input.ident; let mut category = None; @@ -14,7 +14,12 @@ pub(crate) fn ipc_packet(input: TokenStream) -> TokenStream { input.attrs.retain(|attribute| { if let Ok(Meta::List(list)) = attribute.parse_meta() { - let ipc = list.path.segments.first().map(|segment| segment.ident == "ipc").unwrap_or_default(); + let ipc = list + .path + .segments + .first() + .map(|segment| segment.ident == "ipc") + .unwrap_or_default(); if ipc { for args in list.nested { @@ -57,7 +62,7 @@ pub(crate) fn ipc_packet(input: TokenStream) -> TokenStream { _ => emit_error! { segment.span(), "unknown identifier, expected `packet` or `id`" - } + }, } } } @@ -78,7 +83,7 @@ pub(crate) fn ipc_packet(input: TokenStream) -> TokenStream { match chars.next() { Some(c) => c.to_uppercase().collect::() + chars.as_str(), - None => String::new() + None => String::new(), } }; diff --git a/gitarena-macros/src/route.rs b/gitarena-macros/src/route.rs index 21bd016..a3dcc27 100644 --- a/gitarena-macros/src/route.rs +++ b/gitarena-macros/src/route.rs @@ -1,11 +1,11 @@ use std::ops::DerefMut; -use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use proc_macro::TokenStream; +use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use proc_macro_error::{abort, abort_call_site, abort_if_dirty, emit_error}; use quote::{quote, ToTokens}; use syn::spanned::Spanned; -use syn::{AttributeArgs, FnArg, ItemFn, Lit, LitStr, Meta, NestedMeta, parse_macro_input, Pat}; +use syn::{parse_macro_input, AttributeArgs, FnArg, ItemFn, Lit, LitStr, Meta, NestedMeta, Pat}; pub(crate) fn route(args: TokenStream, input: TokenStream) -> TokenStream { let mut args = parse_macro_input!(args as AttributeArgs); @@ -17,20 +17,22 @@ pub(crate) fn route(args: TokenStream, input: TokenStream) -> TokenStream { for (index, meta) in args.iter().enumerate() { match meta { - NestedMeta::Meta(meta) => if let Meta::NameValue(name_value) = meta { - if let Some(segment) = name_value.path.segments.first() { - let lowered = segment.ident.to_string().to_lowercase(); - - if lowered.as_str() == "err" { - if let Some(parsed_error_type) = match_error_type(&name_value.lit) { - error_type = parsed_error_type; - error_type_index = index; + NestedMeta::Meta(meta) => { + if let Meta::NameValue(name_value) = meta { + if let Some(segment) = name_value.path.segments.first() { + let lowered = segment.ident.to_string().to_lowercase(); + + if lowered.as_str() == "err" { + if let Some(parsed_error_type) = match_error_type(&name_value.lit) { + error_type = parsed_error_type; + error_type_index = index; + } + } + } else { + 
emit_error! { + meta.span(), + "meta name cannot be empty" } - } - } else { - emit_error! { - meta.span(), - "meta name cannot be empty" } } } @@ -91,13 +93,11 @@ pub(crate) fn route(args: TokenStream, input: TokenStream) -> TokenStream { FnArg::Typed(pat_type) => { let pat = &*pat_type.pat; match pat { - Pat::Ident(pat_ident) => { - pat_ident.ident.to_token_stream() - }, - _ => unimplemented!() + Pat::Ident(pat_ident) => pat_ident.ident.to_token_stream(), + _ => unimplemented!(), } - }, - _ => unimplemented!() + } + _ => unimplemented!(), }; idents_vec.push(ident_ts); } @@ -158,7 +158,7 @@ enum ErrorDisplayType { Plain, #[doc(hidden)] - Unset + Unset, } impl ToTokens for ErrorDisplayType { @@ -170,11 +170,11 @@ impl ToTokens for ErrorDisplayType { let ts = unboxed.to_token_stream(); quote! { Htmx(Box::new(crate::error::ErrorDisplayType::#ts)) } - }, + } ErrorDisplayType::Json => quote! { Json }, ErrorDisplayType::Git => quote! { Git }, ErrorDisplayType::Plain => quote! { Plain }, - ErrorDisplayType::Unset => unimplemented!("unset is not mapped to a GitArena type yet") + ErrorDisplayType::Unset => unimplemented!("unset is not mapped to a GitArena type yet"), }) } } @@ -192,7 +192,9 @@ fn match_error_type(input: &Lit) -> Option { "htmx+html" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Html))), "htmx+json" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Json))), "htmx+git" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Git))), - "htmx+text" | "htmx+plain" => Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Plain))), + "htmx+text" | "htmx+plain" => { + Some(ErrorDisplayType::Htmx(Box::new(ErrorDisplayType::Plain))) + } "htmx" => { emit_error! { input.span(), diff --git a/gitarena-ssh/src/keys.rs b/gitarena-ssh/src/keys.rs index 01dcedc..65fe3de 100644 --- a/gitarena-ssh/src/keys.rs +++ b/gitarena-ssh/src/keys.rs @@ -1,14 +1,15 @@ use anyhow::Result; use futures::TryStreamExt; -use gitarena_common::database::Database; use gitarena_common::database::models::KeyType; +use gitarena_common::database::Database; use gitarena_common::prelude::*; use sqlx::{Executor, Row}; pub(crate) async fn print_all<'e, E: Executor<'e, Database = Database>>(executor: E) -> Result<()> { let mut stream = sqlx::query( - "select algorithm, key from ssh_keys where expires_at is null or expires_at < now()" - ).fetch(executor); + "select algorithm, key from ssh_keys where expires_at is null or expires_at < now()", + ) + .fetch(executor); while let Some(row) = stream.try_next().await? { let algorithm: KeyType = row.try_get("algorithm")?; diff --git a/gitarena-ssh/src/main.rs b/gitarena-ssh/src/main.rs index 8efe7b2..51595df 100644 --- a/gitarena-ssh/src/main.rs +++ b/gitarena-ssh/src/main.rs @@ -1,5 +1,5 @@ use anyhow::{bail, Result}; -use clap::{Subcommand, Parser}; +use clap::{Parser, Subcommand}; use gitarena_common::database::create_postgres_pool; use gitarena_common::prelude::*; @@ -17,7 +17,7 @@ async fn main() -> Result<()> { match &args.command { Some(AuthorizedKeys) => keys::print_all(&mut transaction).await?, - _ => bail!("GitArena does currently not provide SSH access") + _ => bail!("GitArena does currently not provide SSH access"), } transaction.commit().await?; @@ -28,14 +28,19 @@ async fn main() -> Result<()> { enum Command { /// Prints out all non-expired SSH keys added by all GitArena users. 
/// This command should be invoked by the OpenSSH server via [`AuthorizedKeysCommand`](https://man.openbsd.org/sshd_config#AuthorizedKeysCommand) - AuthorizedKeys + AuthorizedKeys, } #[derive(Parser, Debug)] -#[clap(author, version, about = "SSH component for GitArena", long_about = "SSH component for GitArena: a software development platform with built-in vcs, issue tracking and code review")] +#[clap( + author, + version, + about = "SSH component for GitArena", + long_about = "SSH component for GitArena: a software development platform with built-in vcs, issue tracking and code review" +)] struct Args { user: Option, #[clap(subcommand)] - command: Option + command: Option, } diff --git a/gitarena-workhorse/src/main.rs b/gitarena-workhorse/src/main.rs index 577033e..d921a47 100644 --- a/gitarena-workhorse/src/main.rs +++ b/gitarena-workhorse/src/main.rs @@ -19,7 +19,12 @@ async fn main() -> Result<()> { Endpoint::new(ipc_path()?.to_owned()) .incoming() - .with_context(|| format!("Failed to create endpoint at {}", ipc_path().unwrap_or_log()))? // .unwrap_or_log() is safe as it would've excited early two lines above if this errors + .with_context(|| { + format!( + "Failed to create endpoint at {}", + ipc_path().unwrap_or_log() + ) + })? // .unwrap_or_log() is safe as it would've excited early two lines above if this errors .for_each(|connection| async { if let Err(err) = handle(connection).await { error!("Error occurred while reading stream: {}", err); @@ -32,19 +37,31 @@ async fn main() -> Result<()> { Ok(()) } -async fn handle(connection: Result) -> Result<()> { +async fn handle( + connection: Result, +) -> Result<()> { let mut connection = connection?; let type_ = connection.read_u64().await.context("Failed to read type")?; - let length = connection.read_u64().await.context("Failed to read length")?; + let length = connection + .read_u64() + .await + .context("Failed to read length")?; - let id: PacketId = PacketId::from_u64(type_).with_context(|| format!("Received unknown packet id: {}", type_))?; + let id: PacketId = PacketId::from_u64(type_) + .with_context(|| format!("Received unknown packet id: {}", type_))?; let mut buffer: Vec = Vec::with_capacity(length as usize); - let read_length = connection.read(buffer.as_mut_slice()).await.context("Failed to read payload")? as u64; + let read_length = connection + .read(buffer.as_mut_slice()) + .await + .context("Failed to read payload")? as u64; if read_length != length { - warn!("Failed to read correct payload size, expected: {} read: {}", length, read_length); + warn!( + "Failed to read correct payload size, expected: {} read: {}", + length, read_length + ); } // Bincode is configured in gitarena-common/src/ipc.rs to use little endianness diff --git a/src/captcha.rs b/src/captcha.rs index a5c7d01..a2ced23 100644 --- a/src/captcha.rs +++ b/src/captcha.rs @@ -8,20 +8,35 @@ use log::{error, warn}; use serde::{Deserialize, Serialize}; use sqlx::{Executor, Postgres}; -pub(crate) async fn verify_captcha<'e, E: Executor<'e, Database = Postgres>>(token: &String, executor: E) -> Result { +pub(crate) async fn verify_captcha<'e, E: Executor<'e, Database = Postgres>>( + token: &String, + executor: E, +) -> Result { let api_key = match get_optional_setting::("hcaptcha.site_key", executor).await? 
{ Some(api_key) => api_key, - None => return Ok(true) + None => return Ok(true), }; let response: HCaptchaResponse = Client::gitarena() .post("https://hcaptcha.com/siteverify") .send_form(&[("response", token), ("secret", &api_key)]) .await - .map_err(|err| err!(BAD_GATEWAY, "Unable to verify hCaptcha captcha token: {}", err))? + .map_err(|err| { + err!( + BAD_GATEWAY, + "Unable to verify hCaptcha captcha token: {}", + err + ) + })? .json() .await - .map_err(|err| err!(BAD_GATEWAY, "Unable to convert hCaptcha response into Json structure: {}", err))?; + .map_err(|err| { + err!( + BAD_GATEWAY, + "Unable to convert hCaptcha response into Json structure: {}", + err + ) + })?; if let Some(errors) = response.errors { let errors_str = errors.join(", "); @@ -44,5 +59,5 @@ struct HCaptchaResponse { hostname: Option, credit: Option, #[serde(rename(deserialize = "error-codes"))] - errors: Option> + errors: Option>, } diff --git a/src/config.rs b/src/config.rs index 99a4669..d153f3a 100644 --- a/src/config.rs +++ b/src/config.rs @@ -20,10 +20,14 @@ use tracing_unwrap::OptionExt; /// If the setting does not exist, returns SQL Err. /// /// The later case should never happen if the programmer added their setting to schema.sql -pub(crate) async fn get_optional_setting<'e, T, E>(key: &'static str, executor: E) -> Result> - where T: TryFrom + Send, - E: Executor<'e, Database = Postgres>, - >::Error: HoldsError + Send + Sync + 'static +pub(crate) async fn get_optional_setting<'e, T, E>( + key: &'static str, + executor: E, +) -> Result> +where + T: TryFrom + Send, + E: Executor<'e, Database = Postgres>, + >::Error: HoldsError + Send + Sync + 'static, { let setting = sqlx::query_as::<_, Setting>("select * from settings where key = $1 limit 1") .bind(key) @@ -32,7 +36,9 @@ pub(crate) async fn get_optional_setting<'e, T, E>(key: &'static str, executor: .with_context(|| format!("Unable to read setting {} from database", key))?; if setting.is_set() { - let result: T = setting.try_into().map_err(|err: T::Error| err.into_inner())?; + let result: T = setting + .try_into() + .map_err(|err: T::Error| err.into_inner())?; Ok(Some(result)) } else { Ok(None) @@ -47,9 +53,10 @@ pub(crate) async fn get_optional_setting<'e, T, E>(key: &'static str, executor: /// /// The later case should never happen if the programmer added their setting to schema.sql pub(crate) async fn get_setting<'e, T, E>(key: &'static str, executor: E) -> Result - where T: TryFrom + Send, - E: Executor<'e, Database = Postgres>, - >::Error: HoldsError + Send + Sync + 'static +where + T: TryFrom + Send, + E: Executor<'e, Database = Postgres>, + >::Error: HoldsError + Send + Sync + 'static, { let setting = sqlx::query_as::<_, Setting>("select * from settings where key = $1 limit 1") .bind(key) @@ -57,19 +64,32 @@ pub(crate) async fn get_setting<'e, T, E>(key: &'static str, executor: E) -> Res .await .with_context(|| format!("Unable to read setting {} from database", key))?; - let result: T = setting.try_into().map_err(|err: T::Error| err.into_inner())?; + let result: T = setting + .try_into() + .map_err(|err: T::Error| err.into_inner())?; Ok(result) } -pub(crate) async fn get_all_settings<'e, E: Executor<'e, Database = Postgres>>(executor: E) -> Result> { - Ok(sqlx::query_as::<_, Setting>("select * from settings order by key").fetch_all(executor).await?) 
+pub(crate) async fn get_all_settings<'e, E: Executor<'e, Database = Postgres>>( + executor: E, +) -> Result> { + Ok( + sqlx::query_as::<_, Setting>("select * from settings order by key") + .fetch_all(executor) + .await?, + ) } // This function returns impl Future instead of relying on async fn to automatically convert it into doing just that // Because async fn tries to unify lifetimes, we need to do this. More info: https://stackoverflow.com/a/68733302 -pub(crate) fn set_setting<'e, 'q, T, E>(key: &'static str, value: T, executor: E) -> impl Future> + 'q - where T: TryFrom + Encode<'q, Postgres> + Type + Send + 'q, - E: Executor<'e, Database = Postgres> + 'q +pub(crate) fn set_setting<'e, 'q, T, E>( + key: &'static str, + value: T, + executor: E, +) -> impl Future> + 'q +where + T: TryFrom + Encode<'q, Postgres> + Type + Send + 'q, + E: Executor<'e, Database = Postgres> + 'q, { async move { sqlx::query("update settings set value = $1 where key = $2") @@ -88,7 +108,7 @@ pub(crate) struct Setting { pub(crate) key: String, pub(crate) value: Option, #[sqlx(rename = "type")] - pub(crate) type_constraint: TypeConstraint + pub(crate) type_constraint: TypeConstraint, } impl Setting { @@ -153,9 +173,19 @@ impl TryFrom for String { fn try_from(setting: Setting) -> StdResult { (|| match setting.type_constraint { - TypeConstraint::String => Ok(setting.value.ok_or_else(|| anyhow!("Value for String setting `{}` is not set", setting.key.as_str()))?), - _ => bail!("Tried to cast setting `{}` into string despite it being {}", setting.key.as_str(), setting.type_constraint) - })().map_err(ErrorHolder) + TypeConstraint::String => Ok(setting.value.ok_or_else(|| { + anyhow!( + "Value for String setting `{}` is not set", + setting.key.as_str() + ) + })?), + _ => bail!( + "Tried to cast setting `{}` into string despite it being {}", + setting.key.as_str(), + setting.type_constraint + ), + })() + .map_err(ErrorHolder) } } @@ -167,9 +197,9 @@ generate_try_from!(Int, i64); #[sqlx(type_name = "type_constraint", rename_all = "lowercase")] #[serde(rename_all(serialize = "lowercase", deserialize = "lowercase"))] pub(crate) enum TypeConstraint { - Boolean, // bool, bool - Char, // i8, char - Int, // i32/i64, int/bigint - String, // &str, varchar, char, text - Bytes // &[u8], bytea // TODO: Implement Bytes when needed + Boolean, // bool, bool + Char, // i8, char + Int, // i32/i64, int/bigint + String, // &str, varchar, char, text + Bytes, // &[u8], bytea // TODO: Implement Bytes when needed } diff --git a/src/crypto.rs b/src/crypto.rs index ee16a21..2085288 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -14,21 +14,24 @@ const ARGON_CONFIG: Config = Config { thread_mode: ThreadMode::Parallel, time_cost: 3, variant: Variant::Argon2id, - version: Version::Version13 + version: Version::Version13, }; pub(crate) fn random_string_charset(length: usize, charset: &'static [u8]) -> String { let mut rng = rand::thread_rng(); let uniform = Uniform::new(0, charset.len()); - (0..length).map(|_| { - let index = uniform.sample(&mut rng); - charset[index] as char - }).collect() + (0..length) + .map(|_| { + let index = uniform.sample(&mut rng); + charset[index] as char + }) + .collect() } pub(crate) fn random_string(length: usize) -> String { - const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789)(*&^%$#@!~"; + const CHARSET: &[u8] = + b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789)(*&^%$#@!~"; random_string_charset(length, CHARSET) } @@ -48,13 +51,11 @@ pub(crate) fn 
random_hex_string(length: usize) -> String { pub(crate) fn hash_password(password: &str) -> Result { let salt = random_string(16); - argon2::hash_encoded( - password.as_bytes(), salt.as_bytes(), &ARGON_CONFIG - ).context("Failed to hash password") + argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &ARGON_CONFIG) + .context("Failed to hash password") } pub(crate) fn check_password(user: &User, password: &str) -> Result { - argon2::verify_encoded( - user.password.as_str(), password.as_bytes() - ).with_context(|| format!("Failed to check password for user #{}", user.id)) + argon2::verify_encoded(user.password.as_str(), password.as_bytes()) + .with_context(|| format!("Failed to check password for user #{}", user.id)) } diff --git a/src/error.rs b/src/error.rs index b6c0c7b..f89416a 100644 --- a/src/error.rs +++ b/src/error.rs @@ -11,10 +11,10 @@ use std::sync::Arc; use actix_web::body::{BoxBody, MessageBody}; use actix_web::dev::{ResponseHead, Service, ServiceRequest, ServiceResponse}; -use actix_web::Error as ActixError; use actix_web::error::InternalError; -use actix_web::http::header::{CONTENT_TYPE, HeaderValue}; +use actix_web::http::header::{HeaderValue, CONTENT_TYPE}; use actix_web::http::StatusCode; +use actix_web::Error as ActixError; use actix_web::Result as ActixResult; use actix_web::{HttpResponse, HttpResponseBuilder, ResponseError}; use anyhow::{Error, Result}; @@ -110,14 +110,19 @@ macro_rules! err { pub(crate) struct WithStatusCode { pub(crate) code: StatusCode, pub(crate) source: Option, - pub(crate) display: bool // Whenever cause() should be shown to the user + pub(crate) display: bool, // Whenever cause() should be shown to the user } impl Display for WithStatusCode { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match &self.source { Some(source) if self.display => write!(f, "{}", source), - _ => write!(f, "{} {}", self.code.as_str(), self.code.canonical_reason().unwrap_or_default()) + _ => write!( + f, + "{} {}", + self.code.as_str(), + self.code.canonical_reason().unwrap_or_default() + ), } } } @@ -127,7 +132,7 @@ impl WithStatusCode { WithStatusCode { code, source: None, - display: false + display: false, } } @@ -135,7 +140,7 @@ impl WithStatusCode { Ok(WithStatusCode { code: StatusCode::from_u16(code)?, source: None, - display: false + display: false, }) } } @@ -143,14 +148,14 @@ impl WithStatusCode { #[derive(Clone)] pub(crate) struct GitArenaError { pub(crate) source: Arc, - pub(crate) display_type: ErrorDisplayType + pub(crate) display_type: ErrorDisplayType, } impl GitArenaError { fn status_code(&self) -> StatusCode { match self.source.downcast_ref::() { Some(with_code) => with_code.code, - None => StatusCode::INTERNAL_SERVER_ERROR + None => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -161,14 +166,18 @@ impl GitArenaError { /// Whenever this error should be displayed to the end user fn should_display_message(&self) -> bool { - self.source.downcast_ref::().map_or_else(|| false, |w| w.display) + self.source + .downcast_ref::() + .map_or_else(|| false, |w| w.display) } fn message(&self) -> String { if self.should_display_message() { self.source.to_string() } else { - self.status_code().canonical_reason().map_or_else(String::new, str::to_owned) + self.status_code() + .canonical_reason() + .map_or_else(String::new, str::to_owned) } } } @@ -199,13 +208,15 @@ impl ResponseError for GitArenaError { match &self.display_type { ErrorDisplayType::Html | ErrorDisplayType::Git => { - builder.extensions_mut().insert::(self.clone()); + builder + .extensions_mut() + 
.insert::(self.clone()); // This method is not async which means we can't call async renders such as HTML and Git // As a workaround, we let a middleware (which is async) render these two error types // More information: https://github.com/actix/actix-web/discussions/2593 builder.finish() - }, + } ErrorDisplayType::Htmx(inner) => { // TODO: Send partial htmx instead let mut error = self.clone(); @@ -216,22 +227,29 @@ impl ResponseError for GitArenaError { ErrorDisplayType::Json => builder.json(json!({ "error": self.message() })), - ErrorDisplayType::Plain => builder.body(self.message()) + ErrorDisplayType::Plain => builder.body(self.message()), } } } /// Middleware which renders HTML and Git errors -pub(crate) fn error_renderer_middleware(request: ServiceRequest, service: &S) -> impl Future>> + 'static - where S: Service, Error = ActixError>, - S::Future: 'static, - B: MessageBody + 'static +pub(crate) fn error_renderer_middleware( + request: ServiceRequest, + service: &S, +) -> impl Future>> + 'static +where + S: Service, Error = ActixError>, + S::Future: 'static, + B: MessageBody + 'static, { let future = service.call(request); async { let mut response = future.await?.map_into_boxed_body(); - let gitarena_error = response.response_mut().extensions_mut().remove::(); + let gitarena_error = response + .response_mut() + .extensions_mut() + .remove::(); Ok(if let Some(error) = gitarena_error { match error.display_type { @@ -239,11 +257,14 @@ pub(crate) fn error_renderer_middleware(request: ServiceRequest, service: let result = render_html_error(&error).await; response.map_body(|head, _| { - head.headers.insert(CONTENT_TYPE, HeaderValue::from_static("text/html; charset=utf-8")); + head.headers.insert( + CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=utf-8"), + ); result.unwrap_or_else(|err| error_render_error(err, &error, head)) }) - }, + } ErrorDisplayType::Git => { let result = render_git_error(&error).await; @@ -252,15 +273,20 @@ pub(crate) fn error_renderer_middleware(request: ServiceRequest, service: Ok(body) => { // Git doesn't show client errors if the response isn't 200 for some reason head.status = StatusCode::OK; - head.headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/octet-stream")); + head.headers.insert( + CONTENT_TYPE, + HeaderValue::from_static("application/octet-stream"), + ); body } - Err(err) => error_render_error(err, &error, head) + Err(err) => error_render_error(err, &error, head), } }) } - _ => unreachable!("Only html and Git error responses are handled in the async middleware") + _ => unreachable!( + "Only html and Git error responses are handled in the async middleware" + ), } } else { response @@ -284,7 +310,9 @@ async fn render_html_error(renderer: &GitArenaError) -> Result { async fn render_git_error(renderer: &GitArenaError) -> Result { let mut writer = GitWriter::new(); - writer.write_text_sideband(Band::Error, format!("error: {}", renderer.message())).await?; + writer + .write_text_sideband(Band::Error, format!("error: {}", renderer.message())) + .await?; Ok(BoxBody::new(writer.serialize().await?)) } @@ -298,7 +326,8 @@ fn error_render_error(err: Error, ga_error: &GitArenaError, head: &mut ResponseH error!("| Caused by: {:?}", ga_error); // Fall back to the generic actix response - let actix_response = InternalError::new(err, StatusCode::INTERNAL_SERVER_ERROR).error_response(); + let actix_response = + InternalError::new(err, StatusCode::INTERNAL_SERVER_ERROR).error_response(); head.status = StatusCode::INTERNAL_SERVER_ERROR; 
head.headers = actix_response.headers().clone(); @@ -319,7 +348,7 @@ impl ExtendWithStatusCode for StdResu self.map_err(|err| WithStatusCode { code: status_code, source: Some(Error::from(err)), - display: false + display: false, }) } @@ -327,7 +356,7 @@ impl ExtendWithStatusCode for StdResu self.map_err(|err| WithStatusCode { code: status_code, source: Some(Error::from(err)), - display: true + display: true, }) } } @@ -338,7 +367,7 @@ pub(crate) enum ErrorDisplayType { Htmx(Box), Json, Git, - Plain + Plain, } /// Simple struct which wraps an anyhow [Error](anyhow::Error). Used in conjunction with [HoldsError] trait. diff --git a/src/git/basic_auth.rs b/src/git/basic_auth.rs index 5c41b8f..e628b17 100644 --- a/src/git/basic_auth.rs +++ b/src/git/basic_auth.rs @@ -1,8 +1,8 @@ -use crate::{crypto, die, err}; use crate::prelude::*; use crate::privileges::repo_visibility::RepoVisibility; use crate::repository::Repository; use crate::user::User; +use crate::{crypto, die, err}; use actix_web::http::header::{CONTENT_TYPE, WWW_AUTHENTICATE}; use actix_web::{Either, HttpRequest, HttpResponse}; @@ -12,20 +12,26 @@ use tracing::instrument; use tracing_unwrap::OptionExt; #[instrument(skip(request, executor), err)] -pub(crate) async fn validate_repo_access<'e, E>(repo: Option, content_type: &str, request: &HttpRequest, executor: E) -> Result, Repository), HttpResponse>> - where E: Executor<'e, Database = Postgres> +pub(crate) async fn validate_repo_access<'e, E>( + repo: Option, + content_type: &str, + request: &HttpRequest, + executor: E, +) -> Result, Repository), HttpResponse>> +where + E: Executor<'e, Database = Postgres>, { match repo { Some(repo) => { if repo.visibility != RepoVisibility::Public { return match login_flow(request, executor, content_type).await? 
{ Either::Left(user) => Ok(Either::Left((Some(user), repo))), - Either::Right(response) => Ok(Either::Right(response)) - } + Either::Right(response) => Ok(Either::Right(response)), + }; } Ok(Either::Left((None, repo))) - }, + } None => { // Prompt for authentication even if the repo does not exist to prevent leakage of private repositories let _ = login_flow(request, executor, content_type).await?; @@ -36,8 +42,13 @@ pub(crate) async fn validate_repo_access<'e, E>(repo: Option, conten } #[instrument(skip(request, executor), err)] -pub(crate) async fn login_flow<'e, E>(request: &HttpRequest, executor: E, content_type: &str) -> Result> - where E: Executor<'e, Database = Postgres> +pub(crate) async fn login_flow<'e, E>( + request: &HttpRequest, + executor: E, + content_type: &str, +) -> Result> +where + E: Executor<'e, Database = Postgres>, { if !is_present(request).await { return Ok(Either::Right(prompt(content_type).await)); @@ -50,13 +61,17 @@ pub(crate) async fn login_flow<'e, E>(request: &HttpRequest, executor: E, conten pub(crate) async fn prompt(content_type: &str) -> HttpResponse { HttpResponse::Unauthorized() .append_header((CONTENT_TYPE, content_type)) - .append_header((WWW_AUTHENTICATE, "Basic realm=\"GitArena\", charset=\"UTF-8\"")) + .append_header(( + WWW_AUTHENTICATE, + "Basic realm=\"GitArena\", charset=\"UTF-8\"", + )) .finish() } #[instrument(skip_all, err)] pub(crate) async fn authenticate<'e, E>(request: &HttpRequest, transaction: E) -> Result - where E: Executor<'e, Database = Postgres> +where + E: Executor<'e, Database = Postgres>, { // TODO: Add more verbose logging to this function similar to frontend login (for usage by fail2ban) @@ -68,10 +83,11 @@ pub(crate) async fn authenticate<'e, E>(request: &HttpRequest, transaction: E) - die!(UNAUTHORIZED, "Username and password cannot be empty"); } - let option: Option = sqlx::query_as::<_, User>("select * from users where username = $1 limit 1") - .bind(&username) - .fetch_optional(transaction) - .await?; + let option: Option = + sqlx::query_as::<_, User>("select * from users where username = $1 limit 1") + .bind(&username) + .fetch_optional(transaction) + .await?; if option.is_none() { die!(UNAUTHORIZED, "User does not exist"); @@ -85,30 +101,41 @@ pub(crate) async fn authenticate<'e, E>(request: &HttpRequest, transaction: E) - // TODO: Check for allowed login /*let primary_email = Email::find_primary_email(&user, transaction) - .await? - .ok_or_else(|| anyhow!("No primary email".to_owned()))?;*/ - - if user.disabled/* || !primary_email.is_allowed_login()*/ { - die!(UNAUTHORIZED, "Account has been disabled. Please contact support."); + .await? + .ok_or_else(|| anyhow!("No primary email".to_owned()))?;*/ + + if user.disabled + /* || !primary_email.is_allowed_login()*/ + { + die!( + UNAUTHORIZED, + "Account has been disabled. Please contact support." 
+ ); } Ok(user) } - None => die!(UNAUTHORIZED) + None => die!(UNAUTHORIZED), } } #[instrument(skip(auth_header), err)] pub(crate) async fn parse_basic_auth(auth_header: &str) -> Result<(String, String)> { - let (auth_type, base64_credentials) = auth_header.split_once(' ').ok_or_else(|| err!(BAD_REQUEST))?; + let (auth_type, base64_credentials) = auth_header + .split_once(' ') + .ok_or_else(|| err!(BAD_REQUEST))?; if auth_type != "Basic" { - die!(UNAUTHORIZED, "Unsupported authentication type, only Basic auth allowed"); + die!( + UNAUTHORIZED, + "Unsupported authentication type, only Basic auth allowed" + ); } let credentials = String::from_utf8(base64::decode(base64_credentials)?)?; - Ok(credentials.split_once(':') + Ok(credentials + .split_once(':') .map(|(username, password)| (username.to_owned(), password.to_owned())) .ok_or_else(|| err!(UNAUTHORIZED, "Both username and password is required"))?) } diff --git a/src/git/capabilities.rs b/src/git/capabilities.rs index 6826672..7eee3eb 100644 --- a/src/git/capabilities.rs +++ b/src/git/capabilities.rs @@ -14,7 +14,9 @@ pub(crate) async fn capabilities(service: &str) -> Result { writer.flush().await?; writer.write_text("version 2").await?; - writer.write_text(concat!("agent=git/gitarena-", env!("CARGO_PKG_VERSION"))).await?; + writer + .write_text(concat!("agent=git/gitarena-", env!("CARGO_PKG_VERSION"))) + .await?; writer.write_text("ls-refs").await?; writer.write_text("unborn").await?; writer.write_text("fetch").await?; diff --git a/src/git/fetch.rs b/src/git/fetch.rs index b03fa47..790691b 100644 --- a/src/git/fetch.rs +++ b/src/git/fetch.rs @@ -82,7 +82,10 @@ pub(crate) async fn fetch(input: Vec>, repo: &Git2Repository) -> Result< } #[instrument(err, skip(repo))] -pub(crate) async fn process_haves(repo: &Git2Repository, options: &Fetch) -> Result> { +pub(crate) async fn process_haves( + repo: &Git2Repository, + options: &Fetch, +) -> Result> { if options.have.is_empty() { return Ok(None); } @@ -113,11 +116,19 @@ pub(crate) async fn process_haves(repo: &Git2Repository, options: &Fetch) -> Res } #[instrument(err, skip(repo))] -pub(crate) async fn process_wants(repo: &Git2Repository, options: &Fetch) -> Result> { +pub(crate) async fn process_wants( + repo: &Git2Repository, + options: &Fetch, +) -> Result> { let mut writer = GitWriter::new(); writer.write_text("packfile").await?; - writer.write_text_sideband(Band::Progress, format!("Enumerating objects: {}, done.", options.want.len())).await?; + writer + .write_text_sideband( + Band::Progress, + format!("Enumerating objects: {}, done.", options.want.len()), + ) + .await?; let mut progress_writer = ProgressWriter::new(); @@ -137,9 +148,10 @@ pub(crate) async fn process_wants(repo: &Git2Repository, options: &Fetch) -> Res if let Some(commit) = object.as_commit() { insert_commit_with_parents(commit, &mut pack_builder).await?; } - }, + } ObjectType::Tree => pack_builder.insert_tree(object.id())?, - _ => pack_builder.insert_object(object.id(), Some(wanted_obj.as_str()))? 
+ _ => pack_builder + .insert_object(object.id(), Some(wanted_obj.as_str()))?, } } else { pack_builder.insert_object(object.id(), Some(wanted_obj.as_str()))?; @@ -159,7 +171,9 @@ pub(crate) async fn process_wants(repo: &Git2Repository, options: &Fetch) -> Res writer.append(progress_writer.to_writer().await?).await?; - writer.write_binary_sideband(Band::Data, buffer.as_ref()).await?; + writer + .write_binary_sideband(Band::Data, buffer.as_ref()) + .await?; let total = object_count; let total_delta = progress_writer.delta_total.unwrap_or_default() as usize; @@ -172,17 +186,25 @@ pub(crate) async fn process_wants(repo: &Git2Repository, options: &Fetch) -> Res let _obj_pack_reused = 0 /*reused_delta - reused*/; let pack_reused = 0 /*obj_pack_total + obj_pack_reused*/; - writer.write_text_sideband(Band::Progress, format!( - "Total {} (delta {}), reused {} (delta {}), pack-reused {}", - total, total_delta, reused, reused_delta, pack_reused - )).await?; + writer + .write_text_sideband( + Band::Progress, + format!( + "Total {} (delta {}), reused {} (delta {}), pack-reused {}", + total, total_delta, reused, reused_delta, pack_reused + ), + ) + .await?; Ok(Some(writer)) } #[instrument(err, skip(pack_builder))] #[async_recursion(?Send)] -async fn insert_commit_with_parents(commit: &Commit<'_>, pack_builder: &mut PackBuilder<'_>) -> Result<()> { +async fn insert_commit_with_parents( + commit: &Commit<'_>, + pack_builder: &mut PackBuilder<'_>, +) -> Result<()> { pack_builder.insert_commit(commit.id())?; for parent in commit.parents() { diff --git a/src/git/history.rs b/src/git/history.rs index 0a300b5..b4c3646 100644 --- a/src/git/history.rs +++ b/src/git/history.rs @@ -4,7 +4,11 @@ use git2::{DiffOptions, Oid, Repository as Git2Repository, Sort}; use tracing::instrument; #[instrument(err, skip(repo))] -pub(crate) async fn last_commit_for_blob(repo: &Git2Repository, reference_name: &str, file_name: &str) -> Result> { +pub(crate) async fn last_commit_for_blob( + repo: &Git2Repository, + reference_name: &str, + file_name: &str, +) -> Result> { let commits = commits_for_blob(repo, reference_name, file_name, Some(1)).await?; Ok(commits.get(0).copied()) @@ -12,7 +16,10 @@ pub(crate) async fn last_commit_for_blob(repo: &Git2Repository, reference_name: #[instrument(err, skip(repo))] #[async_recursion(?Send)] -pub(crate) async fn last_commit_for_ref(repo: &Git2Repository, reference_name: &str) -> Result> { +pub(crate) async fn last_commit_for_ref( + repo: &Git2Repository, + reference_name: &str, +) -> Result> { let reference = repo.find_reference(reference_name)?; if let Some(target) = reference.symbolic_target() { @@ -23,7 +30,12 @@ pub(crate) async fn last_commit_for_ref(repo: &Git2Repository, reference_name: & } #[instrument(err, skip(repo))] -pub(crate) async fn commits_for_blob(repo: &Git2Repository, reference: &str, file_name: &str, max_results: Option) -> Result> { +pub(crate) async fn commits_for_blob( + repo: &Git2Repository, + reference: &str, + file_name: &str, + max_results: Option, +) -> Result> { let mut results = Vec::::new(); if let Some(max) = max_results { @@ -53,7 +65,8 @@ pub(crate) async fn commits_for_blob(repo: &Git2Repository, reference: &str, fil diff_options.skip_binary_check(true); diff_options.pathspec(file_name); - let diff = repo.diff_tree_to_tree(previous_tree.as_ref(), Some(&tree), Some(&mut diff_options))?; + let diff = + repo.diff_tree_to_tree(previous_tree.as_ref(), Some(&tree), Some(&mut diff_options))?; for _ in diff.deltas() { results.push(commit_oid); @@ -72,7 +85,11 @@ 
pub(crate) async fn commits_for_blob(repo: &Git2Repository, reference: &str, fil /// `reference` can be either a full ref name or a OID string (ascii-hex-numeric, 40 digits) /// Returns at most `limit` commits or all commits if `limit == 0` #[instrument(err, skip(repo))] -pub(crate) async fn all_commits(repo: &Git2Repository, reference: &str, limit: usize) -> Result> { +pub(crate) async fn all_commits( + repo: &Git2Repository, + reference: &str, + limit: usize, +) -> Result> { let mut results = Vec::::with_capacity(limit); let mut rev_walk = repo.revwalk()?; @@ -80,7 +97,7 @@ pub(crate) async fn all_commits(repo: &Git2Repository, reference: &str, limit: u match Oid::from_str(reference) { Ok(oid) => rev_walk.push(oid)?, - Err(_) => rev_walk.push_ref(reference)? + Err(_) => rev_walk.push_ref(reference)?, } for result in rev_walk { @@ -106,7 +123,6 @@ pub(crate) async fn all_branches(repo: &Git2Repository) -> Result> { if let Some(name) = reference.name() { results.push(name.replacen("refs/heads/", "", 1)); } - } Ok(results) @@ -116,7 +132,8 @@ pub(crate) async fn all_branches(repo: &Git2Repository) -> Result> { pub(crate) async fn all_tags(repo: &Git2Repository, prefix: Option<&str>) -> Result> { let tags = repo.tag_names(prefix)?; - Ok(tags.iter() + Ok(tags + .iter() .filter_map(|o| o.map(|o| o.to_owned())) .collect()) } diff --git a/src/git/hooks/detect_license.rs b/src/git/hooks/detect_license.rs index 3407944..a309485 100644 --- a/src/git/hooks/detect_license.rs +++ b/src/git/hooks/detect_license.rs @@ -1,6 +1,6 @@ use crate::git::utils::{read_blob_content, repo_files_at_head}; -use crate::licenses::license_file_names; use crate::licenses; +use crate::licenses::license_file_names; use crate::repository::Repository; use std::sync::Arc; @@ -13,7 +13,11 @@ use git_repository::odb::Store; use tracing::instrument; #[instrument(err, skip(store))] -pub(crate) async fn detect_license(store: Arc, gitoxide_repo: &git_repository::Repository, repo: &mut Repository) -> Result<()> { +pub(crate) async fn detect_license( + store: Arc, + gitoxide_repo: &git_repository::Repository, + repo: &mut Repository, +) -> Result<()> { let mut buffer = Vec::::new(); let tree = repo_files_at_head(store.clone(), gitoxide_repo, &mut buffer).await?; @@ -22,7 +26,7 @@ pub(crate) async fn detect_license(store: Arc, gitoxide_repo: &git_reposi let lowered_file_name = entry.filename.to_lowercase(); if !license_file_names().contains(&lowered_file_name.as_slice()) { - continue + continue; } match entry.mode { diff --git a/src/git/hooks/post_update.rs b/src/git/hooks/post_update.rs index 419a75a..2fec8b0 100644 --- a/src/git/hooks/post_update.rs +++ b/src/git/hooks/post_update.rs @@ -12,7 +12,11 @@ use sqlx::{Executor, Postgres}; // prefered: https://www.reddit.com/r/rust/comments/fddf6y/handling_longrunning_background_tasks_in_actixweb/ // https://stackoverflow.com/a/66181410 -pub(crate) async fn run<'e, E: Executor<'e, Database = Postgres>>(store: Arc, repo: &mut Repository, executor: E) -> Result<()> { +pub(crate) async fn run<'e, E: Executor<'e, Database = Postgres>>( + store: Arc, + repo: &mut Repository, + executor: E, +) -> Result<()> { let gitoxide_repo = repo.gitoxide(executor).await?; if let Err(err) = detect_license(store, &gitoxide_repo, repo).await { diff --git a/src/git/io/band.rs b/src/git/io/band.rs index 6eaf0e0..9ea344e 100644 --- a/src/git/io/band.rs +++ b/src/git/io/band.rs @@ -4,7 +4,7 @@ use derive_more::Display; pub(crate) enum Band { Data, Progress, - Error + Error, } impl Band { @@ -12,7 +12,7 @@ impl 
Band { match self { Band::Data => b"\x01", Band::Progress => b"\x02", - Band::Error => b"\x03" + Band::Error => b"\x03", } } } diff --git a/src/git/io/progress_writer.rs b/src/git/io/progress_writer.rs index 7271bdb..8a1a546 100644 --- a/src/git/io/progress_writer.rs +++ b/src/git/io/progress_writer.rs @@ -11,14 +11,14 @@ use tracing::instrument; #[derive(Clone, Debug)] pub(crate) struct ProgressWriter { lines: Vec, - pub(crate) delta_total: Option + pub(crate) delta_total: Option, } impl ProgressWriter { pub(crate) fn new() -> ProgressWriter { ProgressWriter { lines: Vec::::new(), - delta_total: None + delta_total: None, } } @@ -27,7 +27,9 @@ impl ProgressWriter { } #[instrument] - pub(crate) fn pack_builder_callback(&mut self) -> impl FnMut(PackBuilderStage, u32, u32) -> bool + '_ { + pub(crate) fn pack_builder_callback( + &mut self, + ) -> impl FnMut(PackBuilderStage, u32, u32) -> bool + '_ { let rc = Rc::new(RefCell::new(self)); move |stage: PackBuilderStage, current: u32, total: u32| -> bool { @@ -39,7 +41,10 @@ impl ProgressWriter { match stage { PackBuilderStage::AddingObjects => { let ref_cell = &mut rc.borrow_mut(); - ref_cell.lines.push(format!("Counting objects: {:>3}% ({}/{}){}", percentage, current, total, ending)); + ref_cell.lines.push(format!( + "Counting objects: {:>3}% ({}/{}){}", + percentage, current, total, ending + )); } PackBuilderStage::Deltafication => { let ref_cell = &mut rc.borrow_mut(); @@ -48,7 +53,10 @@ impl ProgressWriter { ref_cell.delta_total = Some(total); } - ref_cell.lines.push(format!("Compressing objects: {:>3}% ({}/{}){}", percentage, current, total, ending)); + ref_cell.lines.push(format!( + "Compressing objects: {:>3}% ({}/{}){}", + percentage, current, total, ending + )); } } @@ -60,7 +68,9 @@ impl ProgressWriter { let mut writer = GitWriter::new(); for line in &self.lines { - writer.write_binary_sideband(Band::Progress, line.as_bytes()).await?; + writer + .write_binary_sideband(Band::Progress, line.as_bytes()) + .await?; } Ok(writer) diff --git a/src/git/io/reader.rs b/src/git/io/reader.rs index e06f779..2d2f9b6 100644 --- a/src/git/io/reader.rs +++ b/src/git/io/reader.rs @@ -8,22 +8,20 @@ use tracing_unwrap::OptionExt; pub(crate) async fn read_until_command(mut body: Vec>) -> Result<(String, Vec>)> { for (index, raw_line) in body.iter().enumerate() { match String::from_utf8(raw_line.to_vec()) { - Ok(line) => { - match line.split_once('=') { - Some((key, value)) => { - if key != "command" { - continue; - } - - for i in 0..index { - body.remove(i); - } - - return Ok((value.to_owned(), body)); + Ok(line) => match line.split_once('=') { + Some((key, value)) => { + if key != "command" { + continue; } - None => continue + + for i in 0..index { + body.remove(i); + } + + return Ok((value.to_owned(), body)); } - } + None => continue, + }, Err(err) => { warn!("Failed to read line into UTF-8 vec: {}", err); continue; @@ -35,23 +33,27 @@ pub(crate) async fn read_until_command(mut body: Vec>) -> Result<(String } #[instrument(err, skip(iter))] -pub(crate) async fn read_data_lines(iter: &mut StreamingPeekableIter<&[u8]>) -> Result>> { +pub(crate) async fn read_data_lines( + iter: &mut StreamingPeekableIter<&[u8]>, +) -> Result>> { let mut body = Vec::>::new(); while let Some(line_result) = iter.read_line().await { match line_result { - Ok(Ok(line)) => if let PacketLineRef::Data(data) = line { - if data.is_empty() { - continue; - } + Ok(Ok(line)) => { + if let PacketLineRef::Data(data) = line { + if data.is_empty() { + continue; + } - // We can safely unwrap() 
as we checked above that the slice is not empty - let length = data.len() - (data.last().unwrap_or_log() == &10_u8) as usize; + // We can safely unwrap() as we checked above that the slice is not empty + let length = data.len() - (data.last().unwrap_or_log() == &10_u8) as usize; - body.push(data[..length].to_vec()); - }, + body.push(data[..length].to_vec()); + } + } Ok(Err(err)) => warn!("Failed to read Git data line: {}", err), - Err(err) => warn!("Failed to read Git data line: {}", err) + Err(err) => warn!("Failed to read Git data line: {}", err), } } diff --git a/src/git/io/writer.rs b/src/git/io/writer.rs index 09d3cbd..6242a88 100644 --- a/src/git/io/writer.rs +++ b/src/git/io/writer.rs @@ -8,13 +8,13 @@ use tracing::instrument; use tracing_unwrap::ResultExt; pub(crate) struct GitWriter { - inner: PacketlineWriter> + inner: PacketlineWriter>, } impl GitWriter { pub(crate) fn new() -> GitWriter { GitWriter { - inner: PacketlineWriter::new(Vec::::new()).text_mode() + inner: PacketlineWriter::new(Vec::::new()).text_mode(), } } @@ -22,65 +22,111 @@ impl GitWriter { pub(crate) async fn write_text>(&mut self, text: S) -> Result<&mut GitWriter> { let str_ref = text.as_ref(); - self.inner.write(str_ref.as_bytes()).await.with_context(|| format!("Unable to write text to Git writer: `{}`", str_ref))?; + self.inner + .write(str_ref.as_bytes()) + .await + .with_context(|| format!("Unable to write text to Git writer: `{}`", str_ref))?; Ok(self) } // Example: [hexl]\x01text - pub(crate) async fn write_text_sideband>(&mut self, band: Band, text: S) -> Result<&mut GitWriter> { + pub(crate) async fn write_text_sideband>( + &mut self, + band: Band, + text: S, + ) -> Result<&mut GitWriter> { let str_ref = text.as_ref(); let with_band = [band.serialize(), str_ref.as_bytes()].concat(); - self.inner.write(with_band.as_slice()).await.with_context(|| { - format!("Unable to write text to sideband {} in Git writer: `{}`", band, str_ref) - })?; + self.inner + .write(with_band.as_slice()) + .await + .with_context(|| { + format!( + "Unable to write text to sideband {} in Git writer: `{}`", + band, str_ref + ) + })?; Ok(self) } // Example: "[hexl]\x01[hexl]text" - pub(crate) async fn write_text_sideband_pktline>(&mut self, band: Band, text: S) -> Result<&mut GitWriter> { + pub(crate) async fn write_text_sideband_pktline>( + &mut self, + band: Band, + text: S, + ) -> Result<&mut GitWriter> { let str_ref = text.as_ref(); let hex_prefix = &u16_to_hex((str_ref.len() + 4 + 1) as u16); // 4 for length, 1 for newline let with_band = [band.serialize(), hex_prefix, str_ref.as_bytes()].concat(); - self.inner.write(with_band.as_slice()).await.with_context(|| { - format!("Unable to write text to sideband {} in Git writer: `{}`", band, str_ref) - })?; + self.inner + .write(with_band.as_slice()) + .await + .with_context(|| { + format!( + "Unable to write text to sideband {} in Git writer: `{}`", + band, str_ref + ) + })?; Ok(self) } pub(crate) async fn write_text_bytes(&mut self, text: &[u8]) -> Result<&mut GitWriter> { - self.inner.write(text).await.with_context(|| format!("Unable to write text bytes to Git writer: {:?}", text))?; + self.inner + .write(text) + .await + .with_context(|| format!("Unable to write text bytes to Git writer: {:?}", text))?; Ok(self) } pub(crate) async fn write_binary(&mut self, binary: &[u8]) -> Result<&mut GitWriter> { self.inner.enable_binary_mode(); - self.inner.write(binary).await.with_context(|| format!("Unable to write binary to Git writer: {:?}", binary))?; + self.inner + .write(binary) + 
.await + .with_context(|| format!("Unable to write binary to Git writer: {:?}", binary))?; self.inner.enable_text_mode(); Ok(self) } - pub(crate) async fn write_binary_sideband(&mut self, band: Band, binary: &[u8]) -> Result<&mut GitWriter> { + pub(crate) async fn write_binary_sideband( + &mut self, + band: Band, + binary: &[u8], + ) -> Result<&mut GitWriter> { let with_band = [band.serialize(), binary].concat(); self.inner.enable_binary_mode(); - self.inner.write(with_band.as_slice()).await.with_context(|| { - format!("Unable to write binary to sideband {} in Git writer: {:?}", band, binary) - })?; + self.inner + .write(with_band.as_slice()) + .await + .with_context(|| { + format!( + "Unable to write binary to sideband {} in Git writer: {:?}", + band, binary + ) + })?; self.inner.enable_text_mode(); Ok(self) } pub(crate) async fn write_raw(&mut self, binary: &[u8]) -> Result<&mut GitWriter> { - self.inner.inner_mut().write(binary).await.with_context(|| format!("Unable to write raw data to Git writer: {:?}", binary))?; + self.inner + .inner_mut() + .write(binary) + .await + .with_context(|| format!("Unable to write raw data to Git writer: {:?}", binary))?; Ok(self) } pub(crate) async fn flush(&mut self) -> Result<&mut GitWriter> { - PacketLineRef::Flush.write_to(self.inner.inner_mut()).await.context("Unable to write flush to Git writer")?; + PacketLineRef::Flush + .write_to(self.inner.inner_mut()) + .await + .context("Unable to write flush to Git writer")?; Ok(self) } @@ -88,27 +134,39 @@ impl GitWriter { let with_band = [band.serialize(), b"0000"].concat(); self.inner.enable_binary_mode(); - self.inner.write(with_band.as_slice()).await.with_context(|| { - format!("Unable to write flush to sideband {} in Git writer", band) - })?; + self.inner + .write(with_band.as_slice()) + .await + .with_context(|| format!("Unable to write flush to sideband {} in Git writer", band))?; self.inner.enable_text_mode(); Ok(self) } pub(crate) async fn delimiter(&mut self) -> Result<&mut GitWriter> { - PacketLineRef::Delimiter.write_to(self.inner.inner_mut()).await.context("Unable to write delimiter to Git writer")?; + PacketLineRef::Delimiter + .write_to(self.inner.inner_mut()) + .await + .context("Unable to write delimiter to Git writer")?; Ok(self) } pub(crate) async fn response_end(&mut self) -> Result<&mut GitWriter> { - PacketLineRef::ResponseEnd.write_to(self.inner.inner_mut()).await.context("Unable to write response end to Git writer")?; + PacketLineRef::ResponseEnd + .write_to(self.inner.inner_mut()) + .await + .context("Unable to write response end to Git writer")?; Ok(self) } pub(crate) async fn append(&mut self, other: GitWriter) -> Result<&mut GitWriter> { - let serialized = other.serialize().await.context("Unable to write deserialize Git writer")?; - self.write_raw(serialized.to_vec().as_slice()).await.context("Unable to write other Git writer to Git writer")?; + let serialized = other + .serialize() + .await + .context("Unable to write deserialize Git writer")?; + self.write_raw(serialized.to_vec().as_slice()) + .await + .context("Unable to write other Git writer to Git writer")?; Ok(self) } diff --git a/src/git/ls_refs.rs b/src/git/ls_refs.rs index 711bc05..049b7fb 100644 --- a/src/git/ls_refs.rs +++ b/src/git/ls_refs.rs @@ -44,7 +44,9 @@ pub(crate) async fn ls_refs(input: Vec>, repo: &Git2Repository) -> Resul // HEAD is a special case as `repo.references_glob` does not find it but `repo.find_reference` does if prefix == "HEAD" { - if let Some(output_line) = 
build_ref_line(repo.find_reference("HEAD"), repo, &options).await { + if let Some(output_line) = + build_ref_line(repo.find_reference("HEAD"), repo, &options).await + { writer.write_text(output_line).await?; } } @@ -63,7 +65,11 @@ pub(crate) async fn ls_refs(input: Vec>, repo: &Git2Repository) -> Resul writer.serialize().await } -pub(crate) async fn build_ref_list(prefix: &str, repo: &Git2Repository, options: &LsRefs) -> Result> { +pub(crate) async fn build_ref_list( + prefix: &str, + repo: &Git2Repository, + options: &LsRefs, +) -> Result> { let mut output = Vec::::new(); for result in repo.references_glob(format!("{}*", prefix).as_str())? { @@ -76,7 +82,11 @@ pub(crate) async fn build_ref_list(prefix: &str, repo: &Git2Repository, options: } #[instrument(skip(ref_result, repo))] -pub(crate) async fn build_ref_line(ref_result: CoreResult, Git2Error>, repo: &Git2Repository, options: &LsRefs) -> Option { +pub(crate) async fn build_ref_line( + ref_result: CoreResult, Git2Error>, + repo: &Git2Repository, + options: &LsRefs, +) -> Option { return match ref_result { Ok(reference) => { let name = reference.name().unwrap_or_default(); @@ -89,14 +99,23 @@ pub(crate) async fn build_ref_line(ref_result: CoreResult, Git2Err match repo.find_reference(sym_target).ok() { Some(sym_target_ref) => { if let Some(sym_target_oid) = sym_target_ref.target() { - line = format!("{} {} symref-target:{}", sym_target_oid, name, sym_target_ref.name().unwrap_or_default()); + line = format!( + "{} {} symref-target:{}", + sym_target_oid, + name, + sym_target_ref.name().unwrap_or_default() + ); } else if options.unborn { - line = format!("unborn {} symref-target:{}", name, sym_target_ref.name().unwrap_or_default()); + line = format!( + "unborn {} symref-target:{}", + name, + sym_target_ref.name().unwrap_or_default() + ); } else { return None; } } - None => return None // Reference points to a symbolic target that doesn't exist? + None => return None, // Reference points to a symbolic target that doesn't exist? 
} } else if options.unborn { line = format!("unborn {}", name); @@ -111,7 +130,7 @@ pub(crate) async fn build_ref_line(ref_result: CoreResult, Git2Err } Some(line) - }, + } Err(e) => { if e.code() != ErrorCode::NotFound { error!("Failed to find reference asked for by Git client: {}", e); @@ -119,7 +138,7 @@ pub(crate) async fn build_ref_line(ref_result: CoreResult, Git2Err None } - } + }; } // Used by git-receive-pack ref discovery @@ -149,14 +168,23 @@ pub(crate) async fn ls_refs_all(repo: &Git2Repository) -> Result { } } Err(e) => { - warn!("Failed to grab repository references for {}: {}", repo.path().display(), e); + warn!( + "Failed to grab repository references for {}: {}", + repo.path().display(), + e + ); } } } // If we didn't tell the client our capabilities in the previous ref list, send a null ref with them if !once.is_completed() { - writer.write_text(format!("0000000000000000000000000000000000000000 capabilities^{{}}{}", receive_pack_capabilities())).await?; + writer + .write_text(format!( + "0000000000000000000000000000000000000000 capabilities^{{}}{}", + receive_pack_capabilities() + )) + .await?; } writer.flush().await?; @@ -172,5 +200,5 @@ pub(crate) struct LsRefs { pub(crate) peel: bool, pub(crate) symrefs: bool, pub(crate) prefixes: Vec, - pub(crate) unborn: bool + pub(crate) unborn: bool, } diff --git a/src/git/pack.rs b/src/git/pack.rs index 1b8619d..47eec62 100644 --- a/src/git/pack.rs +++ b/src/git/pack.rs @@ -3,12 +3,12 @@ use crate::repository::Repository; use std::io::BufReader; use std::path::PathBuf; -use std::sync::Arc; use std::sync::atomic::AtomicBool; +use std::sync::Arc; use anyhow::{anyhow, Result}; use git_repository::odb::pack::bundle::write::Options as GitPackWriteOptions; -use git_repository::odb::pack::data::input::{Mode as PackIterationMode}; +use git_repository::odb::pack::data::input::Mode as PackIterationMode; use git_repository::odb::pack::index::Version as PackVersion; use git_repository::odb::pack::{Bundle, FindExt}; use git_repository::progress; @@ -20,25 +20,35 @@ use tracing::instrument; /// Ensure that the third tuple argument, the temporary dir, is alive for the whole duration of your usage. 
/// It being dropped results in the index and pack file to be deleted and thus the paths becoming invalid #[instrument(err, skip(data, executor))] -pub(crate) async fn read<'e, E: Executor<'e, Database = Postgres>>(data: &[u8], repo: &Repository, executor: E) -> Result<(Option, Option, TempDir)> { +pub(crate) async fn read<'e, E: Executor<'e, Database = Postgres>>( + data: &[u8], + repo: &Repository, + executor: E, +) -> Result<(Option, Option, TempDir)> { let temp_dir = Builder::new().prefix("gitarena_").tempdir()?; match write_to_fs(data, &temp_dir, repo, executor).await { Ok((index_path, pack_path)) => Ok((Some(index_path), Some(pack_path), temp_dir)), - Err(err) => match err.to_string().as_str() { // Gitoxide does not export the error enum so this is a whacky workaround + Err(err) => match err.to_string().as_str() { + // Gitoxide does not export the error enum so this is a whacky workaround "Did not encounter a single base" => Ok((None, None, temp_dir)), - _ => Err(err) - } + _ => Err(err), + }, } } #[instrument(err, skip(data, executor))] -pub(crate) async fn write_to_fs<'e, E: Executor<'e, Database = Postgres>>(data: &[u8], temp_dir: &TempDir, repo: &Repository, executor: E) -> Result<(PathBuf, PathBuf)> { +pub(crate) async fn write_to_fs<'e, E: Executor<'e, Database = Postgres>>( + data: &[u8], + temp_dir: &TempDir, + repo: &Repository, + executor: E, +) -> Result<(PathBuf, PathBuf)> { let options = GitPackWriteOptions { thread_limit: Some(num_cpus::get()), iteration_mode: PackIterationMode::Verify, index_kind: PackVersion::V2, - object_hash: GIT_HASH_KIND + object_hash: GIT_HASH_KIND, }; let repo = repo.gitoxide(executor).await?; @@ -52,13 +62,21 @@ pub(crate) async fn write_to_fs<'e, E: Executor<'e, Database = Postgres>>(data: progress::Discard, &AtomicBool::new(false), // The Actix runtime (+ tokio) handles timeouts for us Some(Box::new(move |oid, buffer| { - objects.to_cache_arc().find(oid, buffer).ok().map(|(data, _)| data) + objects + .to_cache_arc() + .find(oid, buffer) + .ok() + .map(|(data, _)| data) })), - options + options, )?; - let index_path = bundle.index_path.ok_or_else(|| anyhow!("Failed to unpack index file"))?; - let data_path = bundle.data_path.ok_or_else(|| anyhow!("Failed to unpack data file"))?; + let index_path = bundle + .index_path + .ok_or_else(|| anyhow!("Failed to unpack index file"))?; + let data_path = bundle + .data_path + .ok_or_else(|| anyhow!("Failed to unpack data file"))?; Ok((index_path, data_path)) } diff --git a/src/git/receive_pack.rs b/src/git/receive_pack.rs index 54a65de..7d820a6 100644 --- a/src/git/receive_pack.rs +++ b/src/git/receive_pack.rs @@ -1,7 +1,7 @@ -use crate::git::GIT_HASH_KIND; use crate::git::io::band::Band; use crate::git::io::writer::GitWriter; use crate::git::ref_update::RefUpdate; +use crate::git::GIT_HASH_KIND; use crate::prelude::*; use crate::repository::Repository; use crate::utils::oid; @@ -21,13 +21,22 @@ use git_repository::odb::pack::data::{File as DataFile, ResolvedBase}; use git_repository::odb::pack::index::File as IndexFile; use git_repository::odb::pack::{cache, FindExt}; use git_repository::odb::Store; -use git_repository::refs::Target; use git_repository::refs::transaction::{Change, LogChange, PreviousValue, RefEdit, RefLog}; +use git_repository::refs::Target; use sqlx::{Executor, PgPool, Postgres}; use tracing::instrument; #[instrument(err, skip(writer, store))] -pub(crate) async fn process_create_update(ref_update: &RefUpdate, repo: &Repository, store: Arc, db_pool: &PgPool, writer: &mut GitWriter, 
index_path: Option<&PathBuf>, pack_path: Option<&PathBuf>, raw_pack: &[u8]) -> Result<()> { +pub(crate) async fn process_create_update( + ref_update: &RefUpdate, + repo: &Repository, + store: Arc, + db_pool: &PgPool, + writer: &mut GitWriter, + index_path: Option<&PathBuf>, + pack_path: Option<&PathBuf>, + raw_pack: &[u8], +) -> Result<()> { assert!(ref_update.new.is_some()); let mut transaction = db_pool.begin().await?; @@ -42,7 +51,9 @@ pub(crate) async fn process_create_update(ref_update: &RefUpdate, repo: &Reposit (Some(index_path), Some(pack_path)) => { let index_file = IndexFile::at(index_path, GIT_HASH_KIND)?; - let index = index_file.lookup(new_oid.as_ref()).ok_or_else(|| anyhow!("Failed to lookup new oid in index file"))?; + let index = index_file + .lookup(new_oid.as_ref()) + .ok_or_else(|| anyhow!("Failed to lookup new oid in index file"))?; let offset = index_file.pack_offset_at_index(index); let data_file = DataFile::at(pack_path, GIT_HASH_KIND)?; @@ -61,24 +72,29 @@ pub(crate) async fn process_create_update(ref_update: &RefUpdate, repo: &Reposit Some(ResolvedBase::InPack(entry)) } else { - store.to_cache_arc().find(oid, vec).ok().map(|(data, _)| ResolvedBase::OutOfPack { - kind: data.kind, - end: data.data.len() + store.to_cache_arc().find(oid, vec).ok().map(|(data, _)| { + ResolvedBase::OutOfPack { + kind: data.kind, + end: data.data.len(), + } }) } }, - &mut cache::Never + &mut cache::Never, )?; match outcome.kind { Kind::Commit => CommitRef::from_bytes(buffer.as_slice())?, - _ => die!(BAD_REQUEST, "Unexpected payload data type") + _ => die!(BAD_REQUEST, "Unexpected payload data type"), } - }, + } _ => { // This is a force push to an existing repository // TODO: Handle non existing refs as client errors instead of server errors - store.to_cache_arc().find_commit(new_oid.as_ref(), &mut buffer).map(|(data, _)| data)? + store + .to_cache_arc() + .find_commit(new_oid.as_ref(), &mut buffer) + .map(|(data, _)| data)? } }; @@ -91,25 +107,25 @@ pub(crate) async fn process_create_update(ref_update: &RefUpdate, repo: &Reposit PreviousValue::Any }; - let edits = vec![ - RefEdit { - change: Change::Update { - log: LogChange { - mode: RefLog::AndReference, - force_create_reflog: true, - message: BString::from(commit.message) - }, - expected: previous_value, - new: Target::Peeled(new_oid), + let edits = vec![RefEdit { + change: Change::Update { + log: LogChange { + mode: RefLog::AndReference, + force_create_reflog: true, + message: BString::from(commit.message), }, - name: ref_update.target_ref.as_str().try_into()?, - deref: true - } - ]; + expected: previous_value, + new: Target::Peeled(new_oid), + }, + name: ref_update.target_ref.as_str().try_into()?, + deref: true, + }]; let gitoxide_repo = repo.gitoxide(&mut transaction).await?; - gitoxide_repo.refs.transaction() + gitoxide_repo + .refs + .transaction() .prepare(edits, Fail::Immediately) .map_err(|err| anyhow!("Failed to commit transaction: {}", err))? 
.commit(&Signature::from(commit.committer))?; @@ -128,39 +144,55 @@ pub(crate) async fn process_create_update(ref_update: &RefUpdate, repo: &Reposit } if ref_update.report_status || ref_update.report_status_v2 { - writer.write_text_sideband_pktline(Band::Data, format!("ok {}", ref_update.target_ref)).await?; + writer + .write_text_sideband_pktline(Band::Data, format!("ok {}", ref_update.target_ref)) + .await?; } Ok(()) } #[instrument(err, skip(writer))] -pub(crate) async fn process_delete<'e, E: Executor<'e, Database = Postgres>>(ref_update: &RefUpdate, repo: &Repository, executor: E, writer: &mut GitWriter) -> Result<()> { +pub(crate) async fn process_delete<'e, E: Executor<'e, Database = Postgres>>( + ref_update: &RefUpdate, + repo: &Repository, + executor: E, + writer: &mut GitWriter, +) -> Result<()> { assert!(ref_update.old.is_some()); assert!(ref_update.new.is_none()); let gitoxide_repo = repo.gitoxide(executor).await?; - let object_id = oid::from_hex_str(ref_update.old.as_deref()).map_err(|_| err!(NOT_FOUND, "Ref does not exist"))?; - - let edits = vec![ - RefEdit { - change: Change::Delete { - expected: PreviousValue::MustExistAndMatch(Target::Peeled(object_id)), - log: RefLog::AndReference - }, - name: ref_update.target_ref.as_str().try_into()?, - deref: true - } - ]; - - gitoxide_repo.refs.transaction() + let object_id = oid::from_hex_str(ref_update.old.as_deref()) + .map_err(|_| err!(NOT_FOUND, "Ref does not exist"))?; + + let edits = vec![RefEdit { + change: Change::Delete { + expected: PreviousValue::MustExistAndMatch(Target::Peeled(object_id)), + log: RefLog::AndReference, + }, + name: ref_update.target_ref.as_str().try_into()?, + deref: true, + }]; + + gitoxide_repo + .refs + .transaction() .prepare(edits, Fail::Immediately) - .map_err(|err| err!(INTERNAL_SERVER_ERROR, "Failed to commit transaction: {}", err))? + .map_err(|err| { + err!( + INTERNAL_SERVER_ERROR, + "Failed to commit transaction: {}", + err + ) + })? .commit(&Signature::gitarena_default())?; if ref_update.report_status || ref_update.report_status_v2 { - writer.write_text_sideband_pktline(Band::Data, format!("ok {}", ref_update.target_ref)).await?; + writer + .write_text_sideband_pktline(Band::Data, format!("ok {}", ref_update.target_ref)) + .await?; } Ok(()) diff --git a/src/git/ref_update.rs b/src/git/ref_update.rs index 0e6bca6..3706094 100644 --- a/src/git/ref_update.rs +++ b/src/git/ref_update.rs @@ -7,17 +7,32 @@ use tracing::instrument; pub(crate) async fn parse_line(raw_line: Vec) -> Result { let line = String::from_utf8(raw_line)?; let mut ref_update = RefUpdate::default(); - let mut split = line.split(|c: char| { - c.is_whitespace() || c == '\x00' - }).filter(|s| !s.is_empty()); + let mut split = line + .split(|c: char| c.is_whitespace() || c == '\x00') + .filter(|s| !s.is_empty()); - let old_ref = split.next().ok_or_else::(|| anyhow!("Failed to parse ref update payload. Expected old ref, got: {}", line.clone()))?; - let new_ref = split.next().ok_or_else::(|| anyhow!("Failed to parse ref update payload. Expected new ref, got: {}", line.clone()))?; + let old_ref = split.next().ok_or_else::(|| { + anyhow!( + "Failed to parse ref update payload. Expected old ref, got: {}", + line.clone() + ) + })?; + let new_ref = split.next().ok_or_else::(|| { + anyhow!( + "Failed to parse ref update payload. 
Expected new ref, got: {}", + line.clone() + ) + })?; ref_update.old = oid::normalize_str(Some(old_ref)).map(|o| o.to_owned()); ref_update.new = oid::normalize_str(Some(new_ref)).map(|o| o.to_owned()); - let target_ref = split.next().ok_or_else::(|| anyhow!("Failed to parse ref update payload. Expected target ref, got: {}", line.clone()))?; + let target_ref = split.next().ok_or_else::(|| { + anyhow!( + "Failed to parse ref update payload. Expected target ref, got: {}", + line.clone() + ) + })?; if !target_ref.starts_with("refs/") { bail!("Received target ref which does not start with \"refs/\", is this a partial ref instead of a FQN? Got: {}", target_ref); @@ -30,15 +45,13 @@ pub(crate) async fn parse_line(raw_line: Vec) -> Result { "report-status" => ref_update.report_status = true, "report-status-v2" => ref_update.report_status_v2 = true, "side-band-64k" => ref_update.side_band_64k = true, - _ => { - match ref_update.push_options { - Some(ref mut options) => options.push(option.to_owned()), - None => { - let vec = vec![option.to_owned()]; - ref_update.push_options = Some(vec); - } + _ => match ref_update.push_options { + Some(ref mut options) => options.push(option.to_owned()), + None => { + let vec = vec![option.to_owned()]; + ref_update.push_options = Some(vec); } - } + }, } } @@ -49,7 +62,7 @@ pub(crate) async fn is_only_deletions(updates: &[RefUpdate]) -> Result { for update in updates { match RefUpdateType::determinate(&update.old, &update.new).await? { RefUpdateType::Delete => continue, - _ => return Ok(false) + _ => return Ok(false), } } @@ -64,22 +77,27 @@ pub(crate) struct RefUpdate { pub(crate) report_status: bool, pub(crate) report_status_v2: bool, pub(crate) side_band_64k: bool, - pub(crate) push_options: Option> + pub(crate) push_options: Option>, } pub(crate) enum RefUpdateType { Create, Delete, - Update + Update, } impl RefUpdateType { - pub(crate) async fn determinate(old: &Option, new: &Option) -> Result { + pub(crate) async fn determinate( + old: &Option, + new: &Option, + ) -> Result { match (old, new) { - (None, None) => bail!("Unable to determinate ref update type, both old and new OID are None"), + (None, None) => { + bail!("Unable to determinate ref update type, both old and new OID are None") + } (None, Some(_)) => Ok(RefUpdateType::Create), (Some(_), None) => Ok(RefUpdateType::Delete), - (Some(_), Some(_)) => Ok(RefUpdateType::Update) + (Some(_), Some(_)) => Ok(RefUpdateType::Update), } } } diff --git a/src/git/utils.rs b/src/git/utils.rs index 13d4cfc..d576948 100644 --- a/src/git/utils.rs +++ b/src/git/utils.rs @@ -14,7 +14,12 @@ use tracing::instrument; #[instrument(err, skip(store, repo))] #[async_recursion(?Send)] -pub(crate) async fn repo_files_at_ref<'a>(reference: &Reference, store: Arc, repo: &'a Repository, buffer: &'a mut Vec) -> Result> { +pub(crate) async fn repo_files_at_ref<'a>( + reference: &Reference, + store: Arc, + repo: &'a Repository, + buffer: &'a mut Vec, +) -> Result> { match &reference.target { Target::Peeled(object_id) => { let cache = store.to_cache_arc(); @@ -32,7 +37,11 @@ pub(crate) async fn repo_files_at_ref<'a>(reference: &Reference, store: Arc(store: Arc, repo: &'a Repository, buffer: &'a mut Vec) -> Result> { +pub(crate) async fn repo_files_at_head<'a>( + store: Arc, + repo: &'a Repository, + buffer: &'a mut Vec, +) -> Result> { let reference = repo.refs.find_loose("HEAD")?; repo_files_at_ref(&reference, store, repo, buffer).await diff --git a/src/git/write.rs b/src/git/write.rs index a33dd3f..e316ba4 100644 --- 
a/src/git/write.rs +++ b/src/git/write.rs @@ -7,7 +7,14 @@ use git2::{Repository as LibGit2Repo, Signature}; use sqlx::{Pool, Postgres}; /// Writes and commits a file into the repository -pub(crate) async fn write_file(repo: &LibGit2Repo, user: &User, branch: Option<&str>, file_name: &str, content: &[u8], db_pool: &Pool) -> Result<()> { +pub(crate) async fn write_file( + repo: &LibGit2Repo, + user: &User, + branch: Option<&str>, + file_name: &str, + content: &[u8], + db_pool: &Pool, +) -> Result<()> { let mut transaction = db_pool.begin().await?; let author_email = Email::find_commit_email(user, &mut transaction) @@ -20,8 +27,12 @@ pub(crate) async fn write_file(repo: &LibGit2Repo, user: &User, branch: Option<& let blob = repo.blob(content).context("Failed to create blob")?; - let mut tree_builder = repo.treebuilder(None).context("Failed to acquire tree builder")?; - tree_builder.insert(file_name, blob, 0o100644).context("Failed to create blob")?; + let mut tree_builder = repo + .treebuilder(None) + .context("Failed to acquire tree builder")?; + tree_builder + .insert(file_name, blob, 0o100644) + .context("Failed to create blob")?; let tree_oid = tree_builder.write().context("Failed to write tree")?; let tree = repo.find_tree(tree_oid)?; @@ -32,8 +43,9 @@ pub(crate) async fn write_file(repo: &LibGit2Repo, user: &User, branch: Option<& &root_signature, "Initial commit", &tree, - &[] - ).context("Failed to commit")?; + &[], + ) + .context("Failed to commit")?; transaction.commit().await?; diff --git a/src/ipc.rs b/src/ipc.rs index 3d51858..2b7c3f3 100644 --- a/src/ipc.rs +++ b/src/ipc.rs @@ -10,7 +10,7 @@ use tokio::io::AsyncWriteExt; use tracing_unwrap::ResultExt; pub(crate) struct Ipc { - connection: Option + connection: Option, } impl Ipc { @@ -30,9 +30,7 @@ impl Ipc { } }; - Ok(Self { - connection - }) + Ok(Self { connection }) } pub(crate) async fn connect() -> Result { @@ -45,7 +43,8 @@ impl Ipc { let packet = IpcPacket::new(packet); let bytes = packet.serialize().context("Failed to serialize packet")?; - self.connection.as_mut() + self.connection + .as_mut() .ok_or_else(|| anyhow!("Not connected to workhorse"))? 
.write_all(bytes.as_slice()) .await @@ -76,7 +75,10 @@ pub(crate) fn spawn_connection_task(data: RwLock) { info!("Successfully connected to workhorse at {}", ipc_path); break; } - Err(err) => debug!("Failed to re-establish connection to workhorse, retrying in 60 seconds: {}", err) + Err(err) => debug!( + "Failed to re-establish connection to workhorse, retrying in 60 seconds: {}", + err + ), } } }); diff --git a/src/issue.rs b/src/issue.rs index 9aa7604..3b85df4 100644 --- a/src/issue.rs +++ b/src/issue.rs @@ -27,5 +27,5 @@ pub(crate) struct Issue { #[serde(with = "ts_seconds")] created_at: DateTime, #[serde(with = "ts_seconds")] - updated_at: DateTime + updated_at: DateTime, } diff --git a/src/licenses.rs b/src/licenses.rs index 73037f8..0bde277 100644 --- a/src/licenses.rs +++ b/src/licenses.rs @@ -16,7 +16,8 @@ pub(crate) async fn init() { } fn init_askalono() -> Store { - let file = File::open("askalono-cache.bin.zstd").expect_or_log("Failed to open askalono cache file"); + let file = + File::open("askalono-cache.bin.zstd").expect_or_log("Failed to open askalono cache file"); Store::from_cache(file).expect_or_log("Failed to parse askalono cache file") } @@ -27,7 +28,23 @@ pub(crate) fn store() -> &'static Store { pub(crate) const fn license_file_names() -> [&'static [u8]; 18] { [ - b"copying", b"copyright", b"eula", b"license", b"notice", b"patents", b"unlicense", b"agpl", b"gpl", - b"lgpl", b"apache-", b"bsd-", b"cc-by-", b"gfdl-", b"gnu-", b"mit-", b"mpl-", b"ofl-" + b"copying", + b"copyright", + b"eula", + b"license", + b"notice", + b"patents", + b"unlicense", + b"agpl", + b"gpl", + b"lgpl", + b"apache-", + b"bsd-", + b"cc-by-", + b"gfdl-", + b"gnu-", + b"mit-", + b"mpl-", + b"ofl-", ] } diff --git a/src/mail.rs b/src/mail.rs index 5a7ed71..5f65245 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -37,7 +37,7 @@ pub(crate) struct Email { pub(crate) public: bool, pub(crate) created_at: DateTime, - pub(crate) verified_at: Option> + pub(crate) verified_at: Option>, } impl Email { @@ -51,18 +51,30 @@ impl Email { match self.verified_at { Some(_) => true, - None => self.created_at.signed_duration_since(Local::now()).num_hours() < 24 + None => { + self.created_at + .signed_duration_since(Local::now()) + .num_hours() + < 24 + } } } } macro_rules! 
generate_find { ($method_name:ident, $field:literal) => { - pub(crate) async fn $method_name<'e, E: Executor<'e, Database = Postgres>, U: Into>(user: U, executor: E) -> Result> { - let query = concat!("select * from emails where owner = $1 and ", $field, " = true limit 1"); + pub(crate) async fn $method_name<'e, E: Executor<'e, Database = Postgres>, U: Into>( + user: U, + executor: E, + ) -> Result> { + let query = concat!( + "select * from emails where owner = $1 and ", + $field, + " = true limit 1" + ); Email::find_specific_email(user, query, executor).await } - } + }; } impl Email { @@ -72,9 +84,14 @@ impl Email { generate_find!(find_public_email, "public"); // Private helper called by the functions defined using the `generate_find!` macro - async fn find_specific_email<'e, E, U>(user: U, query: &'static str, executor: E) -> Result> - where E: Executor<'e, Database = Postgres>, - U: Into + async fn find_specific_email<'e, E, U>( + user: U, + query: &'static str, + executor: E, + ) -> Result> + where + E: Executor<'e, Database = Postgres>, + U: Into, { let email: Option = sqlx::query_as(query) .bind(user.into()) @@ -107,7 +124,7 @@ impl Debug for Email { f.write_str(match self.verified_at { Some(_) => ", verified", - None => ", NOT verified" + None => ", NOT verified", })?; f.write_char(')') @@ -127,7 +144,12 @@ pub(crate) async fn get_root_mailbox(db_pool: &Pool) -> Result) -> Result<()> { +pub(crate) async fn send_user_mail( + user: &User, + subject: &str, + body: String, + db_pool: &Pool, +) -> Result<()> { // This is in an extra block so `transaction` gets dropped early let email = { let mut transaction = db_pool.begin().await?; @@ -176,7 +198,10 @@ async fn send_mail(message: Message, db_pool: &Pool) -> Result<()> { .build() }; - transporter.send(message).await.context("Unable to send email")?; + transporter + .send(message) + .await + .context("Unable to send email")?; Ok(()) } diff --git a/src/main.rs b/src/main.rs index a159869..0b1adba 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,8 +6,8 @@ use crate::sse::Broadcaster; use crate::utils::admin_panel_layer::AdminPanelLayer; use crate::utils::system::SYSTEM_INFO; -use std::env::VarError; use std::env; +use std::env::VarError; use std::sync::Arc; use actix_files::Files; @@ -15,10 +15,10 @@ use actix_identity::{CookieIdentityPolicy, IdentityService}; use actix_web::body::{BoxBody, EitherBody}; use actix_web::cookie::SameSite; use actix_web::dev::{Service, ServiceResponse}; -use actix_web::http::header::{ACCESS_CONTROL_ALLOW_ORIGIN, CACHE_CONTROL, HeaderValue, LOCATION}; +use actix_web::http::header::{HeaderValue, ACCESS_CONTROL_ALLOW_ORIGIN, CACHE_CONTROL, LOCATION}; use actix_web::http::Method; use actix_web::middleware::{NormalizePath, TrailingSlash}; -use actix_web::web::{Data, route, to}; +use actix_web::web::{route, to, Data}; use actix_web::{App, HttpResponse, HttpServer}; use anyhow::{anyhow, Context, Result}; use futures_locks::RwLock; @@ -70,9 +70,11 @@ async fn main() -> Result<()> { let _ = SYSTEM_INFO.read().await; let _watcher = templates::init().await?; - let bind_address = env::var("BIND_ADDRESS").context("Unable to read mandatory BIND_ADDRESS environment variable")?; + let bind_address = env::var("BIND_ADDRESS") + .context("Unable to read mandatory BIND_ADDRESS environment variable")?; - let (secret, domain): (Option, Option) = from_optional_config!("secret" => String, "domain" => String); + let (secret, domain): (Option, Option) = + from_optional_config!("secret" => String, "domain" => String); let secret = 
secret.ok_or_else(|| anyhow!("Unable to read secret from database"))?; let secure = domain.map_or_else(|| false, |d| d.starts_with("https")); @@ -89,7 +91,7 @@ async fn main() -> Result<()> { .max_age(TimeDuration::days(10)) .http_only(true) .same_site(SameSite::Lax) - .secure(secure) + .secure(secure), ); let cookie = Arc::new(read_magic_database().expect_or_log("Failed to libmagic database")); @@ -110,44 +112,58 @@ async fn main() -> Result<()> { // https://git-scm.com/docs/http-protocol/en#_smart_server_response // "Cache-Control headers SHOULD be used to disable caching of the returned entity." res.headers_mut().insert( - CACHE_CONTROL, HeaderValue::from_static("no-cache, max-age=0, must-revalidate"), + CACHE_CONTROL, + HeaderValue::from_static("no-cache, max-age=0, must-revalidate"), ); } if res.request().path().starts_with("/api") { - res.headers_mut().insert( - ACCESS_CONTROL_ALLOW_ORIGIN, HeaderValue::from_static("*"), - ); + res.headers_mut() + .insert(ACCESS_CONTROL_ALLOW_ORIGIN, HeaderValue::from_static("*")); } Ok(res) } }) .wrap_fn(error_renderer_middleware) - .default_service(route().method(Method::GET).to(routes::not_found::default_handler)) + .default_service( + route() + .method(Method::GET) + .to(routes::not_found::default_handler), + ) .service(routes::admin::all()) .configure(routes::init) .configure(routes::proxy::init) .configure(routes::user::init) .configure(routes::repository::init) // Repository routes need to be always last - .route("/favicon.ico", to(|| async { - HttpResponse::MovedPermanently().append_header((LOCATION, "/static/img/favicon.ico")).finish() - })); + .route( + "/favicon.ico", + to(|| async { + HttpResponse::MovedPermanently() + .append_header((LOCATION, "/static/img/favicon.ico")) + .finish() + }), + ); let debug_mode = cfg!(debug_assertions); - let serve_static = matches!(env::var("SERVE_STATIC_FILES"), Ok(_) | Err(VarError::NotUnicode(_))) || debug_mode; + let serve_static = matches!( + env::var("SERVE_STATIC_FILES"), + Ok(_) | Err(VarError::NotUnicode(_)) + ) || debug_mode; if serve_static { app = app.service( Files::new("/static", "./static") .use_etag(!debug_mode) .use_last_modified(!debug_mode) - .use_hidden_files() + .use_hidden_files(), ); } app - }).bind(bind_address.as_str()).context("Unable to bind HTTP server.")?; + }) + .bind(bind_address.as_str()) + .context("Unable to bind HTTP server.")?; server.run().await.context("Unable to start HTTP server.")?; @@ -161,17 +177,22 @@ async fn main() -> Result<()> { fn init_logger(broadcaster: Data>) -> Result> { let mut guards = Vec::new(); - let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|err| default_env(err, &[ - "actix_http=info", - "actix_server=info", - "askalono=warn", - "globset=info", - "h2=info", - "hyper=info", - "reqwest=info", - "rustls=info", - "sqlx=warn" - ])); + let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|err| { + default_env( + err, + &[ + "actix_http=info", + "actix_server=info", + "askalono=warn", + "globset=info", + "h2=info", + "hyper=info", + "reqwest=info", + "rustls=info", + "sqlx=warn", + ], + ) + }); let stdout_layer = stdout().map(|(layer, guard)| { guards.push(guard); @@ -203,7 +224,9 @@ fn read_magic_database() -> Result { // https://man7.org/linux/man-pages/man3/libmagic.3.html let database_path = if let Some(magic_env) = env::var_os("MAGIC") { - magic_env.into_string().expect_or_log("`MAGIC` environment variable contains invalid UTF-8 string") + magic_env + .into_string() + .expect_or_log("`MAGIC` environment variable 
contains invalid UTF-8 string") } else { "magic".to_owned() }; diff --git a/src/prelude.rs b/src/prelude.rs index bfd6775..e8e175b 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -111,18 +111,27 @@ impl LibGit2TimeExtensions for LibGit2Time { let abs_offset_seconds = abs_offset_minutes * 60; let offset = match self.sign() { - '+' => FixedOffset::east_opt(abs_offset_seconds).ok_or_else(|| anyhow!("Offset out of bounds"))?, - '-' => FixedOffset::west_opt(abs_offset_seconds).ok_or_else(|| anyhow!("Offset out of bounds"))?, - _ => unreachable!("unexpected sign: {}", self.sign()) + '+' => FixedOffset::east_opt(abs_offset_seconds) + .ok_or_else(|| anyhow!("Offset out of bounds"))?, + '-' => FixedOffset::west_opt(abs_offset_seconds) + .ok_or_else(|| anyhow!("Offset out of bounds"))?, + _ => unreachable!("unexpected sign: {}", self.sign()), }; match offset.timestamp_opt(self.seconds(), 0) { LocalResult::Single(date_time) => Ok(date_time), LocalResult::Ambiguous(min, max) => { - warn!("Received ambiguous result for commit: {} and {}", &min, &max); + warn!( + "Received ambiguous result for commit: {} and {}", + &min, &max + ); Ok(min) - }, - LocalResult::None => bail!("Cannot convert to UNIX time {} to DateTime<{}>", self.seconds(), offset) + } + LocalResult::None => bail!( + "Cannot convert to UNIX time {} to DateTime<{}>", + self.seconds(), + offset + ), } } } @@ -153,20 +162,30 @@ pub(crate) trait LibGit2SignatureExtensions { /// ``` /// /// [signature]: git2::Signature - async fn try_disassemble<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> (String, Option, String); + async fn try_disassemble<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> (String, Option, String); } #[async_trait(?Send)] impl LibGit2SignatureExtensions for LibGit2Signature<'_> { - async fn try_disassemble<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> (String, Option, String) { + async fn try_disassemble<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> (String, Option, String) { let email = self.email().unwrap_or("Invalid email address"); - User::find_using_email(email, executor) - .await - .map_or_else( - || (self.name().unwrap_or("Ghost").to_owned(), None, email.to_owned()), - |user| (user.username, Some(user.id), email.to_owned()) - ) + User::find_using_email(email, executor).await.map_or_else( + || { + ( + self.name().unwrap_or("Ghost").to_owned(), + None, + email.to_owned(), + ) + }, + |user| (user.username, Some(user.id), email.to_owned()), + ) } } @@ -187,8 +206,8 @@ impl GitoxideSignatureExtensions for GitoxideSignature { time: GitoxideTime { time: naive.timestamp() as u32, offset: 0, - sign: Sign::Plus - } + sign: Sign::Plus, + }, } } } @@ -216,7 +235,11 @@ impl ContextExtensions for Context { } } -pub(crate) const USER_AGENT_STR: &str = concat!("GitArena v", env!("CARGO_PKG_VERSION"), " (https://github.com/mellowagain/gitarena/)"); +pub(crate) const USER_AGENT_STR: &str = concat!( + "GitArena v", + env!("CARGO_PKG_VERSION"), + " (https://github.com/mellowagain/gitarena/)" +); pub(crate) trait AwcExtensions { /// Returns a [Client](awc::client::Client) configured with GitArena's default user agent diff --git a/src/privileges/mod.rs b/src/privileges/mod.rs index a57c3bd..df5c72d 100644 --- a/src/privileges/mod.rs +++ b/src/privileges/mod.rs @@ -1,3 +1,3 @@ +pub(crate) mod privilege; pub(crate) mod repo_access; pub(crate) mod repo_visibility; -pub(crate) mod privilege; diff --git a/src/privileges/privilege.rs 
b/src/privileges/privilege.rs index 580eb05..640e4d9 100644 --- a/src/privileges/privilege.rs +++ b/src/privileges/privilege.rs @@ -11,17 +11,26 @@ pub(crate) struct Privilege { pub(crate) id: i32, pub(crate) user_id: i32, pub(crate) repo_id: i32, - pub(crate) access_level: AccessLevel + pub(crate) access_level: AccessLevel, } macro_rules! generate_check { ($name:ident, $target:ident) => { - pub(crate) async fn $name<'e, E: Executor<'e, Database = Postgres>>(repo: &Repository, user: Option<&User>, executor: E) -> Result { + pub(crate) async fn $name<'e, E: Executor<'e, Database = Postgres>>( + repo: &Repository, + user: Option<&User>, + executor: E, + ) -> Result { Ok(if let Some(user) = user { if &user.id != &repo.owner && !user.admin { get_repo_privilege(repo, user, executor) .await - .with_context(|| format!("Unable to get repo privileges for user {} in repo {}", &user.id, &repo.id))? + .with_context(|| { + format!( + "Unable to get repo privileges for user {} in repo {}", + &user.id, &repo.id + ) + })? .map_or_else(|| false, |privilege| privilege.access_level.$target()) } else { true @@ -30,10 +39,14 @@ macro_rules! generate_check { false }) } - } + }; } -pub(crate) async fn check_access<'e, E: Executor<'e, Database = Postgres>>(repo: &Repository, user: Option<&User>, executor: E) -> Result { +pub(crate) async fn check_access<'e, E: Executor<'e, Database = Postgres>>( + repo: &Repository, + user: Option<&User>, + executor: E, +) -> Result { if repo.disabled { return Ok(user.map_or_else(|| false, |user| user.admin)); } @@ -44,7 +57,12 @@ pub(crate) async fn check_access<'e, E: Executor<'e, Database = Postgres>>(repo: if user.id != repo.owner && !user.admin { get_repo_privilege(repo, user, executor) .await - .with_context(|| format!("Unable to get repo privileges for user {} in repo {}", &user.id, &repo.id))? + .with_context(|| { + format!( + "Unable to get repo privileges for user {} in repo {}", + &user.id, &repo.id + ) + })? .map_or_else(|| false, |privilege| privilege.access_level.can_view()) } else { true @@ -54,7 +72,7 @@ pub(crate) async fn check_access<'e, E: Executor<'e, Database = Postgres>>(repo: } } RepoVisibility::Internal => user.is_some(), - RepoVisibility::Public => true + RepoVisibility::Public => true, }) } @@ -62,10 +80,16 @@ generate_check!(check_manage_issues, can_manage_issues); generate_check!(check_push, can_push); generate_check!(check_admin, can_admin); -async fn get_repo_privilege<'e, E: Executor<'e, Database = Postgres>>(repo: &Repository, user: &User, executor: E) -> Result> { - Ok(sqlx::query_as::<_, Privilege>("select * from privileges where user_id = $1 and repo_id = $2 limit 1") - .bind(&user.id) - .bind(&repo.id) - .fetch_optional(executor) - .await?) +async fn get_repo_privilege<'e, E: Executor<'e, Database = Postgres>>( + repo: &Repository, + user: &User, + executor: E, +) -> Result> { + Ok(sqlx::query_as::<_, Privilege>( + "select * from privileges where user_id = $1 and repo_id = $2 limit 1", + ) + .bind(&user.id) + .bind(&repo.id) + .fetch_optional(executor) + .await?) 
} diff --git a/src/privileges/repo_access.rs b/src/privileges/repo_access.rs index c8c6484..ee871b1 100644 --- a/src/privileges/repo_access.rs +++ b/src/privileges/repo_access.rs @@ -10,7 +10,7 @@ pub(crate) enum AccessLevel { Supporter, Coder, Manager, - Admin + Admin, } // Currently all these methods are hard coded but in the future they will be configurable on a per repo/org basis @@ -22,14 +22,14 @@ impl AccessLevel { pub(crate) fn can_manage_issues(&self) -> bool { match self { AccessLevel::Viewer | AccessLevel::Coder => false, - AccessLevel::Supporter | AccessLevel::Manager | AccessLevel::Admin => true + AccessLevel::Supporter | AccessLevel::Manager | AccessLevel::Admin => true, } } pub(crate) fn can_push(&self) -> bool { match self { AccessLevel::Viewer | AccessLevel::Supporter => false, - AccessLevel::Coder | AccessLevel::Manager | AccessLevel::Admin => true + AccessLevel::Coder | AccessLevel::Manager | AccessLevel::Admin => true, } } diff --git a/src/privileges/repo_visibility.rs b/src/privileges/repo_visibility.rs index 5c43535..f95628b 100644 --- a/src/privileges/repo_visibility.rs +++ b/src/privileges/repo_visibility.rs @@ -8,5 +8,5 @@ use sqlx::Type; pub(crate) enum RepoVisibility { Public, Internal, - Private + Private, } diff --git a/src/repository.rs b/src/repository.rs index 6b4d52d..e0a8c7e 100644 --- a/src/repository.rs +++ b/src/repository.rs @@ -40,30 +40,40 @@ pub(crate) struct Repository { pub(crate) mirrored_from: Option, pub(crate) archived: bool, - pub(crate) disabled: bool + pub(crate) disabled: bool, } impl Repository { - pub(crate) async fn open<'e, E, I, S>(user_id: I, repo_name: S, executor: E) -> Option - where E: Executor<'e, Database = Postgres>, - I: Into, - S: AsRef + pub(crate) async fn open<'e, E, I, S>( + user_id: I, + repo_name: S, + executor: E, + ) -> Option + where + E: Executor<'e, Database = Postgres>, + I: Into, + S: AsRef, { let user_id = user_id.into(); let repo_name = repo_name.as_ref(); - let repo: Option = sqlx::query_as::<_, Repository>("select * from repositories where owner = $1 and lower(name) = lower($2) limit 1") - .bind(&user_id) - .bind(repo_name) - .fetch_optional(executor) - .await - .ok() - .flatten(); + let repo: Option = sqlx::query_as::<_, Repository>( + "select * from repositories where owner = $1 and lower(name) = lower($2) limit 1", + ) + .bind(&user_id) + .bind(repo_name) + .fetch_optional(executor) + .await + .ok() + .flatten(); repo } - pub(crate) async fn create_fs<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<()> { + pub(crate) async fn create_fs<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result<()> { let mut init_ops = RepositoryInitOptions::new(); init_ops.initial_head(self.default_branch.as_str()); init_ops.bare(true); @@ -73,31 +83,45 @@ impl Repository { Ok(()) } - pub(crate) async fn libgit2<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + pub(crate) async fn libgit2<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { Ok(Git2Repository::open(self.get_fs_path(executor).await?)?) } - pub(crate) async fn gitoxide<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { - Ok(GitoxideRepository::discover(self.get_fs_path(executor).await?)?) + pub(crate) async fn gitoxide<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { + Ok(GitoxideRepository::discover( + self.get_fs_path(executor).await?, + )?) 
} - pub(crate) async fn get_fs_path<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + pub(crate) async fn get_fs_path<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { // Instead of using `config::get_optional_setting`, we run our own query to get both username and repo base dir in one query // https://stackoverflow.com/a/16364390 let (base_dir, username): (String, String) = sqlx::query_as( "select * from \ (select value from settings where key = 'repositories.base_dir' limit 1) A \ cross join \ - (select username from users where id = $1 limit 1) B" + (select username from users where id = $1 limit 1) B", ) - .bind(&self.owner) - .fetch_one(executor) - .await?; + .bind(&self.owner) + .fetch_one(executor) + .await?; Ok(format!("{}/{}/{}", base_dir, username, &self.name)) } - pub(crate) async fn repo_size<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + pub(crate) async fn repo_size<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { Ok(dir::get_size(self.get_fs_path(executor).await?)?) } } @@ -111,8 +135,16 @@ impl FromRequest for Repository { // If this method gets called from a handler that does not have username or repository in the match info // it is safe to assume the programmer made a mistake, thus .expect_or_log is OK - let username = match_info.get("username").expect_or_log("from_request called on Repository despite not having username argument").to_owned(); - let repository = match_info.get("repository").expect_or_log("from_request called on Repository despite not having repository argument").to_owned(); + let username = match_info + .get("username") + .expect_or_log("from_request called on Repository despite not having username argument") + .to_owned(); + let repository = match_info + .get("repository") + .expect_or_log( + "from_request called on Repository despite not having repository argument", + ) + .to_owned(); //let tree = match_info.get("tree"); // Allows one to receive the repo owner name without having to manually search the database @@ -129,27 +161,43 @@ impl FromRequest for Repository { Box::pin(async move { let web_user = web_user_future.await?; - extract_repo_from_request(db_pool, web_user, username.as_str(), repository.as_str()).await.map_err(|err| GitArenaError { + extract_repo_from_request( + db_pool, + web_user, + username.as_str(), + repository.as_str(), + ) + .await + .map_err(|err| GitArenaError { source: Arc::new(err), - display_type: ErrorDisplayType::Html // TODO: Check whenever route is err = "html|json|git" etc... + display_type: ErrorDisplayType::Html, // TODO: Check whenever route is err = "html|json|git" etc... }) }) } None => Box::pin(async { Err(GitArenaError { source: Arc::new(anyhow!("No PgPool in application data")), - display_type: ErrorDisplayType::Html // TODO: Check whenever route is err = "html|json|git" etc... + display_type: ErrorDisplayType::Html, // TODO: Check whenever route is err = "html|json|git" etc... 
}) - }) + }), } } } -async fn extract_repo_from_request(db_pool: Data, web_user: WebUser, username: &str, repository: &str) -> Result { +async fn extract_repo_from_request( + db_pool: Data, + web_user: WebUser, + username: &str, + repository: &str, +) -> Result { let mut transaction = db_pool.begin().await?; - let user = User::find_using_name(username, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, "Repository not found"))?; - let repo = Repository::open(user, repository, &mut transaction).await.ok_or_else(|| err!(NOT_FOUND, "Repository not found"))?; + let user = User::find_using_name(username, &mut transaction) + .await + .ok_or_else(|| err!(NOT_FOUND, "Repository not found"))?; + let repo = Repository::open(user, repository, &mut transaction) + .await + .ok_or_else(|| err!(NOT_FOUND, "Repository not found"))?; if !privilege::check_access(&repo, web_user.as_ref(), &mut transaction).await? { die!(NOT_FOUND, "Not found"); @@ -170,7 +218,7 @@ pub(crate) struct RepoOwner(pub(crate) String); pub(crate) struct Branch { pub(crate) gitoxide_repo: GitoxideRepository, pub(crate) tree: String, - pub(crate) reference: Reference + pub(crate) reference: Reference, } impl FromRequest for Branch { @@ -182,7 +230,10 @@ impl FromRequest for Branch { // If this method gets called from a handler that does not have tree in the match info // it is safe to assume the programmer made a mistake, thus .expect_or_log is OK - let tree = match_info.get("tree").expect_or_log("from_request called on Branch despite not having tree argument").to_owned(); + let tree = match_info + .get("tree") + .expect_or_log("from_request called on Branch despite not having tree argument") + .to_owned(); let repo_future = Repository::from_request(req, payload); @@ -195,23 +246,29 @@ impl FromRequest for Branch { // This call exists early if access rights are insufficient, so we don't need to worry about them down the road let repo = repo_future.await?; - extract_branch_from_request(db_pool, repo, tree).await.map_err(|err| GitArenaError { - source: Arc::new(err), - display_type: ErrorDisplayType::Html // TODO: Check whenever route is err = "html|json|git" etc... - }) + extract_branch_from_request(db_pool, repo, tree) + .await + .map_err(|err| GitArenaError { + source: Arc::new(err), + display_type: ErrorDisplayType::Html, // TODO: Check whenever route is err = "html|json|git" etc... + }) }) } None => Box::pin(async { Err(GitArenaError { source: Arc::new(anyhow!("No PgPool in application data")), - display_type: ErrorDisplayType::Html // TODO: Check whenever route is err = "html|json|git" etc... + display_type: ErrorDisplayType::Html, // TODO: Check whenever route is err = "html|json|git" etc... 
}) - }) + }), } } } -async fn extract_branch_from_request(db_pool: Data, repo: Repository, tree: String) -> Result { +async fn extract_branch_from_request( + db_pool: Data, + repo: Repository, + tree: String, +) -> Result { let mut transaction = db_pool.begin().await?; let gitoxide_repo = repo.gitoxide(&mut transaction).await?; @@ -219,7 +276,7 @@ async fn extract_branch_from_request(db_pool: Data, repo: Repository, tr let reference = match gitoxide_repo.refs.find_loose(tree.as_str()) { Ok(loose_ref) => Ok(loose_ref), Err(GitoxideFindError::Find(err)) => Err(err), - Err(GitoxideFindError::NotFound(_)) => die!(NOT_FOUND, "Tree not found") + Err(GitoxideFindError::NotFound(_)) => die!(NOT_FOUND, "Tree not found"), }?; transaction.commit().await?; @@ -227,6 +284,6 @@ async fn extract_branch_from_request(db_pool: Data, repo: Repository, tr Ok(Branch { gitoxide_repo, tree, - reference + reference, }) } diff --git a/src/routes/admin/dashboard.rs b/src/routes/admin/dashboard.rs index 8d265bd..6f2be39 100644 --- a/src/routes/admin/dashboard.rs +++ b/src/routes/admin/dashboard.rs @@ -7,7 +7,7 @@ use crate::{die, render_template}; use std::env::consts; use std::process; -use actix_web::{Responder, web}; +use actix_web::{web, Responder}; use anyhow::Result; use chrono::Duration; use chrono_humanize::{Accuracy, HumanTime, Tense}; @@ -19,7 +19,10 @@ use sysinfo::SystemExt; use tera::Context; #[route("/", method = "GET", err = "html")] -pub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn dashboard( + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; if !user.admin { @@ -39,9 +42,10 @@ pub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data) -> context.try_insert("users_count", &users_count)?; - let latest_user_option: Option = sqlx::query_as::<_, User>("select * from users order by id desc limit 1") - .fetch_optional(&mut transaction) - .await?; + let latest_user_option: Option = + sqlx::query_as::<_, User>("select * from users order by id desc limit 1") + .fetch_optional(&mut transaction) + .await?; if let Some(latest_user) = latest_user_option { context.try_insert("latest_user", &latest_user)?; @@ -59,17 +63,19 @@ pub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data) -> context.try_insert("repos_count", &repos_count)?; - let latest_repo_option: Option = sqlx::query_as::<_, Repository>("select * from repositories order by id desc limit 1") - .fetch_optional(&mut transaction) - .await?; + let latest_repo_option: Option = + sqlx::query_as::<_, Repository>("select * from repositories order by id desc limit 1") + .fetch_optional(&mut transaction) + .await?; if let Some(latest_repo) = latest_repo_option { context.try_insert("latest_repo", &latest_repo)?; - let (latest_repo_username_option,): (String,) = sqlx::query_as("select username from users where id = $1 limit 1") - .bind(&latest_repo.owner) - .fetch_one(&mut transaction) - .await?; + let (latest_repo_username_option,): (String,) = + sqlx::query_as("select username from users where id = $1 limit 1") + .bind(&latest_repo.owner) + .fetch_one(&mut transaction) + .await?; context.try_insert("latest_repo_username", &latest_repo_username_option)?; } @@ -85,13 +91,17 @@ pub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data) -> const GITARENA_SHA1: &str = env!("VERGEN_GIT_SHA"); static GITARENA_SHA1_SHORT: Lazy<&'static str> = Lazy::new(|| &GITARENA_SHA1[0..7]); - static GITARENA_VERSION: Lazy = Lazy::new(|| format!("{}-{}", 
env!("CARGO_PKG_VERSION"), *GITARENA_SHA1_SHORT)); + static GITARENA_VERSION: Lazy = + Lazy::new(|| format!("{}-{}", env!("CARGO_PKG_VERSION"), *GITARENA_SHA1_SHORT)); context.try_insert("gitarena_version", GITARENA_VERSION.as_str())?; let libgit2_version = LibGit2Version::get(); let (major, minor, patch) = libgit2_version.libgit2_version(); - context.try_insert("libgit2_version", format!("{}.{}.{}", major, minor, patch).as_str())?; + context.try_insert( + "libgit2_version", + format!("{}.{}.{}", major, minor, patch).as_str(), + )?; context.try_insert("git2_rs_version", libgit2_version.crate_version())?; // System Info @@ -99,7 +109,12 @@ pub(crate) async fn dashboard(web_user: WebUser, db_pool: web::Data) -> { let system = SYSTEM_INFO.read().await; - context.try_insert("os", &system.long_os_version().unwrap_or_else(|| "Unknown".to_string()))?; + context.try_insert( + "os", + &system + .long_os_version() + .unwrap_or_else(|| "Unknown".to_string()), + )?; context.try_insert("uptime", &format_uptime(system.uptime()))?; context.try_insert("memory_available", &system.available_memory())?; diff --git a/src/routes/admin/log.rs b/src/routes/admin/log.rs index f8fcbc8..14385b2 100644 --- a/src/routes/admin/log.rs +++ b/src/routes/admin/log.rs @@ -28,23 +28,30 @@ pub(crate) async fn log(web_user: WebUser) -> Result { static LOG_FILE: Lazy = Lazy::new(get_log_file_path); - let lines = fs::read_to_string(LOG_FILE.as_str()).map(|content| { - let index = content.rfind("Successfully loaded 415 licenses from cache").map_or_else(|| 0, |i| i - 72); - let new_log_file = &content[index..]; - - let lines = new_log_file.lines(); - let mut log_lines = Vec::with_capacity(lines.size_hint().0); - - for line in lines { - if let Ok(log_line) = serde_json::from_str::(line) { - if let Some(Value::String(message)) = log_line.fields.get("message") { - log_lines.push(format!("{} [{}] {}", log_line.timestamp, log_line.level, message)); + let lines = fs::read_to_string(LOG_FILE.as_str()) + .map(|content| { + let index = content + .rfind("Successfully loaded 415 licenses from cache") + .map_or_else(|| 0, |i| i - 72); + let new_log_file = &content[index..]; + + let lines = new_log_file.lines(); + let mut log_lines = Vec::with_capacity(lines.size_hint().0); + + for line in lines { + if let Ok(log_line) = serde_json::from_str::(line) { + if let Some(Value::String(message)) = log_line.fields.get("message") { + log_lines.push(format!( + "{} [{}] {}", + log_line.timestamp, log_line.level, message + )); + } } } - } - log_lines - }).unwrap_or_default(); + log_lines + }) + .unwrap_or_default(); let mut context = Context::new(); @@ -55,14 +62,21 @@ pub(crate) async fn log(web_user: WebUser) -> Result { } #[route("/log/sse", method = "GET", err = "html")] -pub(crate) async fn log_sse(web_user: WebUser, broadcaster: Data>) -> Result { +pub(crate) async fn log_sse( + web_user: WebUser, + broadcaster: Data>, +) -> Result { let user = web_user.into_user()?; if !user.admin { die!(FORBIDDEN, "Not allowed"); } - let tx = broadcaster.write().await.new_client(Category::AdminLog).await?; + let tx = broadcaster + .write() + .await + .new_client(Category::AdminLog) + .await?; Ok(HttpResponse::Ok() .insert_header((CONTENT_TYPE, "text/event-stream")) @@ -74,7 +88,7 @@ struct LogLine<'a> { timestamp: &'a str, level: &'a str, #[serde(borrow = "'a")] - fields: HashMap<&'a str, Value> + fields: HashMap<&'a str, Value>, } fn get_log_file_path() -> String { diff --git a/src/routes/admin/mod.rs b/src/routes/admin/mod.rs index c15e81c..6bc2cc1 100644 --- 
a/src/routes/admin/mod.rs +++ b/src/routes/admin/mod.rs @@ -1,5 +1,5 @@ -use actix_web::Scope; use actix_web::web::scope; +use actix_web::Scope; mod dashboard; mod log; diff --git a/src/routes/admin/settings.rs b/src/routes/admin/settings.rs index 4d4f6fa..1d96c14 100644 --- a/src/routes/admin/settings.rs +++ b/src/routes/admin/settings.rs @@ -6,7 +6,7 @@ use crate::{config, die, err, render_template}; use std::collections::HashMap; use std::sync::Once; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::{Context as _, Result}; use gitarena_macros::route; use multimap::MultiMap; @@ -14,7 +14,10 @@ use sqlx::PgPool; use tera::Context; #[route("/settings", method = "GET", err = "html")] -pub(crate) async fn get_settings(web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn get_settings( + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; if !user.admin { @@ -32,7 +35,10 @@ pub(crate) async fn get_settings(web_user: WebUser, db_pool: web::Data) .into_iter() .map(|setting| { let key = setting.key.as_str(); - let parent_key = key.split_once('.').map_or_else(|| key, |(key, _)| key).to_owned(); + let parent_key = key + .split_once('.') + .map_or_else(|| key, |(key, _)| key) + .to_owned(); (parent_key, setting) }) @@ -44,7 +50,12 @@ pub(crate) async fn get_settings(web_user: WebUser, db_pool: web::Data) } #[route("/settings", method = "PATCH", err = "htmx+text")] -pub(crate) async fn patch_settings(data: web::Form>, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn patch_settings( + data: web::Form>, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; if !user.admin { @@ -65,11 +76,15 @@ pub(crate) async fn patch_settings(data: web::Form>, web TypeConstraint::Boolean => value.parse::().is_ok(), TypeConstraint::Char => value.parse::().is_ok(), TypeConstraint::Int => value.parse::().is_ok(), - TypeConstraint::String | TypeConstraint::Bytes => true + TypeConstraint::String | TypeConstraint::Bytes => true, }; if !valid { - die!(BAD_REQUEST, "Value for {} does not follow type constraint", key); + die!( + BAD_REQUEST, + "Value for {} does not follow type constraint", + key + ); } // This does on purpose not use config::set_setting as that method requires a key: &'static str @@ -88,7 +103,7 @@ pub(crate) async fn patch_settings(data: web::Form>, web if !once.is_completed() { let setting = match request.get_header("hx-trigger-name") { Some(setting) => setting, - None => die!(BAD_REQUEST, "Setting not found") + None => die!(BAD_REQUEST, "Setting not found"), }; sqlx::query("update settings set value = false where key = $1") diff --git a/src/routes/explore.rs b/src/routes/explore.rs index ecb0ec1..8108f90 100644 --- a/src/routes/explore.rs +++ b/src/routes/explore.rs @@ -5,7 +5,7 @@ use crate::{err, render_template}; use std::fmt::{Display, Formatter, Result as FmtResult}; -use actix_web::{HttpRequest, Responder, web}; +use actix_web::{web, HttpRequest, Responder}; use anyhow::Result; use derive_more::Display; use gitarena_macros::route; @@ -16,11 +16,16 @@ use sqlx::{Executor, PgPool, Postgres}; use tera::Context; #[route("/explore", method = "GET", err = "htmx+html")] -pub(crate) async fn explore(web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn explore( + web_user: WebUser, + request: HttpRequest, + db_pool: 
web::Data, +) -> Result { let query_string = request.q_string(); let sorting = query_string.get("sort").unwrap_or("stars_desc"); - let (sort_method, order) = Order::parse(sorting).ok_or_else(|| err!(BAD_REQUEST, "Invalid order"))?; + let (sort_method, order) = + Order::parse(sorting).ok_or_else(|| err!(BAD_REQUEST, "Invalid order"))?; let htmx_request = request.is_htmx(); let options = ExploreOptions::parse(&query_string, &web_user, sort_method, order, htmx_request); @@ -29,9 +34,15 @@ pub(crate) async fn explore(web_user: WebUser, request: HttpRequest, db_pool: we context.insert_web_user(&web_user)?; - context.try_insert("repositories", &get_repositories(&options, &mut transaction).await?)?; + context.try_insert( + "repositories", + &get_repositories(&options, &mut transaction).await?, + )?; context.try_insert("options", &options)?; - context.try_insert("query_string", query_string_without_offset(&query_string).as_str())?; + context.try_insert( + "query_string", + query_string_without_offset(&query_string).as_str(), + )?; // Only send a partial result (only the component) if it's a request by htmx if options.htmx_request { @@ -41,7 +52,10 @@ pub(crate) async fn explore(web_user: WebUser, request: HttpRequest, db_pool: we render_template!("explore.html", context, transaction) } -async fn get_repositories<'e, E: Executor<'e, Database = Postgres>>(options: &ExploreOptions<'_>, executor: E) -> Result> { +async fn get_repositories<'e, E: Executor<'e, Database = Postgres>>( + options: &ExploreOptions<'_>, + executor: E, +) -> Result> { let query = format!("select repositories.id, \ repositories.name, \ repositories.description, \ @@ -89,23 +103,39 @@ struct ExploreOptions<'a> { sort: &'a str, order: Order, offset: u32, - htmx_request: bool + htmx_request: bool, } impl ExploreOptions<'_> { - fn parse<'a>(query_string: &'a QString, web_user: &WebUser, sort: &'a str, order: Order, htmx_request: bool) -> ExploreOptions<'a> { - let (internal, disabled) = web_user.as_ref().map_or_else(|| (false, false), |user| (true, user.admin)); + fn parse<'a>( + query_string: &'a QString, + web_user: &WebUser, + sort: &'a str, + order: Order, + htmx_request: bool, + ) -> ExploreOptions<'a> { + let (internal, disabled) = web_user + .as_ref() + .map_or_else(|| (false, false), |user| (true, user.admin)); ExploreOptions { - archived: query_string.get("archived").map_or_else(|| true, |value| value == "1"), - forked: query_string.get("fork").map_or_else(|| true, |value| value == "1"), - mirrored: query_string.get("mirror").map_or_else(|| true, |value| value == "1"), + archived: query_string + .get("archived") + .map_or_else(|| true, |value| value == "1"), + forked: query_string + .get("fork") + .map_or_else(|| true, |value| value == "1"), + mirrored: query_string + .get("mirror") + .map_or_else(|| true, |value| value == "1"), internal, disabled, sort, order, - offset: query_string.get("offset").map_or_else(|| 0, |value| value.parse::().unwrap_or(0)), - htmx_request + offset: query_string + .get("offset") + .map_or_else(|| 0, |value| value.parse::().unwrap_or(0)), + htmx_request, } } } @@ -136,12 +166,14 @@ impl Display for ExploreOptions<'_> { // Private repositories are hidden in the public explore page // TODO: Display them if the logged in user has permission to view them - f.write_str("repositories.visibility != 'private' group by repositories.id, users.id order by ")?; + f.write_str( + "repositories.visibility != 'private' group by repositories.id, users.id order by ", + )?; match self.sort { "stars" => write!(f, 
"stars {}, id ", self.order)?, "name" => write!(f, "lower(name) {}, id ", self.order)?, - _ => write!(f, "id {} ", self.order)? // Default is repository id (creation date) + _ => write!(f, "id {} ", self.order)?, // Default is repository id (creation date) } write!(f, "offset {} limit 20", self.offset) @@ -155,7 +187,7 @@ enum Order { Ascending, #[display(fmt = "desc")] #[serde(rename(serialize = "desc"))] - Descending + Descending, } impl Order { @@ -164,7 +196,7 @@ impl Order { let order = match order_str { "asc" => Order::Ascending, "desc" => Order::Descending, - _ => return None + _ => return None, }; Some((method, order)) @@ -172,7 +204,8 @@ impl Order { } fn query_string_without_offset(input: &QString) -> String { - input.to_pairs() + input + .to_pairs() .iter() .filter(|(key, _)| key != &"offset") .map(|(key, value)| format!("{}={}", key, value)) diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 906682f..ea544ea 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -1,8 +1,8 @@ use actix_web::web::ServiceConfig; +pub(crate) mod admin; mod api; mod explore; -pub(crate) mod admin; pub(crate) mod not_found; pub(crate) mod proxy; pub(crate) mod repository; diff --git a/src/routes/not_found.rs b/src/routes/not_found.rs index 695590c..7c67a74 100644 --- a/src/routes/not_found.rs +++ b/src/routes/not_found.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use actix_web::http::StatusCode; use actix_web::Result as ActixResult; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use log::debug; use serde_json::json; @@ -22,29 +22,44 @@ async fn api_not_found() -> Result { }))) } -async fn web_not_found(request: HttpRequest, web_user: WebUser, db_pool: web::Data) -> Result { +async fn web_not_found( + request: HttpRequest, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let mut context = Context::new(); context.insert_web_user(&web_user)?; context.try_insert("path", request.path())?; - render_template!(StatusCode::NOT_FOUND, "error/404.html", context, transaction) + render_template!( + StatusCode::NOT_FOUND, + "error/404.html", + context, + transaction + ) } #[instrument(skip_all)] -pub(crate) async fn default_handler(request: HttpRequest, web_user: WebUser, db_pool: web::Data) -> ActixResult { +pub(crate) async fn default_handler( + request: HttpRequest, + web_user: WebUser, + db_pool: web::Data, +) -> ActixResult { debug!("Got request for non-existent resource: {}", request.path()); Ok(if !request.path().starts_with("/api") { - web_not_found(request, web_user, db_pool).await.map_err(|err| GitArenaError { - source: Arc::new(err), - display_type: ErrorDisplayType::Html - }) + web_not_found(request, web_user, db_pool) + .await + .map_err(|err| GitArenaError { + source: Arc::new(err), + display_type: ErrorDisplayType::Html, + }) } else { api_not_found().await.map_err(|err| GitArenaError { source: Arc::new(err), - display_type: ErrorDisplayType::Json + display_type: ErrorDisplayType::Json, }) }) } diff --git a/src/routes/proxy/img_proxy.rs b/src/routes/proxy/img_proxy.rs index 4375b77..179781a 100644 --- a/src/routes/proxy/img_proxy.rs +++ b/src/routes/proxy/img_proxy.rs @@ -1,10 +1,10 @@ -use crate::{die, err}; use crate::prelude::{AwcExtensions, HttpRequestExtensions}; +use crate::{die, err}; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; -use awc::Client; 
use awc::http::header::{CACHE_CONTROL, IF_MODIFIED_SINCE, IF_NONE_MATCH}; +use awc::Client; use gitarena_macros::route; use log::debug; use serde::Deserialize; @@ -15,7 +15,7 @@ const PASSTHROUGH_HEADERS: [&str; 6] = [ "etag", "expires", "last-modified", - "transfer-encoding" + "transfer-encoding", ]; // Source: https://github.com/atmos/camo/blob/master/mime-types.json @@ -62,11 +62,14 @@ const ACCEPTED_MIME_TYPES: [&str; 43] = [ "image/x-rgb", "image/x-xbitmap", "image/x-xpixmap", - "image/x-xwindowdump" + "image/x-xwindowdump", ]; #[route("/api/proxy/{url}", method = "GET", err = "text")] -pub(crate) async fn proxy(uri: web::Path<ProxyRequest>, request: HttpRequest) -> Result<impl Responder> { +pub(crate) async fn proxy( + uri: web::Path<ProxyRequest>, + request: HttpRequest, +) -> Result<impl Responder> { let url = &uri.url; if url.is_empty() { @@ -92,7 +95,10 @@ pub(crate) async fn proxy(uri: web::Path, request: HttpRequest) -> debug!("Image proxy request for {}", &url); - let gateway_response = client.send().await.map_err(|err| err!(BAD_GATEWAY, "Failed to send request to gateway: {}", err))?; + let gateway_response = client + .send() + .await + .map_err(|err| err!(BAD_GATEWAY, "Failed to send request to gateway: {}", err))?; let mut response = HttpResponse::build(gateway_response.status()); /*if length > 5242880 { @@ -117,5 +123,5 @@ pub(crate) async fn proxy(uri: web::Path, request: HttpRequest) -> #[derive(Deserialize)] pub(crate) struct ProxyRequest { - pub(crate) url: String // Hex Digest + pub(crate) url: String, // Hex Digest } diff --git a/src/routes/repository/api/create_repo.rs b/src/routes/repository/api/create_repo.rs index 6d78bab..a4a6561 100644 --- a/src/routes/repository/api/create_repo.rs +++ b/src/routes/repository/api/create_repo.rs @@ -8,7 +8,7 @@ use crate::routes::repository::api::CreateJsonResponse; use crate::user::{User, WebUser}; use crate::utils::identifiers::{is_fs_legal, is_reserved_repo_name, is_valid}; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use gitarena_macros::route; use log::info; @@ -18,7 +18,12 @@ use sqlx::{PgPool, Pool, Postgres}; // This whole handler is very similar to `import_repo.rs` so at some point this should be consolidated into one #[route("/api/repo", method = "POST", err = "json")] -pub(crate) async fn create(web_user: WebUser, body: web::Json<CreateJsonRequest>, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> { +pub(crate) async fn create( + web_user: WebUser, + body: web::Json<CreateJsonRequest>, + request: HttpRequest, + db_pool: web::Data<PgPool>, +) -> Result<impl Responder> { let mut transaction = db_pool.begin().await?; let user = web_user.into_user()?; @@ -40,7 +45,10 @@ pub(crate) async fn create(web_user: WebUser, body: web::Json let description = &body.description; if description.len() > 256 { - die!(BAD_REQUEST, "Description may only be up to 256 characters long"); + die!( + BAD_REQUEST, + "Description may only be up to 256 characters long" + ); } let (exists,): (bool,) = sqlx::query_as("select exists(select 1 from repositories where owner = $1 and lower(name) = lower($2) limit 1)") @@ -68,33 +76,50 @@ pub(crate) async fn create(web_user: WebUser, body: web::Json create_readme(&repo, &user, &db_pool).await?; } - let domain = get_optional_setting::("domain", &mut transaction).await?.unwrap_or_default(); + let domain = get_optional_setting::("domain", &mut transaction) + .await?
+ .unwrap_or_default(); let path = format!("/{}/{}", &user.username, &repo.name); transaction.commit().await?; - info!("New repository created: {}/{} (id {})", &user.username, &repo.name, &repo.id); + info!( + "New repository created: {}/{} (id {})", + &user.username, &repo.name, &repo.id + ); Ok(if request.is_htmx() { - HttpResponse::Ok().append_header(("hx-redirect", path)).append_header(("hx-refresh", "true")).finish() + HttpResponse::Ok() + .append_header(("hx-redirect", path)) + .append_header(("hx-refresh", "true")) + .finish() } else { let url = format!("{}{}", domain, path); - HttpResponse::Ok().json(CreateJsonResponse { - id: repo.id, - url - }) + HttpResponse::Ok().json(CreateJsonResponse { id: repo.id, url }) }) } async fn create_readme(repo: &Repository, user: &User, db_pool: &Pool) -> Result<()> { let mut transaction = db_pool.begin().await?; let libgit2_repo = repo.libgit2(&mut transaction).await?; - let readme = format!("# {}\n\n{}\n", repo.name.as_str(), repo.description.as_str()); + let readme = format!( + "# {}\n\n{}\n", + repo.name.as_str(), + repo.description.as_str() + ); transaction.commit().await?; - write::write_file(&libgit2_repo, user, Some("HEAD"), "README.md", readme.as_bytes(), db_pool).await + write::write_file( + &libgit2_repo, + user, + Some("HEAD"), + "README.md", + readme.as_bytes(), + db_pool, + ) + .await } #[derive(Deserialize)] @@ -103,5 +128,5 @@ pub(crate) struct CreateJsonRequest { description: String, visibility: RepoVisibility, #[serde(default)] - readme: Option + readme: Option, } diff --git a/src/routes/repository/api/fork_repo.rs b/src/routes/repository/api/fork_repo.rs index 0c237c7..b075c9a 100644 --- a/src/routes/repository/api/fork_repo.rs +++ b/src/routes/repository/api/fork_repo.rs @@ -8,15 +8,24 @@ use crate::utils::filesystem::copy_dir_all; use std::path::Path; -use actix_web::{HttpMessage, HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpMessage, HttpRequest, HttpResponse, Responder}; use anyhow::{anyhow, Context, Result}; use gitarena_macros::route; use log::info; use serde_json::json; use sqlx::PgPool; -#[route("/api/repo/{username}/{repository}/fork", method = "GET", err = "htmx+json")] -pub(crate) async fn get_fork_amount(repo: Repository, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +#[route( + "/api/repo/{username}/{repository}/fork", + method = "GET", + err = "htmx+json" +)] +pub(crate) async fn get_fork_amount( + repo: Repository, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let additional_query = if matches!(web_user, WebUser::Authenticated(_)) { @@ -27,7 +36,10 @@ pub(crate) async fn get_fork_amount(repo: Repository, web_user: WebUser, request "visibility = 'public'" }; - let query = format!("select count(*) from repositories where forked_from = $1 and disabled = false and {}", additional_query); + let query = format!( + "select count(*) from repositories where forked_from = $1 and disabled = false and {}", + additional_query + ); let (count,): (i64,) = sqlx::query_as(query.as_str()) .bind(repo.id) @@ -46,8 +58,17 @@ pub(crate) async fn get_fork_amount(repo: Repository, web_user: WebUser, request } } -#[route("/api/repo/{username}/{repository}/fork", method = "POST", err = "htmx+text")] -pub(crate) async fn create_fork(repo: Repository, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +#[route( + "/api/repo/{username}/{repository}/fork", + method = "POST", + err = 
"htmx+text" +)] +pub(crate) async fn create_fork( + repo: Repository, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; @@ -78,25 +99,37 @@ pub(crate) async fn create_fork(repo: Repository, web_user: WebUser, request: Ht let old_path = repo.get_fs_path(&mut transaction).await?; let new_path = new_repo.get_fs_path(&mut transaction).await?; - copy_dir_all(Path::new(old_path.as_str()), Path::new(new_path.as_str())).await.context("Failed to copy repository")?; + copy_dir_all(Path::new(old_path.as_str()), Path::new(new_path.as_str())) + .await + .context("Failed to copy repository")?; - let domain = get_optional_setting::("domain", &mut transaction).await?.unwrap_or_default(); + let domain = get_optional_setting::("domain", &mut transaction) + .await? + .unwrap_or_default(); let url = format!("{}/{}/{}", domain, user.username, new_repo.name); transaction.commit().await?; let extensions = request.extensions(); - let repo_owner = extensions.get::().ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; + let repo_owner = extensions + .get::() + .ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; - info!("New repository forked: {}/{} (id {}, from {}/{})", &user.username, &new_repo.name, &repo.id, repo_owner, &repo.name); + info!( + "New repository forked: {}/{} (id {}, from {}/{})", + &user.username, &new_repo.name, &repo.id, repo_owner, &repo.name + ); Ok(if request.is_htmx() { - HttpResponse::Ok().append_header(("hx-redirect", url)).append_header(("hx-refresh", "true")).finish() + HttpResponse::Ok() + .append_header(("hx-redirect", url)) + .append_header(("hx-refresh", "true")) + .finish() } else { // TODO: Move CreateJsonResponse into mod.rs so it's no longer living inside of create_repo.rs HttpResponse::Ok().json(CreateJsonResponse { id: new_repo.id, - url + url, }) }) } diff --git a/src/routes/repository/api/import_repo.rs b/src/routes/repository/api/import_repo.rs index ad419fe..4e6fba3 100644 --- a/src/routes/repository/api/import_repo.rs +++ b/src/routes/repository/api/import_repo.rs @@ -7,7 +7,7 @@ use crate::user::WebUser; use crate::utils::identifiers::{is_fs_legal, is_reserved_repo_name, is_valid}; use crate::{die, err, Ipc}; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::{Context, Result}; use futures_locks::RwLock; use gitarena_common::packets::git::GitImport; @@ -20,11 +20,18 @@ use url::Url; // This whole handler is very similar to `create_repo.rs` so at some point this should be consolidated into one #[route("/api/repo/import", method = "POST", err = "json")] -pub(crate) async fn import(web_user: WebUser, body: web::Json, request: HttpRequest, ipc: web::Data>, db_pool: web::Data) -> Result { +pub(crate) async fn import( + web_user: WebUser, + body: web::Json, + request: HttpRequest, + ipc: web::Data>, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; - let enabled = get_setting::("repositories.importing_enabled", &mut transaction).await?; + let enabled = + get_setting::("repositories.importing_enabled", &mut transaction).await?; if !enabled || !ipc.read().await.is_connected() { die!(NOT_IMPLEMENTED, "Importing is disabled on this instance"); @@ -47,10 +54,14 @@ pub(crate) async fn import(web_user: WebUser, body: web::Json let description = &body.description; if description.len() > 256 { - die!(BAD_REQUEST, 
"Description may only be up to 256 characters long"); + die!( + BAD_REQUEST, + "Description may only be up to 256 characters long" + ); } - let url = Url::parse(body.import_url.as_str()).map_err(|_| err!(BAD_REQUEST, "Unable to parse import url"))?; + let url = Url::parse(body.import_url.as_str()) + .map_err(|_| err!(BAD_REQUEST, "Unable to parse import url"))?; if body.mirror.is_some() { die!(NOT_IMPLEMENTED, "Mirroring is not yet implemented"); @@ -81,27 +92,36 @@ pub(crate) async fn import(web_user: WebUser, body: web::Json let packet = GitImport { url: url.to_string(), username: body.username.clone(), - password: body.password.clone() + password: body.password.clone(), }; - ipc.write().await.send(packet).await.context("Failed to send import packet to workhorse")?; + ipc.write() + .await + .send(packet) + .await + .context("Failed to send import packet to workhorse")?; - let domain = get_optional_setting::("domain", &mut transaction).await?.unwrap_or_default(); + let domain = get_optional_setting::("domain", &mut transaction) + .await? + .unwrap_or_default(); let path = format!("/{}/{}", &user.username, &repo.name); transaction.commit().await?; - info!("New repository created for importing: {}/{} (id {}) (source: {})", &user.username, &repo.name, &repo.id, url); + info!( + "New repository created for importing: {}/{} (id {}) (source: {})", + &user.username, &repo.name, &repo.id, url + ); Ok(if request.is_htmx() { - HttpResponse::Ok().append_header(("hx-redirect", path)).append_header(("hx-refresh", "true")).finish() + HttpResponse::Ok() + .append_header(("hx-redirect", path)) + .append_header(("hx-refresh", "true")) + .finish() } else { let url = format!("{}{}", domain, path); - HttpResponse::Ok().json(CreateJsonResponse { - id: repo.id, - url - }) + HttpResponse::Ok().json(CreateJsonResponse { id: repo.id, url }) }) } diff --git a/src/routes/repository/api/mod.rs b/src/routes/repository/api/mod.rs index ad2cb94..ce11fbe 100644 --- a/src/routes/repository/api/mod.rs +++ b/src/routes/repository/api/mod.rs @@ -27,5 +27,5 @@ pub(crate) fn init(config: &mut ServiceConfig) { #[derive(Serialize)] pub(crate) struct CreateJsonResponse { pub(crate) id: i32, - pub(crate) url: String + pub(crate) url: String, } diff --git a/src/routes/repository/api/repo_readme.rs b/src/routes/repository/api/repo_readme.rs index f1d2797..b2b84d6 100644 --- a/src/routes/repository/api/repo_readme.rs +++ b/src/routes/repository/api/repo_readme.rs @@ -9,17 +9,28 @@ use git_repository::objs::Tree; use gitarena_macros::route; use serde_json::json; -#[route("/api/repo/{username}/{repository}/tree/{tree:.*}/readme", method = "GET", err = "json")] +#[route( + "/api/repo/{username}/{repository}/tree/{tree:.*}/readme", + method = "GET", + err = "json" +)] pub(crate) async fn readme(_repo: Repository, branch: Branch) -> Result { let gitoxide_repo = branch.gitoxide_repo; let mut buffer = Vec::::new(); let store = gitoxide_repo.objects.clone(); - let tree_ref = repo_files_at_ref(&branch.reference, store.clone(), &gitoxide_repo, &mut buffer).await?; + let tree_ref = repo_files_at_ref( + &branch.reference, + store.clone(), + &gitoxide_repo, + &mut buffer, + ) + .await?; let tree = Tree::from(tree_ref); - let entry = tree.entries + let entry = tree + .entries .iter() .find(|e| e.filename.to_lowercase().starts_with(b"readme")) .ok_or_else(|| err!(NOT_FOUND, "No readme file found"))?; diff --git a/src/routes/repository/api/star.rs b/src/routes/repository/api/star.rs index d1972c3..092bcd8 100644 --- 
a/src/routes/repository/api/star.rs +++ b/src/routes/repository/api/star.rs @@ -3,15 +3,24 @@ use crate::prelude::HttpRequestExtensions; use crate::repository::{RepoOwner, Repository}; use crate::user::{User, WebUser}; -use actix_web::{HttpMessage, HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpMessage, HttpRequest, HttpResponse, Responder}; use anyhow::{anyhow, Result}; use gitarena_macros::route; use log::debug; use serde_json::json; use sqlx::{Executor, PgPool, Postgres}; -#[route("/api/repo/{username}/{repository}/star", method = "GET", err = "htmx+json")] -pub(crate) async fn get_star(repo: Repository, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +#[route( + "/api/repo/{username}/{repository}/star", + method = "GET", + err = "htmx+json" +)] +pub(crate) async fn get_star( + repo: Repository, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let count = get_star_count(&repo, &mut transaction).await?; @@ -23,7 +32,9 @@ pub(crate) async fn get_star(repo: Repository, web_user: WebUser, request: HttpR }; let extensions = request.extensions(); - let repo_owner = extensions.get::().ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; + let repo_owner = extensions + .get::() + .ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; transaction.commit().await?; @@ -38,8 +49,16 @@ pub(crate) async fn get_star(repo: Repository, web_user: WebUser, request: HttpR } } -#[route("/api/repo/{username}/{repository}/star", method = "POST", err = "json")] -pub(crate) async fn post_star(repo: Repository, web_user: WebUser, db_pool: web::Data) -> Result { +#[route( + "/api/repo/{username}/{repository}/star", + method = "POST", + err = "json" +)] +pub(crate) async fn post_star( + repo: Repository, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; @@ -55,8 +74,16 @@ pub(crate) async fn post_star(repo: Repository, web_user: WebUser, db_pool: web: Ok(HttpResponse::Created().finish()) } -#[route("/api/repo/{username}/{repository}/star", method = "DELETE", err = "json")] -pub(crate) async fn delete_star(repo: Repository, web_user: WebUser, db_pool: web::Data) -> Result { +#[route( + "/api/repo/{username}/{repository}/star", + method = "DELETE", + err = "json" +)] +pub(crate) async fn delete_star( + repo: Repository, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; @@ -73,7 +100,11 @@ pub(crate) async fn delete_star(repo: Repository, web_user: WebUser, db_pool: we } #[route("/api/repo/{username}/{repository}/star", method = "PUT", err = "text")] -pub(crate) async fn put_star(repo: Repository, web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn put_star( + repo: Repository, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; @@ -95,7 +126,10 @@ pub(crate) async fn put_star(repo: Repository, web_user: WebUser, db_pool: web:: Ok(response.body(count.to_string())) } -async fn get_star_count<'e, E: Executor<'e, Database = Postgres>>(repo: &Repository, executor: E) -> Result { +async fn get_star_count<'e, E: Executor<'e, Database = Postgres>>( + repo: &Repository, + executor: E, +) -> Result { let (count,): (i64,) = sqlx::query_as("select count(*) from stars where repo = $1") .bind(repo.id) 
.fetch_optional(executor) @@ -105,36 +139,56 @@ async fn get_star_count<'e, E: Executor<'e, Database = Postgres>>(repo: &Reposit Ok(count) } -async fn add_star<'e, E: Executor<'e, Database = Postgres>>(user: &User, repo: &Repository, executor: E) -> Result<()> { +async fn add_star<'e, E: Executor<'e, Database = Postgres>>( + user: &User, + repo: &Repository, + executor: E, +) -> Result<()> { sqlx::query("insert into stars (stargazer, repo) values ($1, $2)") .bind(user.id) .bind(repo.id) .execute(executor) .await?; - debug!("{} (id {}) added a star to repository id {}", user.username, user.id, repo.id); + debug!( + "{} (id {}) added a star to repository id {}", + user.username, user.id, repo.id + ); Ok(()) } -async fn remove_star<'e, E: Executor<'e, Database = Postgres>>(user: &User, repo: &Repository, executor: E) -> Result<()> { +async fn remove_star<'e, E: Executor<'e, Database = Postgres>>( + user: &User, + repo: &Repository, + executor: E, +) -> Result<()> { sqlx::query("delete from stars where stargazer = $1 and repo = $2") .bind(user.id) .bind(repo.id) .execute(executor) .await?; - debug!("{} (id {}) removed their star from repository id {}", user.username, user.id, repo.id); + debug!( + "{} (id {}) removed their star from repository id {}", + user.username, user.id, repo.id + ); Ok(()) } -async fn has_star<'e, E: Executor<'e, Database = Postgres>>(user: &User, repo: &Repository, executor: E) -> Result { - let (exists,): (bool,) = sqlx::query_as("select exists(select 1 from stars where stargazer = $1 and repo = $2 limit 1)") - .bind(user.id) - .bind(repo.id) - .fetch_one(executor) - .await?; +async fn has_star<'e, E: Executor<'e, Database = Postgres>>( + user: &User, + repo: &Repository, + executor: E, +) -> Result { + let (exists,): (bool,) = sqlx::query_as( + "select exists(select 1 from stars where stargazer = $1 and repo = $2 limit 1)", + ) + .bind(user.id) + .bind(repo.id) + .fetch_one(executor) + .await?; Ok(exists) } diff --git a/src/routes/repository/archive.rs b/src/routes/repository/archive.rs index 8eb7bf6..f670ddc 100644 --- a/src/routes/repository/archive.rs +++ b/src/routes/repository/archive.rs @@ -21,7 +21,11 @@ use tokio_tar::{Builder as TarBuilder, Header as TarHeader}; use zip::write::FileOptions as ZipFileOptions; use zip::ZipWriter; -#[route("/{username}/{repository}/tree/{tree:.*}/archive/targz", method = "GET", err = "html")] +#[route( + "/{username}/{repository}/tree/{tree:.*}/archive/targz", + method = "GET", + err = "html" +)] pub(crate) async fn tar_gz_file(repo: Repository, branch: Branch) -> Result { let gitoxide_repo = branch.gitoxide_repo; @@ -29,11 +33,24 @@ pub(crate) async fn tar_gz_file(repo: Repository, branch: Branch) -> Result Result, tree: Tree, path: &Path, builder: &mut TarBuilder>, buffer: &mut Vec) -> Result<()> { +async fn write_directory_tar( + store: Arc, + tree: Tree, + path: &Path, + builder: &mut TarBuilder>, + buffer: &mut Vec, +) -> Result<()> { for entry in tree.entries { let filename = entry.filename.to_str()?; let path = path.join(filename); @@ -89,7 +115,9 @@ async fn write_directory_tar(store: Arc, tree: Tree, path: &Path, builder header.set_cksum(); - builder.append_data(&mut header, path.as_path(), &content[..]).await?; + builder + .append_data(&mut header, path.as_path(), &content[..]) + .await?; } EntryMode::Commit => { /* TODO: implement submodules */ } } @@ -98,14 +126,24 @@ async fn write_directory_tar(store: Arc, tree: Tree, path: &Path, builder Ok(()) } -#[route("/{username}/{repository}/tree/{tree:.*}/archive/zip", 
method = "GET", err = "html")] +#[route( + "/{username}/{repository}/tree/{tree:.*}/archive/zip", + method = "GET", + err = "html" +)] pub(crate) async fn zip_file(repo: Repository, branch: Branch) -> Result { let gitoxide_repo = branch.gitoxide_repo; let mut buffer = Vec::::new(); let store = gitoxide_repo.objects.clone(); - let tree = repo_files_at_ref(&branch.reference, store.clone(), &gitoxide_repo, &mut buffer).await?; + let tree = repo_files_at_ref( + &branch.reference, + store.clone(), + &gitoxide_repo, + &mut buffer, + ) + .await?; let tree = Tree::from(tree); let mut writer = ZipWriter::new(Cursor::new(Vec::new())); @@ -115,12 +153,21 @@ pub(crate) async fn zip_file(repo: Repository, branch: Branch) -> Result, tree: Tree, path: &Path, writer: &mut ZipWriter>>, buffer: &mut Vec) -> Result<()> { +async fn write_directory_zip( + store: Arc, + tree: Tree, + path: &Path, + writer: &mut ZipWriter>>, + buffer: &mut Vec, +) -> Result<()> { for entry in tree.entries { let filename = entry.filename.to_str()?; let path_buffer = path.join(filename); @@ -145,7 +192,7 @@ async fn write_directory_zip(store: Arc, tree: Tree, path: &Path, writer: 0o664 }) .large_file(content.len() >= 4294967000); // 4 GiB - //.last_modified_time(...) TODO: DateTime of last commit to this file + //.last_modified_time(...) TODO: DateTime of last commit to this file writer.start_file(format!("{}", path.display()), options)?; writer.write_all(&content[..])?; diff --git a/src/routes/repository/blobs/blob.rs b/src/routes/repository/blobs/blob.rs index eba463b..309696c 100644 --- a/src/routes/repository/blobs/blob.rs +++ b/src/routes/repository/blobs/blob.rs @@ -11,7 +11,7 @@ use crate::{die, err, render_template}; use std::sync::Arc; use actix_web::http::header::CONTENT_TYPE; -use actix_web::{HttpResponse, Responder, web}; +use actix_web::{web, HttpResponse, Responder}; use anyhow::Result; use async_recursion::async_recursion; use bstr::ByteSlice; @@ -27,8 +27,19 @@ use sqlx::PgPool; use tera::Context; use tracing_unwrap::OptionExt; -#[route("/{username}/{repository}/tree/{tree}/blob/{blob:.*}", method = "GET", err = "html")] -pub(crate) async fn view_blob(repo: Repository, branch: Branch, uri: web::Path, web_user: WebUser, cookie: web::Data>, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}/tree/{tree}/blob/{blob:.*}", + method = "GET", + err = "html" +)] +pub(crate) async fn view_blob( + repo: Repository, + branch: Branch, + uri: web::Path, + web_user: WebUser, + cookie: web::Data>, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let gitoxide_repo = branch.gitoxide_repo; @@ -41,29 +52,49 @@ pub(crate) async fn view_blob(repo: Repository, branch: Branch, uri: web::Path, cookie: web::Data>, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}/tree/{tree}/~blob/{blob:.*}", + method = "GET", + err = "text" +)] +pub(crate) async fn view_raw_blob( + _repo: Repository, + branch: Branch, + uri: web::Path, + cookie: web::Data>, + db_pool: web::Data, +) -> Result { let transaction = db_pool.begin().await?; let gitoxide_repo = branch.gitoxide_repo; @@ -101,29 +142,55 @@ pub(crate) async fn view_raw_blob(_repo: Repository, branch: Branch, uri: web::P let store = gitoxide_repo.objects.clone(); - let tree_ref = repo_files_at_ref(&branch.reference, store.clone(), &gitoxide_repo, &mut buffer).await?; - let (_, content, _) = recursively_visit_blob_content(&branch.reference, tree_ref, uri.blob.as_str(), &gitoxide_repo, store.clone(), &mut blob_buffer).await?; + 
let tree_ref = repo_files_at_ref( + &branch.reference, + store.clone(), + &gitoxide_repo, + &mut buffer, + ) + .await?; + let (_, content, _) = recursively_visit_blob_content( + &branch.reference, + tree_ref, + uri.blob.as_str(), + &gitoxide_repo, + store.clone(), + &mut blob_buffer, + ) + .await?; let mime = if let Some(file_type) = infer::get(content.as_bytes()) { file_type.mime_type() } else { match cookie.probe(content.as_bytes())? { FileType::Text => "text/plain", - _ => "application/octet-stream" + _ => "application/octet-stream", } }; transaction.commit().await?; - Ok(HttpResponse::Ok().insert_header((CONTENT_TYPE, mime)).body(content)) + Ok(HttpResponse::Ok() + .insert_header((CONTENT_TYPE, mime)) + .body(content)) } #[async_recursion(?Send)] -async fn recursively_visit_blob_content<'a>(reference: &Reference, tree_ref: TreeRef<'a>, path: &str, repo: &'a GitoxideRepository, store: Arc, buffer: &'a mut Vec) -> Result<(String, String, EntryMode)> { +async fn recursively_visit_blob_content<'a>( + reference: &Reference, + tree_ref: TreeRef<'a>, + path: &str, + repo: &'a GitoxideRepository, + store: Arc, + buffer: &'a mut Vec, +) -> Result<(String, String, EntryMode)> { let tree = Tree::from(tree_ref); - let (search, remaining) = path.split_once('/').map_or_else(|| (path, None), |(a, b)| (a, Some(b))); + let (search, remaining) = path + .split_once('/') + .map_or_else(|| (path, None), |(a, b)| (a, Some(b))); - let entry = tree.entries + let entry = tree + .entries .iter() .find(|e| e.filename == search) .ok_or_else(|| err!(NOT_FOUND))?; @@ -134,19 +201,27 @@ async fn recursively_visit_blob_content<'a>(reference: &Reference, tree_ref: Tre die!(NOT_FOUND); } - let tree_ref = store.to_handle_arc().find_tree(entry.oid.as_ref(), buffer).map(|(tree, _)| tree)?; + let tree_ref = store + .to_handle_arc() + .find_tree(entry.oid.as_ref(), buffer) + .map(|(tree, _)| tree)?; let mut buffer = Vec::::new(); - recursively_visit_blob_content(reference, tree_ref, remaining, repo, store, &mut buffer).await + recursively_visit_blob_content(reference, tree_ref, remaining, repo, store, &mut buffer) + .await } None => { - if entry.mode != EntryMode::Blob && entry.mode != EntryMode::BlobExecutable { + if entry.mode != EntryMode::Blob && entry.mode != EntryMode::BlobExecutable { die!(BAD_REQUEST, "Only blobs can be viewed in blob view"); } let file_name = entry.filename.to_str().unwrap_or("Invalid file name"); - Ok((file_name.to_owned(), read_blob_content(entry.oid.as_ref(), store).await?, entry.mode)) + Ok(( + file_name.to_owned(), + read_blob_content(entry.oid.as_ref(), store).await?, + entry.mode, + )) } } } diff --git a/src/routes/repository/blobs/directory.rs b/src/routes/repository/blobs/directory.rs index 825ad9f..39c8449 100644 --- a/src/routes/repository/blobs/directory.rs +++ b/src/routes/repository/blobs/directory.rs @@ -1,6 +1,8 @@ -use crate::git::GIT_HASH_KIND; -use crate::git::history::{all_branches, all_commits, all_tags, last_commit_for_blob, last_commit_for_ref}; +use crate::git::history::{ + all_branches, all_commits, all_tags, last_commit_for_blob, last_commit_for_ref, +}; use crate::git::utils::{read_blob_content, repo_files_at_ref}; +use crate::git::GIT_HASH_KIND; use crate::prelude::{ContextExtensions, LibGit2SignatureExtensions}; use crate::repository::{Branch, Repository}; use crate::routes::repository::blobs::BlobRequest; @@ -11,7 +13,7 @@ use crate::{die, err, render_template}; use std::cmp::Ordering; use std::sync::Arc; -use actix_web::{Responder, web}; +use actix_web::{web, 
Responder}; use anyhow::Result; use async_recursion::async_recursion; use bstr::ByteSlice; @@ -25,8 +27,18 @@ use gitarena_macros::route; use sqlx::PgPool; use tera::Context; -#[route("/{username}/{repository}/tree/{tree}/directory/{blob:.*}", method = "GET", err = "html")] -pub(crate) async fn view_dir(repo: Repository, branch: Branch, uri: web::Path, web_user: WebUser, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}/tree/{tree}/directory/{blob:.*}", + method = "GET", + err = "html" +)] +pub(crate) async fn view_dir( + repo: Repository, + branch: Branch, + uri: web::Path, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let gitoxide_repo = branch.gitoxide_repo; @@ -42,13 +54,29 @@ pub(crate) async fn view_dir(repo: Repository, branch: Branch, uri: web::Path(reference: &Reference, tree_ref: TreeRef<'a>, path: &str, repo: &'a GitoxideRepository, store: Arc, buffer: &'a mut Vec) -> Result { +async fn recursively_visit_tree<'a>( + reference: &Reference, + tree_ref: TreeRef<'a>, + path: &str, + repo: &'a GitoxideRepository, + store: Arc, + buffer: &'a mut Vec, +) -> Result { let tree = Tree::from(tree_ref); match path.split_once('/') { Some((search, remaining)) => { - let entry = tree.entries + let entry = tree + .entries .iter() .find(|e| e.filename == search) .ok_or_else(|| err!(NOT_FOUND, "Not found"))?; @@ -153,11 +215,14 @@ async fn recursively_visit_tree<'a>(reference: &Reference, tree_ref: TreeRef<'a> die!(BAD_REQUEST, "Only trees can be viewed in tree view"); } - let tree_ref = store.to_handle_arc().find_tree(entry.oid.as_ref(), buffer).map(|(tree, _)| tree)?; + let tree_ref = store + .to_handle_arc() + .find_tree(entry.oid.as_ref(), buffer) + .map(|(tree, _)| tree)?; let mut buffer = Vec::::new(); recursively_visit_tree(reference, tree_ref, remaining, repo, store, &mut buffer).await } - None => Ok(tree) + None => Ok(tree), } } diff --git a/src/routes/repository/blobs/mod.rs b/src/routes/repository/blobs/mod.rs index 477d333..dac7fc2 100644 --- a/src/routes/repository/blobs/mod.rs +++ b/src/routes/repository/blobs/mod.rs @@ -19,5 +19,5 @@ pub(crate) struct BlobRequest { pub(crate) repository: String, pub(crate) tree: String, - pub(crate) blob: String + pub(crate) blob: String, } diff --git a/src/routes/repository/commits.rs b/src/routes/repository/commits.rs index 2fe3ebd..43cc3ec 100644 --- a/src/routes/repository/commits.rs +++ b/src/routes/repository/commits.rs @@ -5,15 +5,25 @@ use crate::templates::web::GitCommit; use crate::user::WebUser; use crate::{die, render_template}; -use actix_web::{HttpMessage, HttpRequest, Responder, web}; +use actix_web::{web, HttpMessage, HttpRequest, Responder}; use anyhow::{anyhow, Result}; use bstr::ByteSlice; use gitarena_macros::route; use sqlx::PgPool; use tera::Context; -#[route("/{username}/{repository}/tree/{tree:.*}/commits", method = "GET", err = "htmx+html")] -pub(crate) async fn commits(repo: Repository, branch: Branch, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}/tree/{tree:.*}/commits", + method = "GET", + err = "htmx+html" +)] +pub(crate) async fn commits( + repo: Repository, + branch: Branch, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; let full_tree_name = branch.reference.name.as_bstr().to_str()?; @@ -25,7 +35,9 @@ pub(crate) async fn commits(repo: Repository, branch: Branch, web_user: WebUser, let mut context = 
Context::new(); let extensions = request.extensions(); - let repo_owner = extensions.get::().ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; + let repo_owner = extensions + .get::() + .ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; context.try_insert("repo_owner_name", &repo_owner.0)?; context.try_insert("repo", &repo)?; @@ -56,7 +68,7 @@ pub(crate) async fn commits(repo: Repository, branch: Branch, web_user: WebUser, date: Some(chrono_time_only_date), author_name: name, author_uid: uid, - author_email: email + author_email: email, }); } diff --git a/src/routes/repository/git/git_receive_pack.rs b/src/routes/repository/git/git_receive_pack.rs index 7207f1e..a3cdc04 100644 --- a/src/routes/repository/git/git_receive_pack.rs +++ b/src/routes/repository/git/git_receive_pack.rs @@ -16,7 +16,7 @@ use std::process::Stdio; use std::time::Duration; use actix_web::http::header::CONTENT_TYPE; -use actix_web::{Either, HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, Either, HttpRequest, HttpResponse, Responder}; use anyhow::{Context, Result}; use futures::StreamExt; use git_repository::protocol::transport::packetline::{PacketLineRef, StreamingPeekableIter}; @@ -27,41 +27,61 @@ use sqlx::PgPool; use tokio::process::Command; use tokio::time::timeout; -#[route("/{username}/{repository}.git/git-receive-pack", method = "POST", err = "git")] -pub(crate) async fn git_receive_pack(uri: web::Path, mut body: web::Payload, request: HttpRequest, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}.git/git-receive-pack", + method = "POST", + err = "git" +)] +pub(crate) async fn git_receive_pack( + uri: web::Path, + mut body: web::Payload, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let content_type = request.get_header("content-type").unwrap_or_default(); let accept_header = request.get_header("accept").unwrap_or_default(); - if content_type != "application/x-git-receive-pack-request" || accept_header != "application/x-git-receive-pack-result" { + if content_type != "application/x-git-receive-pack-request" + || accept_header != "application/x-git-receive-pack-result" + { die!(BAD_REQUEST); } let mut transaction = db_pool.begin().await?; - let user_option: Option<(i32,)> = sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") - .bind(&uri.username) - .fetch_optional(&mut transaction) - .await?; + let user_option: Option<(i32,)> = + sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") + .bind(&uri.username) + .fetch_optional(&mut transaction) + .await?; let (user_id,) = match user_option { Some(user_id) => user_id, - None => die!(NOT_FOUND) + None => die!(NOT_FOUND), }; - let repo_option: Option = sqlx::query_as::<_, Repository>("select * from repositories where owner = $1 and lower(name) = lower($2) limit 1") - .bind(user_id) - .bind(&uri.repository) - .fetch_optional(&mut transaction) - .await?; - - let user = match basic_auth::login_flow(&request, &mut transaction, "application/x-git-receive-pack-result").await? { + let repo_option: Option = sqlx::query_as::<_, Repository>( + "select * from repositories where owner = $1 and lower(name) = lower($2) limit 1", + ) + .bind(user_id) + .bind(&uri.repository) + .fetch_optional(&mut transaction) + .await?; + + let user = match basic_auth::login_flow( + &request, + &mut transaction, + "application/x-git-receive-pack-result", + ) + .await? 
+ { Either::Left(user) => user, - Either::Right(response) => return Ok(response) + Either::Right(response) => return Ok(response), }; let mut repo = match repo_option { Some(repo) => repo, - None => die!(NOT_FOUND) + None => die!(NOT_FOUND), }; // If the user doesn't have access return 404 Not found to not leak existence of internal/private repositories @@ -114,14 +134,31 @@ pub(crate) async fn git_receive_pack(uri: web::Path, mut body: web:: match searcher.search_in(vec) { Some(pos) => { - let (index_path, pack_path, _temp_dir) = pack::read(&vec[pos..], &repo, &mut transaction).await?; + let (index_path, pack_path, _temp_dir) = + pack::read(&vec[pos..], &repo, &mut transaction).await?; - output_writer.write_text_sideband_pktline(Band::Data, "unpack ok").await?; + output_writer + .write_text_sideband_pktline(Band::Data, "unpack ok") + .await?; for update in updates { match RefUpdateType::determinate(&update.old, &update.new).await? { - RefUpdateType::Create | RefUpdateType::Update => process_create_update(&update, &repo, store.clone(), &db_pool, &mut output_writer, index_path.as_ref(), pack_path.as_ref(), &vec[pos..]).await?, - RefUpdateType::Delete => process_delete(&update, &repo, &mut transaction, &mut output_writer).await? + RefUpdateType::Create | RefUpdateType::Update => { + process_create_update( + &update, + &repo, + store.clone(), + &db_pool, + &mut output_writer, + index_path.as_ref(), + pack_path.as_ref(), + &vec[pos..], + ) + .await? + } + RefUpdateType::Delete => { + process_delete(&update, &repo, &mut transaction, &mut output_writer).await? + } }; } } @@ -132,7 +169,9 @@ pub(crate) async fn git_receive_pack(uri: web::Path, mut body: web:: } // There wasn't actually something to unpack - output_writer.write_text_sideband_pktline(Band::Data, "unpack ok").await?; + output_writer + .write_text_sideband_pktline(Band::Data, "unpack ok") + .await?; for update in updates { process_delete(&update, &repo, &mut transaction, &mut output_writer).await?; @@ -153,11 +192,16 @@ pub(crate) async fn git_receive_pack(uri: web::Path, mut body: web:: .status(); match timeout(Duration::from_secs(10), command).await { - Ok(Ok(status)) => if !status.success() { - warn!("Git garbage collector exited with non-zero status: {}", status); + Ok(Ok(status)) => { + if !status.success() { + warn!( + "Git garbage collector exited with non-zero status: {}", + status + ); + } } Ok(Err(err)) => warn!("Failed to execute Git garbage collector: {}", err), - Err(_) => warn!("Git garbage collector failed to finish within 10 seconds") + Err(_) => warn!("Git garbage collector failed to finish within 10 seconds"), } output_writer.flush_sideband(Band::Data).await?; @@ -166,7 +210,12 @@ pub(crate) async fn git_receive_pack(uri: web::Path, mut body: web:: // Run post update hooks post_update::run(store, &mut repo, &mut transaction) .await - .with_context(|| format!("Failed to run post update hook for newest commit in {}/{}", &uri.username, repo.name))?; + .with_context(|| { + format!( + "Failed to run post update hook for newest commit in {}/{}", + &uri.username, repo.name + ) + })?; sqlx::query("update repositories set license = $1 where id = $2") .bind(&repo.license) diff --git a/src/routes/repository/git/git_upload_pack.rs b/src/routes/repository/git/git_upload_pack.rs index 2deb755..e11b1c9 100644 --- a/src/routes/repository/git/git_upload_pack.rs +++ b/src/routes/repository/git/git_upload_pack.rs @@ -9,19 +9,30 @@ use crate::repository::Repository; use crate::routes::repository::GitRequest; use 
actix_web::http::header::CONTENT_TYPE; -use actix_web::{Either, HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, Either, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use futures::StreamExt; use git_repository::protocol::transport::packetline::{PacketLineRef, StreamingPeekableIter}; use gitarena_macros::route; use sqlx::PgPool; -#[route("/{username}/{repository}.git/git-upload-pack", method = "POST", err = "git")] -pub(crate) async fn git_upload_pack(uri: web::Path<GitRequest>, mut body: web::Payload, request: HttpRequest, db_pool: web::Data<PgPool>) -> Result<impl Responder> { +#[route( + "/{username}/{repository}.git/git-upload-pack", + method = "POST", + err = "git" +)] +pub(crate) async fn git_upload_pack( + uri: web::Path<GitRequest>, + mut body: web::Payload, + request: HttpRequest, + db_pool: web::Data<PgPool>, +) -> Result<impl Responder> { let content_type = request.get_header("content-type").unwrap_or_default(); let accept_header = request.get_header("accept").unwrap_or_default(); - if content_type != "application/x-git-upload-pack-request" || accept_header != "application/x-git-upload-pack-result" { + if content_type != "application/x-git-upload-pack-request" + || accept_header != "application/x-git-upload-pack-result" + { die!(BAD_REQUEST); } @@ -33,25 +44,35 @@ pub(crate) async fn git_upload_pack(uri: web::Path, mut body: web::P let mut transaction = db_pool.begin().await?; - let user_option: Option<(i32,)> = sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") - .bind(&uri.username) - .fetch_optional(&mut transaction) - .await?; + let user_option: Option<(i32,)> = + sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") + .bind(&uri.username) + .fetch_optional(&mut transaction) + .await?; let (user_id,) = match user_option { Some(user_id) => user_id, - None => die!(NOT_FOUND) + None => die!(NOT_FOUND), }; - let repo_option: Option<Repository> = sqlx::query_as::<_, Repository>("select * from repositories where owner = $1 and lower(name) = lower($2) limit 1") - .bind(user_id) - .bind(&uri.repository) - .fetch_optional(&mut transaction) - .await?; - - let (user, repo) = match basic_auth::validate_repo_access(repo_option, "application/x-git-upload-pack-advertisement", &request, &mut transaction).await? { + let repo_option: Option<Repository> = sqlx::query_as::<_, Repository>( + "select * from repositories where owner = $1 and lower(name) = lower($2) limit 1", + ) + .bind(user_id) + .bind(&uri.repository) + .fetch_optional(&mut transaction) + .await?; + + let (user, repo) = match basic_auth::validate_repo_access( + repo_option, + "application/x-git-upload-pack-advertisement", + &request, + &mut transaction, + ) + .await? + { Either::Left(tuple) => tuple, - Either::Right(response) => return Ok(response) + Either::Right(response) => return Ok(response), }; if !privilege::check_access(&repo, user.as_ref(), &mut transaction).await?
{ @@ -91,9 +112,11 @@ pub(crate) async fn git_upload_pack(uri: web::Path, mut body: web::P .append_header((CONTENT_TYPE, accept_header)) .body(output) } - _ => HttpResponse::Unauthorized() // According to spec we have to send unauthorized for commands we don't understand + _ => { + HttpResponse::Unauthorized() // According to spec we have to send unauthorized for commands we don't understand .append_header((CONTENT_TYPE, accept_header)) .finish() + } }; transaction.commit().await?; diff --git a/src/routes/repository/git/info_refs.rs b/src/routes/repository/git/info_refs.rs index bdaa894..6831f59 100644 --- a/src/routes/repository/git/info_refs.rs +++ b/src/routes/repository/git/info_refs.rs @@ -7,41 +7,49 @@ use crate::repository::Repository; use crate::routes::repository::GitRequest; use actix_web::http::header::CONTENT_TYPE; -use actix_web::{Either, HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, Either, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use gitarena_macros::route; use sqlx::{Executor, PgPool, Pool, Postgres}; #[route("/{username}/{repository}.git/info/refs", method = "GET", err = "text")] -pub(crate) async fn info_refs(uri: web::Path, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn info_refs( + uri: web::Path, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let query_string = request.q_string(); let service = match query_string.get("service") { Some(value) => value.trim(), - None => die!(BAD_REQUEST, "Dumb clients are not supported") + None => die!(BAD_REQUEST, "Dumb clients are not supported"), }; let mut transaction = db_pool.begin().await?; - let user_option: Option<(i32,)> = sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") - .bind(&uri.username) - .fetch_optional(&mut transaction) - .await?; + let user_option: Option<(i32,)> = + sqlx::query_as("select id from users where lower(username) = lower($1) limit 1") + .bind(&uri.username) + .fetch_optional(&mut transaction) + .await?; let (user_id,) = match user_option { Some(user_id) => user_id, - None => die!(NOT_FOUND) + None => die!(NOT_FOUND), }; - let repo_option: Option = sqlx::query_as::<_, Repository>("select * from repositories where owner = $1 and lower(name) = lower($2) limit 1") - .bind(user_id) - .bind(&uri.repository) - .fetch_optional(&mut transaction) - .await?; + let repo_option: Option = sqlx::query_as::<_, Repository>( + "select * from repositories where owner = $1 and lower(name) = lower($2) limit 1", + ) + .bind(user_id) + .bind(&uri.repository) + .fetch_optional(&mut transaction) + .await?; match service { "git-upload-pack" => { - let response = upload_pack_info_refs(repo_option, service, &request, &mut transaction).await?; + let response = + upload_pack_info_refs(repo_option, service, &request, &mut transaction).await?; transaction.commit().await?; Ok(response) @@ -52,12 +60,18 @@ pub(crate) async fn info_refs(uri: web::Path, request: HttpRequest, Ok(response) } - _ => die!(FORBIDDEN, "Requested service not found") + _ => die!(FORBIDDEN, "Requested service not found"), } } -async fn upload_pack_info_refs<'e, E>(repo_option: Option, service: &str, request: &HttpRequest, executor: E) -> Result - where E: Executor<'e, Database = Postgres> +async fn upload_pack_info_refs<'e, E>( + repo_option: Option, + service: &str, + request: &HttpRequest, + executor: E, +) -> Result +where + E: Executor<'e, Database = Postgres>, { let git_protocol = request.get_header("git-protocol").unwrap_or_default(); @@ -65,9 +79,16 
@@ async fn upload_pack_info_refs<'e, E>(repo_option: Option, service: die!(BAD_REQUEST, "Unsupported Git protocol version"); } - let (_, _) = match basic_auth::validate_repo_access(repo_option, "application/x-git-upload-pack-advertisement", request, executor).await? { + let (_, _) = match basic_auth::validate_repo_access( + repo_option, + "application/x-git-upload-pack-advertisement", + request, + executor, + ) + .await? + { Either::Left(tuple) => tuple, - Either::Right(response) => return Ok(response) + Either::Right(response) => return Ok(response), }; Ok(HttpResponse::Ok() @@ -75,19 +96,29 @@ async fn upload_pack_info_refs<'e, E>(repo_option: Option, service: .body(capabilities(service).await?)) } -async fn receive_pack_info_refs(repo_option: Option, request: &HttpRequest, db_pool: &Pool) -> Result { +async fn receive_pack_info_refs( + repo_option: Option, + request: &HttpRequest, + db_pool: &Pool, +) -> Result { let mut transaction = db_pool.begin().await?; - let _user = match basic_auth::login_flow(request, &mut transaction, "application/x-git-receive-pack-advertisement").await? { + let _user = match basic_auth::login_flow( + request, + &mut transaction, + "application/x-git-receive-pack-advertisement", + ) + .await? + { Either::Left(user) => user, - Either::Right(response) => return Ok(response) + Either::Right(response) => return Ok(response), }; // TODO: Check if the user has actually `write` access to the repository let repo = match repo_option { Some(repo) => repo, - None => die!(NOT_FOUND) + None => die!(NOT_FOUND), }; let git2repo = repo.libgit2(&mut transaction).await?; diff --git a/src/routes/repository/import.rs b/src/routes/repository/import.rs index fd0351d..68d4428 100644 --- a/src/routes/repository/import.rs +++ b/src/routes/repository/import.rs @@ -1,9 +1,9 @@ use crate::config::get_setting; use crate::prelude::ContextExtensions; use crate::user::WebUser; -use crate::{die, Ipc, render_template}; +use crate::{die, render_template, Ipc}; -use actix_web::{Responder, web}; +use actix_web::{web, Responder}; use anyhow::Result; use futures_locks::RwLock; use gitarena_macros::route; @@ -11,11 +11,16 @@ use sqlx::PgPool; use tera::Context; #[route("/new/import", method = "GET", err = "html")] -pub(crate) async fn import_repo(web_user: WebUser, ipc: web::Data>, db_pool: web::Data) -> Result { +pub(crate) async fn import_repo( + web_user: WebUser, + ipc: web::Data>, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; - let enabled = get_setting::("repositories.importing_enabled", &mut transaction).await?; + let enabled = + get_setting::("repositories.importing_enabled", &mut transaction).await?; if !enabled || !ipc.read().await.is_connected() { die!(NOT_IMPLEMENTED, "Importing is disabled on this instance"); diff --git a/src/routes/repository/issues.rs b/src/routes/repository/issues.rs index 51597b8..b7488c5 100644 --- a/src/routes/repository/issues.rs +++ b/src/routes/repository/issues.rs @@ -6,7 +6,7 @@ use crate::user::WebUser; use std::collections::HashMap; -use actix_web::{HttpMessage, HttpRequest, Responder, web}; +use actix_web::{web, HttpMessage, HttpRequest, Responder}; use anyhow::{anyhow, Result}; use gitarena_macros::route; use itertools::Itertools; @@ -14,29 +14,44 @@ use sqlx::PgPool; use tera::Context; #[route("/{username}/{repository}/issues", method = "GET", err = "html")] -pub(crate) async fn all_issues(repo: Repository, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { 
+pub(crate) async fn all_issues( + repo: Repository, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; - let confidential = if web_user.as_ref().map_or_else(|| false, |user| user.id == repo.owner) { + let confidential = if web_user + .as_ref() + .map_or_else(|| false, |user| user.id == repo.owner) + { "1 = 1" } else { "confidential = false" }; - let issues = sqlx::query_as::<_, Issue>(format!("select * from issues where repo = $1 and {} order by id desc", confidential).as_str()) - .bind(&repo.id) - .fetch_all(&mut transaction) - .await?; + let issues = sqlx::query_as::<_, Issue>( + format!( + "select * from issues where repo = $1 and {} order by id desc", + confidential + ) + .as_str(), + ) + .bind(&repo.id) + .fetch_all(&mut transaction) + .await?; // This is really ugly and needs to be changed // TODO: Is there a way to map the original Issue struct to include these infos? let mut usernames = HashMap::new(); for issue in issues.iter() { - let (username,): (String,) = sqlx::query_as("select username from users where id = $1 limit 1") - .bind(&issue.author) - .fetch_one(&mut transaction) - .await?; + let (username,): (String,) = + sqlx::query_as("select username from users where id = $1 limit 1") + .bind(&issue.author) + .fetch_one(&mut transaction) + .await?; usernames.insert(format!("u{}", issue.author), username); @@ -63,7 +78,9 @@ pub(crate) async fn all_issues(repo: Repository, web_user: WebUser, request: Htt context.try_insert("repo", &repo)?; let extensions = request.extensions(); - let repo_owner = extensions.get::().ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; + let repo_owner = extensions + .get::() + .ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; context.try_insert("repo_owner_name", &repo_owner.0)?; context.try_insert("issues", &issues)?; diff --git a/src/routes/repository/mod.rs b/src/routes/repository/mod.rs index 7f5f9af..a5188ac 100644 --- a/src/routes/repository/mod.rs +++ b/src/routes/repository/mod.rs @@ -5,8 +5,8 @@ mod api; mod archive; mod blobs; mod commits; -mod import; mod git; +mod import; mod issues; mod repo_create; mod repo_view; @@ -29,12 +29,12 @@ pub(crate) fn init(config: &mut ServiceConfig) { #[derive(Deserialize)] pub(crate) struct GitRequest { pub(crate) username: String, - pub(crate) repository: String + pub(crate) repository: String, } #[derive(Deserialize)] pub(crate) struct GitTreeRequest { pub(crate) username: String, pub(crate) repository: String, - pub(crate) tree: String + pub(crate) tree: String, } diff --git a/src/routes/repository/repo_create.rs b/src/routes/repository/repo_create.rs index 2987012..9c835ac 100644 --- a/src/routes/repository/repo_create.rs +++ b/src/routes/repository/repo_create.rs @@ -1,8 +1,8 @@ use crate::prelude::ContextExtensions; -use crate::{Ipc, render_template}; use crate::user::WebUser; +use crate::{render_template, Ipc}; -use actix_web::{Responder, web}; +use actix_web::{web, Responder}; use anyhow::Result; use futures_locks::RwLock; use gitarena_macros::route; @@ -10,7 +10,11 @@ use sqlx::PgPool; use tera::Context; #[route("/new", method = "GET", err = "html")] -pub(crate) async fn new_repo(web_user: WebUser, ipc: web::Data>, db_pool: web::Data) -> Result { +pub(crate) async fn new_repo( + web_user: WebUser, + ipc: web::Data>, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; diff --git a/src/routes/repository/repo_view.rs 
b/src/routes/repository/repo_view.rs index e14592a..7a06f32 100644 --- a/src/routes/repository/repo_view.rs +++ b/src/routes/repository/repo_view.rs @@ -1,6 +1,8 @@ -use crate::git::GIT_HASH_KIND; -use crate::git::history::{all_branches, all_commits, all_tags, last_commit_for_blob, last_commit_for_ref}; +use crate::git::history::{ + all_branches, all_commits, all_tags, last_commit_for_blob, last_commit_for_ref, +}; use crate::git::utils::{read_blob_content, repo_files_at_ref}; +use crate::git::GIT_HASH_KIND; use crate::prelude::{ContextExtensions, LibGit2SignatureExtensions}; use crate::repository::{RepoOwner, Repository}; use crate::routes::repository::GitTreeRequest; @@ -10,7 +12,7 @@ use crate::{die, err, render_template}; use std::cmp::Ordering; -use actix_web::{HttpMessage, HttpRequest, Responder, web}; +use actix_web::{web, HttpMessage, HttpRequest, Responder}; use anyhow::{anyhow, Result}; use bstr::ByteSlice; use git_repository::hash::ObjectId; @@ -22,7 +24,13 @@ use sqlx::{PgPool, Postgres, Transaction}; use tera::Context; use tracing_unwrap::OptionExt; -async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web_user: WebUser, mut transaction: Transaction<'_, Postgres>) -> Result { +async fn render( + tree_option: Option<&str>, + repo: Repository, + username: &str, + web_user: WebUser, + mut transaction: Transaction<'_, Postgres>, +) -> Result { let tree_name = tree_option.unwrap_or(repo.default_branch.as_str()); let mut context = Context::new(); @@ -30,10 +38,12 @@ async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web let libgit2_repo = repo.libgit2(&mut transaction).await?; let gitoxide_repo = repo.gitoxide(&mut transaction).await?; - let (issues_count,): (i64,) = sqlx::query_as("select count(*) from issues where repo = $1 and closed = false and confidential = false") - .bind(&repo.id) - .fetch_one(&mut transaction) - .await?; + let (issues_count,): (i64,) = sqlx::query_as( + "select count(*) from issues where repo = $1 and closed = false and confidential = false", + ) + .bind(&repo.id) + .fetch_one(&mut transaction) + .await?; context.try_insert("repo", &repo)?; context.try_insert("repo_owner_name", &username)?; @@ -76,11 +86,17 @@ async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web for entry in tree.entries.iter().take(1000) { let name = entry.filename.to_str().unwrap_or("Invalid file name"); - let oid = last_commit_for_blob(&libgit2_repo, full_tree_name, name).await?.unwrap_or_log(); + let oid = last_commit_for_blob(&libgit2_repo, full_tree_name, name) + .await? 
+ .unwrap_or_log(); let commit = libgit2_repo.find_commit(oid)?; let submodule_target_oid = if matches!(entry.mode, EntryMode::Commit) { - Some(read_blob_content(entry.oid.as_ref(), store.clone()).await.unwrap_or_else(|_| ObjectId::null(GIT_HASH_KIND).to_string())) + Some( + read_blob_content(entry.oid.as_ref(), store.clone()) + .await + .unwrap_or_else(|_| ObjectId::null(GIT_HASH_KIND).to_string()), + ) } else { None }; @@ -94,10 +110,10 @@ async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web message: commit.message().unwrap_or_default().to_owned(), time: commit.time().seconds(), date: None, - author_name: String::new(), // Unused for file listing - author_uid: None, // Unused for file listing - author_email: String::new() // Unused for file listing - } + author_name: String::new(), // Unused for file listing + author_uid: None, // Unused for file listing + author_email: String::new(), // Unused for file listing + }, }); } @@ -108,13 +124,19 @@ async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web if lhs.file_type == EntryMode::Tree as u16 && rhs.file_type != EntryMode::Tree as u16 { Ordering::Less - } else if lhs.file_type != EntryMode::Tree as u16 && rhs.file_type == EntryMode::Tree as u16 { + } else if lhs.file_type != EntryMode::Tree as u16 && rhs.file_type == EntryMode::Tree as u16 + { Ordering::Greater - } else if lhs.file_type == EntryMode::Tree as u16 && rhs.file_type == EntryMode::Tree as u16 { + } else if lhs.file_type == EntryMode::Tree as u16 && rhs.file_type == EntryMode::Tree as u16 + { lhs.file_name.cmp(rhs.file_name) - } else if lhs.file_type == EntryMode::Commit as u16 && rhs.file_type != EntryMode::Commit as u16 { + } else if lhs.file_type == EntryMode::Commit as u16 + && rhs.file_type != EntryMode::Commit as u16 + { Ordering::Less - } else if lhs.file_type != EntryMode::Commit as u16 && rhs.file_type == EntryMode::Commit as u16 { + } else if lhs.file_type != EntryMode::Commit as u16 + && rhs.file_type == EntryMode::Commit as u16 + { Ordering::Greater } else { lhs.file_name.cmp(rhs.file_name) @@ -138,39 +160,71 @@ async fn render(tree_option: Option<&str>, repo: Repository, username: &str, web } context.try_insert("files", &files)?; - context.try_insert("commits_count", &all_commits(&libgit2_repo, full_tree_name, 0).await?.len())?; - - let last_commit_oid = last_commit_for_ref(&libgit2_repo, full_tree_name).await?.ok_or_else(|| err!(OK, "Repository is empty"))?; + context.try_insert( + "commits_count", + &all_commits(&libgit2_repo, full_tree_name, 0).await?.len(), + )?; + + let last_commit_oid = last_commit_for_ref(&libgit2_repo, full_tree_name) + .await? 
+ .ok_or_else(|| err!(OK, "Repository is empty"))?; let last_commit = libgit2_repo.find_commit(last_commit_oid)?; // TODO: Additionally show last_commit.committer and if doesn't match with author - let (author_name, author_uid, author_email) = last_commit.author().try_disassemble(&mut transaction).await; - - context.try_insert("last_commit", &GitCommit { - oid: format!("{}", last_commit_oid), - message: last_commit.message().unwrap_or_default().to_owned(), - time: last_commit.time().seconds(), - date: None, - author_name, - author_uid, - author_email - })?; + let (author_name, author_uid, author_email) = + last_commit.author().try_disassemble(&mut transaction).await; + + context.try_insert( + "last_commit", + &GitCommit { + oid: format!("{}", last_commit_oid), + message: last_commit.message().unwrap_or_default().to_owned(), + time: last_commit.time().seconds(), + date: None, + author_name, + author_uid, + author_email, + }, + )?; render_template!("repo/index.html", context, transaction) } -#[route("/{username}/{repository}/tree/{tree:.*}", method = "GET", err = "html")] -pub(crate) async fn view_repo_tree(repo: Repository, uri: web::Path, web_user: WebUser, db_pool: web::Data) -> Result { +#[route( + "/{username}/{repository}/tree/{tree:.*}", + method = "GET", + err = "html" +)] +pub(crate) async fn view_repo_tree( + repo: Repository, + uri: web::Path, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let transaction = db_pool.begin().await?; - render(Some(uri.tree.as_str()), repo, &uri.username, web_user, transaction).await + render( + Some(uri.tree.as_str()), + repo, + &uri.username, + web_user, + transaction, + ) + .await } #[route("/{username}/{repository}", method = "GET", err = "html")] -pub(crate) async fn view_repo(repo: Repository, web_user: WebUser, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn view_repo( + repo: Repository, + web_user: WebUser, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let transaction = db_pool.begin().await?; let extensions = request.extensions(); - let repo_owner = extensions.get::().ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; + let repo_owner = extensions + .get::() + .ok_or_else(|| anyhow!("Failed to lookup repo owner"))?; render(None, repo, &repo_owner.0, web_user, transaction).await } diff --git a/src/routes/user/api/add_key.rs b/src/routes/user/api/add_key.rs index f918f59..e16ad3f 100644 --- a/src/routes/user/api/add_key.rs +++ b/src/routes/user/api/add_key.rs @@ -2,7 +2,7 @@ use crate::ssh::SshKey; use crate::user::WebUser; use crate::{die, err}; -use actix_web::{HttpResponse, Responder, web}; +use actix_web::{web, HttpResponse, Responder}; use anyhow::Context; use anyhow::Result; use chrono::serde::ts_seconds_option; @@ -15,7 +15,11 @@ use serde::{Deserialize, Serialize}; use sqlx::PgPool; #[route("/api/ssh-key", method = "PUT", err = "json")] -pub(crate) async fn put_ssh_key(body: web::Json, web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn put_ssh_key( + body: web::Json, + web_user: WebUser, + db_pool: web::Data, +) -> Result { let user = web_user.into_user()?; let mut transaction = db_pool.begin().await?; @@ -23,8 +27,10 @@ pub(crate) async fn put_ssh_key(body: web::Json, web_user: We die!(BAD_REQUEST, "Key is not a valid argument"); } - let public_key = PublicKey::parse(body.key.as_str()).context("Failed to parse SSH public key")?; - let algorithm = KeyType::try_from(public_key.keytype()).map_err(|_| err!(BAD_REQUEST, "Invalid or unsupported key type"))?; + 
let public_key = + PublicKey::parse(body.key.as_str()).context("Failed to parse SSH public key")?; + let algorithm = KeyType::try_from(public_key.keytype()) + .map_err(|_| err!(BAD_REQUEST, "Invalid or unsupported key type"))?; let key_title = if !body.title.is_empty() { &body.title @@ -37,14 +43,22 @@ pub(crate) async fn put_ssh_key(body: web::Json, web_user: We let fingerprint = public_key.fingerprint_md5(); if fingerprint.len() != 47 { - warn!("Calculated md5 fingerprint is not acceptable: {} (expected 47 characters, got {})", &fingerprint, fingerprint.len()); - die!(UNPROCESSABLE_ENTITY, "Calculated md5 fingerprint did not end up being 47 characters long"); + warn!( + "Calculated md5 fingerprint is not acceptable: {} (expected 47 characters, got {})", + &fingerprint, + fingerprint.len() + ); + die!( + UNPROCESSABLE_ENTITY, + "Calculated md5 fingerprint did not end up being 47 characters long" + ); } - let (exists,): (bool,) = sqlx::query_as("select exists(select 1 from ssh_keys where fingerprint = $1 limit 1)") - .bind(fingerprint.as_str()) - .fetch_one(&mut transaction) - .await?; + let (exists,): (bool,) = + sqlx::query_as("select exists(select 1 from ssh_keys where fingerprint = $1 limit 1)") + .bind(fingerprint.as_str()) + .fetch_one(&mut transaction) + .await?; if exists { die!(CONFLICT, "SSH key already exists"); @@ -62,11 +76,17 @@ pub(crate) async fn put_ssh_key(body: web::Json, web_user: We transaction.commit().await?; - debug!("New SSH key added for user {}: {} (fingerprint: {} id {})", &user.id, key_title, fingerprint.as_str(), &key.id); + debug!( + "New SSH key added for user {}: {} (fingerprint: {} id {})", + &user.id, + key_title, + fingerprint.as_str(), + &key.id + ); Ok(HttpResponse::Created().json(AddKeyJsonResponse { id: key.id, - fingerprint + fingerprint, })) } @@ -75,11 +95,11 @@ pub(crate) struct AddKeyJsonRequest { title: String, key: String, #[serde(default, with = "ts_seconds_option")] - expiration_date: Option> + expiration_date: Option>, } #[derive(Serialize)] pub(crate) struct AddKeyJsonResponse { id: i32, - fingerprint: String + fingerprint: String, } diff --git a/src/routes/user/avatar.rs b/src/routes/user/avatar.rs index e88f375..d815bb5 100644 --- a/src/routes/user/avatar.rs +++ b/src/routes/user/avatar.rs @@ -10,10 +10,10 @@ use std::time::SystemTime; use actix_multipart::Multipart; use actix_web::http::header::{CACHE_CONTROL, LAST_MODIFIED}; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::{Context, Result}; -use awc::Client; use awc::http::header::IF_MODIFIED_SINCE; +use awc::Client; use chrono::{Duration, NaiveDateTime}; use futures::TryStreamExt; use gitarena_macros::{from_config, route}; @@ -22,7 +22,11 @@ use serde::Deserialize; use sqlx::PgPool; #[route("/api/avatar/{user_id}", method = "GET", err = "text")] -pub(crate) async fn get_avatar(avatar_request: web::Path, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn get_avatar( + avatar_request: web::Path, + request: HttpRequest, + db_pool: web::Data, +) -> Result { let (gravatar_enabled, avatars_dir): (bool, String) = from_config!( "avatars.gravatar" => bool, "avatars.dir" => String @@ -36,7 +40,9 @@ pub(crate) async fn get_avatar(avatar_request: web::Path, request // User has set an avatar, return it if path.is_file() { - return send_image(path, &request).await.context("Failed to read local image file"); + return send_image(path, &request) + .await + .context("Failed to read local image 
file"); } } @@ -53,7 +59,9 @@ pub(crate) async fn get_avatar(avatar_request: web::Path, request .email }; - return send_gravatar(email.as_str(), &request).await.context("Failed to request Gravatar image"); + return send_gravatar(email.as_str(), &request) + .await + .context("Failed to request Gravatar image"); } // Gravatar integration is not enabled, return fallback icon @@ -62,11 +70,17 @@ pub(crate) async fn get_avatar(avatar_request: web::Path, request let path_str = format!("{}/default.jpg", avatars_dir); let path = Path::new(path_str.as_str()); - Ok(send_image(path, &request).await.context("Failed to read default avatar file")?) + Ok(send_image(path, &request) + .await + .context("Failed to read default avatar file")?) } #[route("/api/avatar", method = "PUT", err = "text")] -pub(crate) async fn put_avatar(web_user: WebUser, mut payload: Multipart, db_pool: web::Data) -> Result { +pub(crate) async fn put_avatar( + web_user: WebUser, + mut payload: Multipart, + db_pool: web::Data, +) -> Result { if matches!(web_user, WebUser::Anonymous) { die!(UNAUTHORIZED, "No logged in"); } @@ -82,25 +96,33 @@ pub(crate) async fn put_avatar(web_user: WebUser, mut payload: Multipart, db_poo let mut field = match payload.try_next().await { Ok(Some(field)) => field, Ok(None) => die!(BAD_REQUEST, "No multipart field found"), - Err(err) => return Err(err.into()) + Err(err) => return Err(err.into()), }; let content_disposition = field.content_disposition(); - let file_name = content_disposition.get_filename().ok_or_else(|| err!(BAD_REQUEST, "No file name"))?; - let extension = file_name.rsplit_once('.') + let file_name = content_disposition + .get_filename() + .ok_or_else(|| err!(BAD_REQUEST, "No file name"))?; + let extension = file_name + .rsplit_once('.') .map(|(_, ext)| ext.to_owned()) .ok_or_else(|| err!(BAD_REQUEST, "Invalid file name"))?; let mut bytes = web::BytesMut::new(); - while let Some(chunk) = field.try_next().await.context("Failed to read multipart data chunk")? { + while let Some(chunk) = field + .try_next() + .await + .context("Failed to read multipart data chunk")? + { bytes.extend_from_slice(chunk.as_ref()); } let frozen_bytes = bytes.freeze(); web::block(move || -> Result<()> { - let format = ImageFormat::from_extension(extension).ok_or_else(|| err!(BAD_REQUEST, "Unsupported image format"))?; + let format = ImageFormat::from_extension(extension) + .ok_or_else(|| err!(BAD_REQUEST, "Unsupported image format"))?; let mut cursor = Cursor::new(frozen_bytes.as_ref()); @@ -113,7 +135,10 @@ pub(crate) async fn put_avatar(web_user: WebUser, mut payload: Multipart, db_poo img.save_with_format(path, ImageFormat::Jpeg)?; Ok(()) - }).await.context("Failed to save image")?.context("Failed to save image")?; + }) + .await + .context("Failed to save image")? 
+ .context("Failed to save image")?; Ok(HttpResponse::Created().finish()) } @@ -128,19 +153,27 @@ async fn send_image>(path: P, request: &HttpRequest) -> Result Duration::seconds(0) { - return Ok(HttpResponse::NotModified().append_header((LAST_MODIFIED, format)).finish()); + return Ok(HttpResponse::NotModified() + .append_header((LAST_MODIFIED, format)) + .finish()); } } @@ -156,7 +189,10 @@ async fn send_image>(path: P, request: &HttpRequest) -> Result Result { let md5hash = md5::compute(email); - let url = format!("https://www.gravatar.com/avatar/{:x}?s=500&r=pg&d=identicon", md5hash); + let url = format!( + "https://www.gravatar.com/avatar/{:x}?s=500&r=pg&d=identicon", + md5hash + ); let mut client = Client::gitarena().get(url); @@ -164,7 +200,10 @@ async fn send_gravatar(email: &str, request: &HttpRequest) -> Result Result, web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn initiate_sso( + sso_request: web::Path, + web_user: WebUser, + db_pool: web::Data, +) -> Result { if matches!(web_user, WebUser::Authenticated(_)) { die!(UNAUTHORIZED, "Already logged in"); } @@ -31,13 +35,21 @@ pub(crate) async fn initiate_sso(sso_request: web::Path, web_user: W let provider_impl = provider.get_implementation(); // TODO: Save token in cache to check for CSRF - let (url, _token) = SSOProvider::generate_auth_url(provider_impl.deref(), &provider, &db_pool).await?; + let (url, _token) = + SSOProvider::generate_auth_url(provider_impl.deref(), &provider, &db_pool).await?; - Ok(HttpResponse::TemporaryRedirect().append_header((LOCATION, url.to_string())).finish()) + Ok(HttpResponse::TemporaryRedirect() + .append_header((LOCATION, url.to_string())) + .finish()) } #[route("/sso/{service}/callback", method = "GET", err = "html")] -pub(crate) async fn sso_callback(sso_request: web::Path, id: Identity, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn sso_callback( + sso_request: web::Path, + id: Identity, + request: HttpRequest, + db_pool: web::Data, +) -> Result { if id.identity().is_some() { die!(UNAUTHORIZED, "Already logged in"); } @@ -47,7 +59,9 @@ pub(crate) async fn sso_callback(sso_request: web::Path, id: Identit let provider_impl = provider.get_implementation(); let query_string = request.q_string(); - let token_response = SSOProvider::exchange_response(provider_impl.deref(), &query_string, &provider, &db_pool).await?; + let token_response = + SSOProvider::exchange_response(provider_impl.deref(), &query_string, &provider, &db_pool) + .await?; if !SSOProvider::validate_scopes(provider_impl.deref(), token_response.scopes()) { die!(CONFLICT, "Not all required scopes have been granted"); @@ -60,11 +74,13 @@ pub(crate) async fn sso_callback(sso_request: web::Path, id: Identit let provider_id = SSOProvider::get_provider_id(provider_impl.deref(), token.as_str()).await?; - let sso: Option = sqlx::query_as::<_, SSO>("select * from sso where provider = $1 and provider_id = $2 limit 1") - .bind(&provider) - .bind(provider_id.as_str()) - .fetch_optional(&mut transaction) - .await?; + let sso: Option = sqlx::query_as::<_, SSO>( + "select * from sso where provider = $1 and provider_id = $2 limit 1", + ) + .bind(&provider) + .bind(provider_id.as_str()) + .fetch_optional(&mut transaction) + .await?; let user = match sso { Some(sso) => { @@ -73,7 +89,7 @@ pub(crate) async fn sso_callback(sso_request: web::Path, id: Identit .bind(&sso.user_id) .fetch_one(&mut transaction) .await? 
- }, + } None => { // User link does not exist -> Create new user SSOProvider::create_user(provider_impl.deref(), token.as_str(), &db_pool) @@ -87,9 +103,15 @@ pub(crate) async fn sso_callback(sso_request: web::Path, id: Identit .ok_or_else(|| err!(UNAUTHORIZED, "No primary email"))?; if user.disabled || !primary_email.is_allowed_login() { - debug!("Received {} sso login request for disabled user {} (id {})", &provider, &user.username, &user.id); - - die!(FORBIDDEN, "Account has been disabled. Please contact support."); + debug!( + "Received {} sso login request for disabled user {} (id {})", + &provider, &user.username, &user.id + ); + + die!( + FORBIDDEN, + "Account has been disabled. Please contact support." + ); } // We're now doing something *very* illegal: We're changing state in a GET request @@ -99,14 +121,19 @@ pub(crate) async fn sso_callback(sso_request: web::Path, id: Identit let session = Session::new(&request, &user, &mut transaction).await?; id.remember(session.to_string()); - debug!("{} (id {}) logged in successfully using {} sso", &user.username, &user.id, &provider); + debug!( + "{} (id {}) logged in successfully using {} sso", + &user.username, &user.id, &provider + ); transaction.commit().await?; - Ok(HttpResponse::Found().append_header((LOCATION, "/")).finish()) + Ok(HttpResponse::Found() + .append_header((LOCATION, "/")) + .finish()) } #[derive(Deserialize)] pub(crate) struct SSORequest { - service: String + service: String, } diff --git a/src/routes/user/user_create.rs b/src/routes/user/user_create.rs index 5f9a609..a659626 100644 --- a/src/routes/user/user_create.rs +++ b/src/routes/user/user_create.rs @@ -7,7 +7,7 @@ use crate::verification::send_verification_mail; use crate::{captcha, crypto, die, render_template}; use actix_identity::Identity; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use gitarena_macros::route; use log::info; @@ -16,7 +16,10 @@ use sqlx::PgPool; use tera::Context; #[route("/register", method = "GET", err = "html")] -pub(crate) async fn get_register(web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn get_register( + web_user: WebUser, + db_pool: web::Data, +) -> Result { let mut transaction = db_pool.begin().await?; if matches!(web_user, WebUser::Authenticated(_)) { @@ -29,7 +32,9 @@ pub(crate) async fn get_register(web_user: WebUser, db_pool: web::Data) die!(FORBIDDEN, "User registrations are disabled"); } - if let Some(site_key) = get_optional_setting::("hcaptcha.site_key", &mut transaction).await? { + if let Some(site_key) = + get_optional_setting::("hcaptcha.site_key", &mut transaction).await? + { context.try_insert("hcaptcha_site_key", &site_key)?; } @@ -37,7 +42,12 @@ pub(crate) async fn get_register(web_user: WebUser, db_pool: web::Data) } #[route("/api/user", method = "POST", err = "htmx+html")] -pub(crate) async fn post_register(body: web::Json, id: Identity, request: HttpRequest, db_pool: web::Data) -> Result { +pub(crate) async fn post_register( + body: web::Json, + id: Identity, + request: HttpRequest, + db_pool: web::Data, +) -> Result { if id.identity().is_some() { // Maybe just redirect to home page? die!(UNAUTHORIZED, "Already logged in"); @@ -61,14 +71,22 @@ pub(crate) async fn post_register(body: web::Json, id: Iden // This is not according to the spec of the IETF but trying to implement that is honestly out-of-bounds for this project // Thus a best effort naive implementation. 
Checks for the presence of "@" and a "." in the domain name (after the last @) - if !email.contains('@') || !email.rsplit_once('@').map(|(_, x)| x).unwrap_or_default().contains('.') { + if !email.contains('@') + || !email + .rsplit_once('@') + .map(|(_, x)| x) + .unwrap_or_default() + .contains('.') + { die!(BAD_REQUEST, "Invalid email address"); } - let (email_exists,): (bool,) = sqlx::query_as("select exists(select 1 from emails where lower(email) = lower($1) limit 1)") - .bind(email) - .fetch_one(&mut transaction) - .await?; + let (email_exists,): (bool,) = sqlx::query_as( + "select exists(select 1 from emails where lower(email) = lower($1) limit 1)", + ) + .bind(email) + .fetch_one(&mut transaction) + .await?; if email_exists { die!(CONFLICT, "Email already in use"); @@ -84,9 +102,13 @@ pub(crate) async fn post_register(body: web::Json, id: Iden let password = crypto::hash_password(raw_password)?; - if get_optional_setting::("hcaptcha.site_key", &mut transaction).await?.is_some() { + if get_optional_setting::("hcaptcha.site_key", &mut transaction) + .await? + .is_some() + { if let Some(h_captcha_response) = &body.h_captcha_response { - let captcha_success = captcha::verify_captcha(h_captcha_response, &mut transaction).await?; + let captcha_success = + captcha::verify_captcha(h_captcha_response, &mut transaction).await?; if !captcha_success { die!(UNPROCESSABLE_ENTITY, "Captcha verification failed"); @@ -96,11 +118,13 @@ pub(crate) async fn post_register(body: web::Json, id: Iden } } - let user: User = sqlx::query_as::<_, User>("insert into users (username, password) values ($1, $2) returning *") - .bind(username) - .bind(&password) - .fetch_one(&mut transaction) - .await?; + let user: User = sqlx::query_as::<_, User>( + "insert into users (username, password) values ($1, $2) returning *", + ) + .bind(username) + .bind(&password) + .fetch_one(&mut transaction) + .await?; sqlx::query("insert into emails (owner, email, \"primary\", commit, notification, public) values ($1, $2, true, true, true, true)") .bind(&user.id) @@ -122,11 +146,14 @@ pub(crate) async fn post_register(body: web::Json, id: Iden info!("New user registered: {} (id {})", &user.username, &user.id); Ok(if request.is_htmx() { - HttpResponse::Ok().append_header(("hx-redirect", "/")).append_header(("hx-refresh", "true")).finish() + HttpResponse::Ok() + .append_header(("hx-redirect", "/")) + .append_header(("hx-refresh", "true")) + .finish() } else { HttpResponse::Ok().json(RegisterJsonResponse { success: true, - id: user.id + id: user.id, }) }) } @@ -137,11 +164,11 @@ pub(crate) struct RegisterJsonRequest { email: String, password: String, #[serde(rename = "h-captcha-response")] - h_captcha_response: Option + h_captcha_response: Option, } #[derive(Serialize)] struct RegisterJsonResponse { success: bool, - id: i32 + id: i32, } diff --git a/src/routes/user/user_login.rs b/src/routes/user/user_login.rs index f7e48f0..02be3be 100644 --- a/src/routes/user/user_login.rs +++ b/src/routes/user/user_login.rs @@ -7,22 +7,30 @@ use crate::{crypto, die, err}; use actix_identity::Identity; use actix_web::http::header::LOCATION; use actix_web::http::StatusCode; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use gitarena_macros::{from_config, route}; +use log::debug; use serde::Deserialize; use sqlx::PgPool; use tera::Context; use tracing_unwrap::OptionExt; -use log::debug; #[route("/login", method = "GET", err = "html")] -pub(crate) async fn 
get_login(web_user: WebUser, db_pool: web::Data) -> Result { +pub(crate) async fn get_login( + web_user: WebUser, + db_pool: web::Data, +) -> Result { if matches!(web_user, WebUser::Authenticated(_)) { die!(UNAUTHORIZED, "Already logged in"); } - let (allow_registrations, bitbucket_sso_enabled, github_sso_enabled, gitlab_sso_enabled): (bool, bool, bool, bool) = from_config!( + let (allow_registrations, bitbucket_sso_enabled, github_sso_enabled, gitlab_sso_enabled): ( + bool, + bool, + bool, + bool, + ) = from_config!( "allow_registrations" => bool, "sso.bitbucket.enabled" => bool, "sso.github.enabled" => bool, @@ -40,12 +48,19 @@ pub(crate) async fn get_login(web_user: WebUser, db_pool: web::Data) -> } #[route("/login", method = "POST", err = "html")] -pub(crate) async fn post_login(body: web::Form, request: HttpRequest, id: Identity, db_pool: web::Data) -> Result { +pub(crate) async fn post_login( + body: web::Form, + request: HttpRequest, + id: Identity, + db_pool: web::Data, +) -> Result { let redirect = body.redirect.as_deref().unwrap_or("/"); // User is already logged in if id.identity().is_some() { - return Ok(HttpResponse::Found().append_header((LOCATION, redirect)).finish()); + return Ok(HttpResponse::Found() + .append_header((LOCATION, redirect)) + .finish()); } // TODO: Maybe allow login with email address? @@ -73,32 +88,60 @@ pub(crate) async fn post_login(body: web::Form, request: HttpReque let mut transaction = db_pool.begin().await?; - let option: Option = sqlx::query_as::<_, User>("select * from users where username = $1 limit 1") - .bind(username) - .fetch_optional(&mut transaction) - .await?; + let option: Option = + sqlx::query_as::<_, User>("select * from users where username = $1 limit 1") + .bind(username) + .fetch_optional(&mut transaction) + .await?; if option.is_none() { - debug!("Received login request for non-existent user: {}", &username); + debug!( + "Received login request for non-existent user: {}", + &username + ); context.try_insert("username_error", "Username does not exist")?; - return render_template!(StatusCode::UNAUTHORIZED, "user/login.html", context, transaction); + return render_template!( + StatusCode::UNAUTHORIZED, + "user/login.html", + context, + transaction + ); } let user = option.unwrap_or_log(); if user.password == "sso-login" { - debug!("Received login request for an {} (id {}) despite being registered with SSO", &user.username, &user.id); - - context.try_insert("password_error", "Your account has been registered with SSO. Try using another login method below.")?; - return render_template!(StatusCode::UNAUTHORIZED, "user/login.html", context, transaction); + debug!( + "Received login request for an {} (id {}) despite being registered with SSO", + &user.username, &user.id + ); + + context.try_insert( + "password_error", + "Your account has been registered with SSO. Try using another login method below.", + )?; + return render_template!( + StatusCode::UNAUTHORIZED, + "user/login.html", + context, + transaction + ); } if !crypto::check_password(&user, password)? 
{ - debug!("Received login request with wrong password for {} (id {})", &user.username, &user.id); + debug!( + "Received login request with wrong password for {} (id {})", + &user.username, &user.id + ); context.try_insert("password_error", "Incorrect password")?; - return render_template!(StatusCode::UNAUTHORIZED, "user/login.html", context, transaction); + return render_template!( + StatusCode::UNAUTHORIZED, + "user/login.html", + context, + transaction + ); } let primary_email = Email::find_primary_email(&user, &mut transaction) @@ -106,25 +149,41 @@ pub(crate) async fn post_login(body: web::Form, request: HttpReque .ok_or_else(|| err!(UNAUTHORIZED, "No primary email"))?; if user.disabled || !primary_email.is_allowed_login() { - debug!("Received login request for disabled user {} (id {})", &user.username, &user.id); - - context.try_insert("general_error", "Account has been disabled. Please contact support.")?; - return render_template!(StatusCode::UNAUTHORIZED, "user/login.html", context, transaction); + debug!( + "Received login request for disabled user {} (id {})", + &user.username, &user.id + ); + + context.try_insert( + "general_error", + "Account has been disabled. Please contact support.", + )?; + return render_template!( + StatusCode::UNAUTHORIZED, + "user/login.html", + context, + transaction + ); } let session = Session::new(&request, &user, &mut transaction).await?; id.remember(session.to_string()); - debug!("{} (id {}) logged in successfully", &user.username, &user.id); + debug!( + "{} (id {}) logged in successfully", + &user.username, &user.id + ); transaction.commit().await?; - Ok(HttpResponse::Found().append_header((LOCATION, redirect)).finish()) + Ok(HttpResponse::Found() + .append_header((LOCATION, redirect)) + .finish()) } #[derive(Deserialize)] pub(crate) struct LoginRequest { username: String, password: String, - redirect: Option + redirect: Option, } diff --git a/src/routes/user/user_logout.rs b/src/routes/user/user_logout.rs index 78166c7..3be86f0 100644 --- a/src/routes/user/user_logout.rs +++ b/src/routes/user/user_logout.rs @@ -4,14 +4,18 @@ use crate::session::Session; use actix_identity::Identity; use actix_web::http::header::LOCATION; -use actix_web::{HttpRequest, HttpResponse, Responder, web}; +use actix_web::{web, HttpRequest, HttpResponse, Responder}; use anyhow::Result; use gitarena_macros::route; use log::debug; use sqlx::PgPool; #[route("/logout", method = "POST", err = "htmx+html")] -pub(crate) async fn logout(request: HttpRequest, id: Identity, db_pool: web::Data) -> Result { +pub(crate) async fn logout( + request: HttpRequest, + id: Identity, + db_pool: web::Data, +) -> Result { if id.identity().is_none() { // Maybe just redirect to home page? 
die!(UNAUTHORIZED, "Already logged out"); @@ -19,7 +23,11 @@ pub(crate) async fn logout(request: HttpRequest, id: Identity, db_pool: web::Dat let mut transaction = db_pool.begin().await?; - if let Some(session) = Session::from_identity(id.identity(), &mut transaction).await.ok().flatten() { + if let Some(session) = Session::from_identity(id.identity(), &mut transaction) + .await + .ok() + .flatten() + { debug!("Destroying a session for user id {}", &session.user_id); session.destroy(&mut transaction).await?; @@ -30,8 +38,13 @@ pub(crate) async fn logout(request: HttpRequest, id: Identity, db_pool: web::Dat transaction.commit().await?; Ok(if request.is_htmx() { - HttpResponse::Ok().append_header(("hx-redirect", "/")).append_header(("hx-refresh", "true")).finish() + HttpResponse::Ok() + .append_header(("hx-redirect", "/")) + .append_header(("hx-refresh", "true")) + .finish() } else { - HttpResponse::Found().append_header((LOCATION, "/")).finish() + HttpResponse::Found() + .append_header((LOCATION, "/")) + .finish() }) } diff --git a/src/routes/user/user_verify.rs b/src/routes/user/user_verify.rs index d497142..4d3aa9f 100644 --- a/src/routes/user/user_verify.rs +++ b/src/routes/user/user_verify.rs @@ -1,6 +1,6 @@ use crate::die; -use actix_web::{Responder, web}; +use actix_web::{web, Responder}; use anyhow::Result; use gitarena_macros::route; use log::info; @@ -10,7 +10,10 @@ use sqlx::PgPool; use tracing_unwrap::OptionExt; #[route("/api/verify/{token}", method = "GET", err = "html")] -pub(crate) async fn verify(verify_request: web::Path, db_pool: web::Data) -> Result { +pub(crate) async fn verify( + verify_request: web::Path, + db_pool: web::Data, +) -> Result { let token = &verify_request.token; if token.len() != 32 || !token.chars().all(|c| c.is_ascii_hexdigit()) { @@ -19,10 +22,12 @@ pub(crate) async fn verify(verify_request: web::Path, db_pool: we let mut transaction = db_pool.begin().await?; - let option: Option<(i32, i32)> = sqlx::query_as("select id, user_id from user_verifications where hash = $1 and expires > now() limit 1") - .bind(&token) - .fetch_optional(&mut transaction) - .await?; + let option: Option<(i32, i32)> = sqlx::query_as( + "select id, user_id from user_verifications where hash = $1 and expires > now() limit 1", + ) + .bind(&token) + .fetch_optional(&mut transaction) + .await?; if option.is_none() { die!(FORBIDDEN, "Token does not exist or has expired"); @@ -52,5 +57,5 @@ pub(crate) async fn verify(verify_request: web::Path, db_pool: we #[derive(Deserialize)] pub(crate) struct VerifyRequest { - token: String + token: String, } diff --git a/src/session.rs b/src/session.rs index 41d685d..4c0a2d5 100644 --- a/src/session.rs +++ b/src/session.rs @@ -23,7 +23,7 @@ pub(crate) struct Session { pub(crate) ip_address: IpNetwork, pub(crate) user_agent: String, // TODO: Move this to a dedicated table to prevent duplicates created_at: DateTime, - pub(crate) updated_at: DateTime + pub(crate) updated_at: DateTime, } impl Display for Session { @@ -33,7 +33,11 @@ impl Display for Session { } impl Session { - pub(crate) async fn new<'e, E: Executor<'e, Database = Postgres>>(request: &HttpRequest, user: &User, executor: E) -> Result { + pub(crate) async fn new<'e, E: Executor<'e, Database = Postgres>>( + request: &HttpRequest, + user: &User, + executor: E, + ) -> Result { let (ip_address, user_agent) = extract_ip_and_ua(request); // Limit user agent to 256 characters: https://stackoverflow.com/questions/654921/how-big-can-a-user-agent-string-get/654992#comment106798172_654992 @@ 
-50,25 +54,37 @@ impl Session { } /// Finds existing session from Identity (Display of Session) - pub(crate) async fn from_identity<'e, E: Executor<'e, Database = Postgres>>(identity: Option, executor: E) -> Result> { + pub(crate) async fn from_identity<'e, E: Executor<'e, Database = Postgres>>( + identity: Option, + executor: E, + ) -> Result> { match identity { Some(identity) => { - let (user_id_str, hash) = identity.split_once('$').ok_or_else(|| anyhow!("Unable to parse identity"))?; + let (user_id_str, hash) = identity + .split_once('$') + .ok_or_else(|| anyhow!("Unable to parse identity"))?; let user_id = user_id_str.parse::()?; - let option: Option = sqlx::query_as::<_, Session>("select * from sessions where user_id = $1 and hash = $2 limit 1") - .bind(user_id) - .bind(hash) - .fetch_optional(executor) - .await?; + let option: Option = sqlx::query_as::<_, Session>( + "select * from sessions where user_id = $1 and hash = $2 limit 1", + ) + .bind(user_id) + .bind(hash) + .fetch_optional(executor) + .await?; Ok(option) } - None => Ok(None) + None => Ok(None), } } - pub(crate) async fn update_explicit<'e, E: Executor<'e, Database = Postgres>>(&self, ip_address: &IpNetwork, user_agent: &str, executor: E) -> Result<()> { + pub(crate) async fn update_explicit<'e, E: Executor<'e, Database = Postgres>>( + &self, + ip_address: &IpNetwork, + user_agent: &str, + executor: E, + ) -> Result<()> { let now = Local::now(); // Limit user agent to 256 characters: https://stackoverflow.com/questions/654921/how-big-can-a-user-agent-string-get/654992#comment106798172_654992 @@ -87,14 +103,22 @@ impl Session { } #[allow(dead_code)] - pub(crate) async fn update_from_request<'e, E: Executor<'e, Database = Postgres>>(&self, request: &HttpRequest, executor: E) -> Result<()> { + pub(crate) async fn update_from_request<'e, E: Executor<'e, Database = Postgres>>( + &self, + request: &HttpRequest, + executor: E, + ) -> Result<()> { let (ip_address, user_agent) = extract_ip_and_ua(request); - self.update_explicit(&ip_address, user_agent, executor).await + self.update_explicit(&ip_address, user_agent, executor) + .await } /// Consumes the current session and destroys it - pub(crate) async fn destroy<'e, E: Executor<'e, Database = Postgres>>(self, executor: E) -> Result<()> { + pub(crate) async fn destroy<'e, E: Executor<'e, Database = Postgres>>( + self, + executor: E, + ) -> Result<()> { sqlx::query("delete from sessions where user_id = $1 and hash = $2") .bind(&self.user_id) .bind(self.hash.as_str()) @@ -114,14 +138,18 @@ pub(crate) fn extract_ip_and_ua(request: &HttpRequest) -> (IpNetwork, &str) { pub(crate) fn extract_ip_and_ua_owned(request: HttpRequest) -> (IpNetwork, String) { let ip_address = extract_ip(&request); - let user_agent = request.get_header("user-agent").unwrap_or("No user agent sent"); + let user_agent = request + .get_header("user-agent") + .unwrap_or("No user agent sent"); (ip_address, user_agent.to_owned()) } fn extract_ip(request: &HttpRequest) -> IpNetwork { let connection_info = request.connection_info(); - let ip_str = connection_info.realip_remote_addr().unwrap_or("No user agent sent"); + let ip_str = connection_info + .realip_remote_addr() + .unwrap_or("No user agent sent"); match IpNetwork::from_str(ip_str) { Ok(ip_network) => ip_network, diff --git a/src/sse.rs b/src/sse.rs index eec4e50..b297f4f 100644 --- a/src/sse.rs +++ b/src/sse.rs @@ -13,24 +13,24 @@ use std::time::Duration; use actix_web::web::{Bytes, Data}; use anyhow::Result; -use tracing::instrument; use derive_more::{Deref, 
Display};
 use futures::Stream;
 use futures_locks::RwLock;
 use log::debug;
 use tokio::sync::mpsc::{channel, Receiver, Sender};
+use tracing::instrument;
 
 pub(crate) const SSE_BUFFER_SIZE: usize = 512;
 
 #[derive(Default)]
 pub(crate) struct Broadcaster {
-    clients: Vec<(Sender<Bytes>, Category)>
+    clients: Vec<(Sender<Bytes>, Category)>,
 }
 
 #[derive(Clone, Copy, Debug, Display, PartialEq, Eq, Hash)]
 pub(crate) enum Category {
     #[display(fmt = "log")]
-    AdminLog
+    AdminLog,
 }
 
 impl Broadcaster {
@@ -75,10 +75,15 @@ impl Broadcaster {
         self.clients.retain(|(client, category)| {
             // This will fail if the buffer is full or the client is disconnected
             // If the buffer is full the client has not recv'd for a while which means it probably disconnected
-            client.try_send(Bytes::from("event: ping\ndata: pong!\n\n")).map_or_else(|err| {
-                debug!("Disconnecting a client subscribed to {}: {}", category, err);
-                false
-            }, |_| true)
+            client
+                .try_send(Bytes::from("event: ping\ndata: pong!\n\n"))
+                .map_or_else(
+                    |err| {
+                        debug!("Disconnecting a client subscribed to {}: {}", category, err);
+                        false
+                    },
+                    |_| true,
+                )
         });
     }
 
diff --git a/src/ssh.rs b/src/ssh.rs
index d1243b9..516d4ce 100644
--- a/src/ssh.rs
+++ b/src/ssh.rs
@@ -14,5 +14,5 @@ pub(crate) struct SshKey {
     pub(crate) algorithm: KeyType,
     key: Vec<u8>,
     pub(crate) created_at: DateTime<Utc>,
-    pub(crate) expires_at: Option<DateTime<Utc>>
+    pub(crate) expires_at: Option<DateTime<Utc>>,
 }
diff --git a/src/sso/bitbucket_sso.rs b/src/sso/bitbucket_sso.rs
index fc580ba..cfbb128 100644
--- a/src/sso/bitbucket_sso.rs
+++ b/src/sso/bitbucket_sso.rs
@@ -8,8 +8,8 @@ use crate::{config, crypto, err};
 
 use anyhow::{anyhow, bail, Result};
 use async_trait::async_trait;
-use awc::Client;
 use awc::http::header::ACCEPT;
+use awc::Client;
 use oauth2::{AuthUrl, ClientId, ClientSecret, TokenUrl};
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
@@ -24,7 +24,8 @@ impl OAuthRequest for BitBucketSSO {
     async fn request_data<T: DeserializeOwned>(endpoint: &'static str, token: &str) -> Result<T> {
         let client = Client::gitarena();
 
-        Ok(client.get(format!("https://api.bitbucket.org/2.0/{}", endpoint).as_str())
+        Ok(client
+            .get(format!("https://api.bitbucket.org/2.0/{}", endpoint).as_str())
             .append_header((ACCEPT, "application/json"))
             .bearer_auth(token)
             .send()
             .await
             .map_err(|err| err!(BAD_GATEWAY, "Failed to connect to BitBucket api: {}", err))?
             .json::<T>()
             .await
-            .map_err(|err| err!(BAD_GATEWAY, "Failed to parse BitBucket response as JSON: {}", err))?)
+            .map_err(|err| {
+                err!(
+                    BAD_GATEWAY,
+                    "Failed to parse BitBucket response as JSON: {}",
+                    err
+                )
+            })?)
} } #[async_trait] impl DatabaseSSOProvider for BitBucketSSO { - async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { let client_id = config::get_setting::("sso.bitbucket.key", executor).await?; Ok(ClientId::new(client_id)) } - async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result> { - let client_secret = config::get_setting::("sso.bitbucket.secret", executor).await?; + async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result> { + let client_secret = + config::get_setting::("sso.bitbucket.secret", executor).await?; Ok(Some(ClientSecret::new(client_secret))) } @@ -64,23 +78,24 @@ impl SSOProvider for BitBucketSSO { fn get_token_url(&self) -> Option { // unwrap_or_log() is safe as we can guarantee that this is a valid url - Some(TokenUrl::new("https://bitbucket.org/site/oauth2/access_token".to_owned()).unwrap_or_log()) + Some( + TokenUrl::new("https://bitbucket.org/site/oauth2/access_token".to_owned()) + .unwrap_or_log(), + ) } fn get_scopes_as_str(&self) -> Vec<&'static str> { - vec![ - "account", - "email" - ] + vec!["account", "email"] } async fn get_provider_id(&self, token: &str) -> Result { let profile_data: SerdeMap = BitBucketSSO::request_data("user", token).await?; - profile_data.get("account_id") + profile_data + .get("account_id") .and_then(|v| match v { Value::String(val) => Some(val.to_owned()), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from BitBucket API json response")) } @@ -90,28 +105,36 @@ impl SSOProvider for BitBucketSSO { let profile_data: SerdeMap = BitBucketSSO::request_data("user", token).await?; - let mut username = profile_data.get("username") + let mut username = profile_data + .get("username") .and_then(|v| match v { Value::String(s) => Some(s), - _ => None + _ => None, }) .cloned() - .ok_or_else(|| anyhow!("Failed to retrieve username from BitBucket API json response"))?; + .ok_or_else(|| { + anyhow!("Failed to retrieve username from BitBucket API json response") + })?; - while validate_username(username.as_str()).is_err() || is_username_taken(username.as_str(), &mut transaction).await? { + while validate_username(username.as_str()).is_err() + || is_username_taken(username.as_str(), &mut transaction).await? 
+ { username = crypto::random_numeric_ascii_string(16); } - let user: User = sqlx::query_as::<_, User>("insert into users (username, password) values ($1, $2) returning *") - .bind(username.as_str()) - .bind("sso-login") - .fetch_one(&mut transaction) - .await?; + let user: User = sqlx::query_as::<_, User>( + "insert into users (username, password) values ($1, $2) returning *", + ) + .bind(username.as_str()) + .bind("sso-login") + .fetch_one(&mut transaction) + .await?; - let bitbucket_id = profile_data.get("account_id") + let bitbucket_id = profile_data + .get("account_id") .and_then(|v| match v { Value::String(val) => Some(val.to_owned()), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from BitBucket API json response"))?; @@ -126,14 +149,20 @@ impl SSOProvider for BitBucketSSO { let emails: BitBucketEmailList = BitBucketSSO::request_data("user/emails", token).await?; - for bitbucket_email in emails.values.iter().skip_while(|e| !e.is_confirmed || e.email_type != "email") { + for bitbucket_email in emails + .values + .iter() + .skip_while(|e| !e.is_confirmed || e.email_type != "email") + { let email = bitbucket_email.email.as_str(); // Email exists - let (email_exists,): (bool,) = sqlx::query_as("select exists(select 1 from emails where lower(email) = lower($1) limit 1)") - .bind(email) - .fetch_one(&mut transaction) - .await?; + let (email_exists,): (bool,) = sqlx::query_as( + "select exists(select 1 from emails where lower(email) = lower($1) limit 1)", + ) + .bind(email) + .fetch_one(&mut transaction) + .await?; let primary = bitbucket_email.is_primary; @@ -168,7 +197,7 @@ struct BitBucketEmailList { page_length: usize, values: Vec, page: usize, - size: usize + size: usize, } #[derive(Deserialize, Serialize, Debug)] @@ -179,5 +208,5 @@ struct BitBucketEmail { email_type: String, email: String, #[serde(skip_deserializing)] - links: Option + links: Option, } diff --git a/src/sso/github_sso.rs b/src/sso/github_sso.rs index 3bb1ee2..2d6cedd 100644 --- a/src/sso/github_sso.rs +++ b/src/sso/github_sso.rs @@ -8,8 +8,8 @@ use crate::{config, crypto, err}; use anyhow::{anyhow, bail, Result}; use async_trait::async_trait; -use awc::Client; use awc::http::header::{ACCEPT, AUTHORIZATION, USER_AGENT}; +use awc::Client; use oauth2::{AuthUrl, ClientId, ClientSecret, Scope, TokenUrl}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; @@ -24,7 +24,8 @@ impl OAuthRequest for GitHubSSO { async fn request_data(endpoint: &'static str, token: &str) -> Result { let client = Client::gitarena(); - Ok(client.get(format!("https://api.github.com/{}", endpoint).as_str()) + Ok(client + .get(format!("https://api.github.com/{}", endpoint).as_str()) .append_header((ACCEPT, "application/vnd.github.v3+json")) .append_header((AUTHORIZATION, format!("token {}", token))) .append_header((USER_AGENT, concat!("GitArena ", env!("CARGO_PKG_VERSION")))) @@ -33,20 +34,33 @@ impl OAuthRequest for GitHubSSO { .map_err(|err| err!(BAD_GATEWAY, "Failed to connect to GitHub api: {}", err))? .json::() .await - .map_err(|err| err!(BAD_GATEWAY, "Failed to parse GitHub response as JSON: {}", err))?) + .map_err(|err| { + err!( + BAD_GATEWAY, + "Failed to parse GitHub response as JSON: {}", + err + ) + })?) 
} } #[async_trait] impl DatabaseSSOProvider for GitHubSSO { - async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { let client_id = config::get_setting::("sso.github.client_id", executor).await?; Ok(ClientId::new(client_id)) } - async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result> { - let client_secret = config::get_setting::("sso.github.client_secret", executor).await?; + async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result> { + let client_secret = + config::get_setting::("sso.github.client_secret", executor).await?; Ok(Some(ClientSecret::new(client_secret))) } @@ -65,40 +79,43 @@ impl SSOProvider for GitHubSSO { fn get_token_url(&self) -> Option { // unwrap_or_log() is safe as we can guarantee that this is a valid url - Some(TokenUrl::new("https://github.com/login/oauth/access_token".to_owned()).unwrap_or_log()) + Some( + TokenUrl::new("https://github.com/login/oauth/access_token".to_owned()).unwrap_or_log(), + ) } fn get_scopes_as_str(&self) -> Vec<&'static str> { vec![ "read:public_key", // SSH keys - "read:user", // User profile data - "user:email", // Emails - "read:gpg_key", // GPG keys + "read:user", // User profile data + "user:email", // Emails + "read:gpg_key", // GPG keys ] } fn validate_scopes(&self, scopes_option: Option<&Vec>) -> bool { let granted_scopes = match scopes_option { - Some(granted_scopes) => { - granted_scopes - .iter() - .flat_map(|scope| scope.split(',')) - .collect::>() - } - None => return true // If not provided it is identical to our asked scopes + Some(granted_scopes) => granted_scopes + .iter() + .flat_map(|scope| scope.split(',')) + .collect::>(), + None => return true, // If not provided it is identical to our asked scopes }; let requested_scopes = self.get_scopes_as_str(); - granted_scopes.iter().all(|item| requested_scopes.contains(item)) + granted_scopes + .iter() + .all(|item| requested_scopes.contains(item)) } async fn get_provider_id(&self, token: &str) -> Result { let profile_data: SerdeMap = GitHubSSO::request_data("user", token).await?; - profile_data.get("id") + profile_data + .get("id") .and_then(|v| match v { Value::Number(val) => val.as_i64().map_or_else(|| None, |v| Some(v.to_string())), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from GitHub API json response")) } @@ -108,28 +125,34 @@ impl SSOProvider for GitHubSSO { let profile_data: SerdeMap = GitHubSSO::request_data("user", token).await?; - let mut username = profile_data.get("login") + let mut username = profile_data + .get("login") .and_then(|v| match v { Value::String(s) => Some(s), - _ => None + _ => None, }) .cloned() .ok_or_else(|| anyhow!("Failed to retrieve username from GitHub API json response"))?; - while validate_username(username.as_str()).is_err() || is_username_taken(username.as_str(), &mut transaction).await? { + while validate_username(username.as_str()).is_err() + || is_username_taken(username.as_str(), &mut transaction).await? 
+ { username = crypto::random_numeric_ascii_string(16); } - let user: User = sqlx::query_as::<_, User>("insert into users (username, password) values ($1, $2) returning *") - .bind(username.as_str()) - .bind("sso-login") - .fetch_one(&mut transaction) - .await?; + let user: User = sqlx::query_as::<_, User>( + "insert into users (username, password) values ($1, $2) returning *", + ) + .bind(username.as_str()) + .bind("sso-login") + .fetch_one(&mut transaction) + .await?; - let github_id = profile_data.get("id") + let github_id = profile_data + .get("id") .and_then(|v| match v { Value::Number(val) => val.as_i64().map_or_else(|| None, |v| Some(v.to_string())), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from GitHub API json response"))?; @@ -142,16 +165,19 @@ impl SSOProvider for GitHubSSO { // TODO: Save avatar (profile data "avatar_url") - let emails: Vec = GitHubSSO::request_data("user/emails?per_page=100", token).await?; + let emails: Vec = + GitHubSSO::request_data("user/emails?per_page=100", token).await?; for github_email in emails.iter().skip_while(|e| !e.verified) { let email = github_email.email.as_str(); // Email exists - let (email_exists,): (bool,) = sqlx::query_as("select exists(select 1 from emails where lower(email) = lower($1) limit 1)") - .bind(email) - .fetch_one(&mut transaction) - .await?; + let (email_exists,): (bool,) = sqlx::query_as( + "select exists(select 1 from emails where lower(email) = lower($1) limit 1)", + ) + .bind(email) + .fetch_one(&mut transaction) + .await?; let primary = github_email.primary; @@ -163,7 +189,10 @@ impl SSOProvider for GitHubSSO { } } - let public = github_email.visibility.as_ref().map_or_else(|| false, |v| v == "public"); + let public = github_email + .visibility + .as_ref() + .map_or_else(|| false, |v| v == "public"); // All email addresses have already been verified by GitHub, so we also mark them as verified sqlx::query("insert into emails (owner, email, \"primary\", commit, notification, public, verified_at) values ($1, $2, $3, $3, $3, $4, current_timestamp)") @@ -188,5 +217,5 @@ struct GitHubEmail { email: String, verified: bool, primary: bool, - visibility: Option + visibility: Option, } diff --git a/src/sso/gitlab_sso.rs b/src/sso/gitlab_sso.rs index 739ab63..e5b5ffc 100644 --- a/src/sso/gitlab_sso.rs +++ b/src/sso/gitlab_sso.rs @@ -10,8 +10,8 @@ use std::sync::Once; use anyhow::{anyhow, bail, Result}; use async_trait::async_trait; -use awc::Client; use awc::http::header::{AUTHORIZATION, USER_AGENT}; +use awc::Client; use oauth2::{AuthUrl, ClientId, ClientSecret, TokenUrl}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; @@ -26,7 +26,8 @@ impl OAuthRequest for GitLabSSO { async fn request_data(endpoint: &'static str, token: &str) -> Result { let client = Client::gitarena(); - Ok(client.get(format!("https://gitlab.com/api/v4/{}", endpoint).as_str()) + Ok(client + .get(format!("https://gitlab.com/api/v4/{}", endpoint).as_str()) .append_header((AUTHORIZATION, format!("Bearer {}", token))) .append_header((USER_AGENT, concat!("GitArena ", env!("CARGO_PKG_VERSION")))) .send() @@ -34,20 +35,33 @@ impl OAuthRequest for GitLabSSO { .map_err(|err| err!(BAD_GATEWAY, "Failed to connect to GitLab api: {}", err))? .json::() .await - .map_err(|err| err!(BAD_GATEWAY, "Failed to parse GitLab response as JSON: {}", err))?) + .map_err(|err| { + err!( + BAD_GATEWAY, + "Failed to parse GitLab response as JSON: {}", + err + ) + })?) 
} } #[async_trait] impl DatabaseSSOProvider for GitLabSSO { - async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { let client_id = config::get_setting::("sso.gitlab.app_id", executor).await?; Ok(ClientId::new(client_id)) } - async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result> { - let client_secret = config::get_setting::("sso.gitlab.client_secret", executor).await?; + async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result> { + let client_secret = + config::get_setting::("sso.gitlab.client_secret", executor).await?; Ok(Some(ClientSecret::new(client_secret))) } @@ -70,18 +84,17 @@ impl SSOProvider for GitLabSSO { } fn get_scopes_as_str(&self) -> Vec<&'static str> { - vec![ - "read_user" - ] + vec!["read_user"] } async fn get_provider_id(&self, token: &str) -> Result { let profile_data: SerdeMap = GitLabSSO::request_data("user", token).await?; - profile_data.get("id") + profile_data + .get("id") .and_then(|v| match v { Value::Number(val) => val.as_i64().map_or_else(|| None, |v| Some(v.to_string())), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from GitLab API json response")) } @@ -91,28 +104,34 @@ impl SSOProvider for GitLabSSO { let profile_data: SerdeMap = GitLabSSO::request_data("user", token).await?; - let mut username = profile_data.get("username") + let mut username = profile_data + .get("username") .and_then(|v| match v { Value::String(s) => Some(s), - _ => None + _ => None, }) .cloned() .ok_or_else(|| anyhow!("Failed to retrieve username from GitLab API json response"))?; - while validate_username(username.as_str()).is_err() || is_username_taken(username.as_str(), &mut transaction).await? { + while validate_username(username.as_str()).is_err() + || is_username_taken(username.as_str(), &mut transaction).await? 
+ { username = crypto::random_numeric_ascii_string(16); } - let user: User = sqlx::query_as::<_, User>("insert into users (username, password) values ($1, $2) returning *") - .bind(username.as_str()) - .bind("sso-login") - .fetch_one(&mut transaction) - .await?; + let user: User = sqlx::query_as::<_, User>( + "insert into users (username, password) values ($1, $2) returning *", + ) + .bind(username.as_str()) + .bind("sso-login") + .fetch_one(&mut transaction) + .await?; - let gitlab_id = profile_data.get("id") + let gitlab_id = profile_data + .get("id") .and_then(|v| match v { Value::Number(val) => val.as_i64().map_or_else(|| None, |v| Some(v.to_string())), - _ => None + _ => None, }) .ok_or_else(|| anyhow!("Failed to retrieve id from GitLab API json response"))?; @@ -131,14 +150,18 @@ impl SSOProvider for GitLabSSO { // For some reason GitLab does not currently always provide the `verified_at` field even for verified email addresses // TODO: Reactivate check once GitLab fixed their endpoint // Once their endpoint has been fixed, we can also mark all email addresses as verified - for gitlab_email in emails.iter()/*.skip_while(|e| e.verified_at.is_none())*/ { + for gitlab_email in emails.iter() + /*.skip_while(|e| e.verified_at.is_none())*/ + { let email = gitlab_email.email.as_str(); // Email exists - let (email_exists,): (bool,) = sqlx::query_as("select exists(select 1 from emails where lower(email) = lower($1) limit 1)") - .bind(email) - .fetch_one(&mut transaction) - .await?; + let (email_exists,): (bool,) = sqlx::query_as( + "select exists(select 1 from emails where lower(email) = lower($1) limit 1)", + ) + .bind(email) + .fetch_one(&mut transaction) + .await?; if email_exists { continue; @@ -163,7 +186,9 @@ impl SSOProvider for GitLabSSO { } if !once.is_completed() { - bail!("All verified GitLab email addresses are already assigned to a different account"); + bail!( + "All verified GitLab email addresses are already assigned to a different account" + ); } transaction.commit().await?; @@ -178,5 +203,5 @@ impl SSOProvider for GitLabSSO { struct GitLabEmail { id: i32, email: String, - verified_at: Option + verified_at: Option, } diff --git a/src/sso/mod.rs b/src/sso/mod.rs index f5a695d..204ae26 100644 --- a/src/sso/mod.rs +++ b/src/sso/mod.rs @@ -14,5 +14,5 @@ pub(crate) mod sso_provider_type; pub(crate) struct SSO { pub(crate) user_id: i32, // User id on our end pub(crate) provider: SSOProviderType, - pub(crate) provider_id: String // User id on the provider end + pub(crate) provider_id: String, // User id on the provider end } diff --git a/src/sso/oauth2_awc_client.rs b/src/sso/oauth2_awc_client.rs index c7a8a05..bf679ec 100644 --- a/src/sso/oauth2_awc_client.rs +++ b/src/sso/oauth2_awc_client.rs @@ -4,11 +4,13 @@ use crate::err; use crate::error::WithStatusCode; use crate::prelude::USER_AGENT_STR; -use awc::ClientBuilder; use awc::http::header::USER_AGENT; +use awc::ClientBuilder; use oauth2::{HttpRequest, HttpResponse}; -pub(crate) async fn async_http_client(request: HttpRequest) -> Result { +pub(crate) async fn async_http_client( + request: HttpRequest, +) -> Result { let client = ClientBuilder::new() .disable_redirects() // Following redirects opens the client up to SSRF vulnerabilities .add_default_header((USER_AGENT, USER_AGENT_STR)) @@ -20,17 +22,19 @@ pub(crate) async fn async_http_client(request: HttpRequest) -> Result AuthUrl; fn get_token_url(&self) -> Option; - async fn build_client(&self, provider: &SSOProviderType, db_pool: &PgPool) -> Result { + async fn build_client( + 
&self, + provider: &SSOProviderType, + db_pool: &PgPool, + ) -> Result { let mut transaction = db_pool.begin().await?; let (client_id, client_secret) = match provider { SSOProviderType::BitBucket => ( - DatabaseSSOProvider::get_client_id(&BitBucketSSO, &mut transaction).await.context("Failed to get client id")?, - DatabaseSSOProvider::get_client_secret(&BitBucketSSO, &mut transaction).await.context("Failed to get client secret")? + DatabaseSSOProvider::get_client_id(&BitBucketSSO, &mut transaction) + .await + .context("Failed to get client id")?, + DatabaseSSOProvider::get_client_secret(&BitBucketSSO, &mut transaction) + .await + .context("Failed to get client secret")?, ), SSOProviderType::GitHub => ( - DatabaseSSOProvider::get_client_id(&GitHubSSO, &mut transaction).await.context("Failed to get client id")?, - DatabaseSSOProvider::get_client_secret(&GitHubSSO, &mut transaction).await.context("Failed to get client secret")? + DatabaseSSOProvider::get_client_id(&GitHubSSO, &mut transaction) + .await + .context("Failed to get client id")?, + DatabaseSSOProvider::get_client_secret(&GitHubSSO, &mut transaction) + .await + .context("Failed to get client secret")?, ), SSOProviderType::GitLab => ( - DatabaseSSOProvider::get_client_id(&GitLabSSO, &mut transaction).await.context("Failed to get client id")?, - DatabaseSSOProvider::get_client_secret(&GitLabSSO, &mut transaction).await.context("Failed to get client secret")? - ) + DatabaseSSOProvider::get_client_id(&GitLabSSO, &mut transaction) + .await + .context("Failed to get client id")?, + DatabaseSSOProvider::get_client_secret(&GitLabSSO, &mut transaction) + .await + .context("Failed to get client secret")?, + ), }; let auth_url = self.get_auth_url(); let token_url = self.get_token_url(); let redirect_url = match provider { - SSOProviderType::BitBucket => DatabaseSSOProvider::get_redirect_url(&BitBucketSSO, &mut transaction).await.context("Failed to get redirect url")?, - SSOProviderType::GitHub => DatabaseSSOProvider::get_redirect_url(&GitHubSSO, &mut transaction).await.context("Failed to get redirect url")?, - SSOProviderType::GitLab => DatabaseSSOProvider::get_redirect_url(&GitLabSSO, &mut transaction).await.context("Failed to get redirect url")?, + SSOProviderType::BitBucket => { + DatabaseSSOProvider::get_redirect_url(&BitBucketSSO, &mut transaction) + .await + .context("Failed to get redirect url")? + } + SSOProviderType::GitHub => { + DatabaseSSOProvider::get_redirect_url(&GitHubSSO, &mut transaction) + .await + .context("Failed to get redirect url")? + } + SSOProviderType::GitLab => { + DatabaseSSOProvider::get_redirect_url(&GitLabSSO, &mut transaction) + .await + .context("Failed to get redirect url")? 
+            } }; transaction.commit().await?; - Ok(BasicClient::new(client_id, client_secret, auth_url, token_url).set_redirect_uri(redirect_url)) + Ok( + BasicClient::new(client_id, client_secret, auth_url, token_url) + .set_redirect_uri(redirect_url), + ) } fn get_scopes_as_str(&self) -> Vec<&'static str>; @@ -63,7 +96,11 @@ pub(crate) trait SSOProvider { .collect() } - async fn generate_auth_url(&self, provider: &SSOProviderType, db_pool: &PgPool) -> Result<(Url, CsrfToken)> { + async fn generate_auth_url( + &self, + provider: &SSOProviderType, + db_pool: &PgPool, + ) -> Result<(Url, CsrfToken)> { let client = self.build_client(provider, db_pool).await?; let mut request = client.authorize_url(CsrfToken::new_random); @@ -75,7 +112,12 @@ pub(crate) trait SSOProvider { } /// Exchanges a response (provided by `state` and `code` in `query_string`) into an OAuth access token - async fn exchange_response(&self, query_string: &QString, provider: &SSOProviderType, db_pool: &PgPool) -> Result { + async fn exchange_response( + &self, + query_string: &QString, + provider: &SSOProviderType, + db_pool: &PgPool, + ) -> Result { let code_option = query_string.get("code"); let state_option = query_string.get("state"); @@ -91,21 +133,32 @@ pub(crate) trait SSOProvider { let client = self.build_client(provider, db_pool).await?; - Ok(client.exchange_code(code) + Ok(client + .exchange_code(code) .request_async(async_http_client) .await - .with_context(|| format!("Failed to contact {} in order to exchange oauth token", &self.get_name()))?) + .with_context(|| { + format!( + "Failed to contact {} in order to exchange oauth token", + &self.get_name() + ) + })?) } /// Returns whether the granted scopes are acceptable (a subset of the requested scopes) fn validate_scopes(&self, scopes_option: Option<&Vec<Scope>>) -> bool { let granted_scopes = match scopes_option { - Some(scopes) => scopes.iter().map(|scope| scope.as_str()).collect::<Vec<_>>(), - None => return true // If not provided, the granted scopes are identical to the ones we requested + Some(scopes) => scopes + .iter() + .map(|scope| scope.as_str()) + .collect::<Vec<_>>(), + None => return true, // If not provided, the granted scopes are identical to the ones we requested }; let requested_scopes = self.get_scopes_as_str(); - granted_scopes.iter().all(|item| requested_scopes.contains(item)) + granted_scopes + .iter() + .all(|item| requested_scopes.contains(item)) } async fn get_provider_id(&self, token: &str) -> Result; @@ -115,13 +168,22 @@ pub(crate) trait SSOProvider { #[async_trait] pub(crate) trait DatabaseSSOProvider: SSOProvider { - async fn get_redirect_url<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result { + async fn get_redirect_url<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result { let domain = config::get_setting::("domain", executor).await?; let url = format!("{}/sso/{}/callback", domain, self.get_name()); Ok(RedirectUrl::new(url)?)
} - async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<ClientId>; - async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>(&self, executor: E) -> Result<Option<ClientSecret>>; + async fn get_client_id<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result<ClientId>; + async fn get_client_secret<'e, E: Executor<'e, Database = Postgres>>( + &self, + executor: E, + ) -> Result<Option<ClientSecret>>; } diff --git a/src/sso/sso_provider_type.rs b/src/sso/sso_provider_type.rs index dde41f9..cbe0985 100644 --- a/src/sso/sso_provider_type.rs +++ b/src/sso/sso_provider_type.rs @@ -16,7 +16,7 @@ use sqlx::Type; pub(crate) enum SSOProviderType { BitBucket, GitHub, - GitLab + GitLab, } impl SSOProviderType { @@ -24,7 +24,7 @@ impl SSOProviderType { match self { SSOProviderType::BitBucket => Box::new(BitBucketSSO), SSOProviderType::GitHub => Box::new(GitHubSSO), - SSOProviderType::GitLab => Box::new(GitLabSSO) + SSOProviderType::GitLab => Box::new(GitLabSSO), } } } @@ -39,7 +39,7 @@ impl FromStr for SSOProviderType { "bitbucket" => Ok(SSOProviderType::BitBucket), "github" => Ok(SSOProviderType::GitHub), "gitlab" => Ok(SSOProviderType::GitLab), - _ => Err(()) + _ => Err(()), } } } diff --git a/src/templates/filters.rs b/src/templates/filters.rs index 32f87e0..170b287 100644 --- a/src/templates/filters.rs +++ b/src/templates/filters.rs @@ -12,7 +12,7 @@ pub(crate) fn human_prefix(value: &Value, _: &HashMap) -> Result< i @ 1_000..=999_999 => { let str = i.to_string(); format!("{}K", &str[..str.len() - 3]) - }, + } _ => "1M+".to_owned(), })) } diff --git a/src/templates/mod.rs b/src/templates/mod.rs index 4abcb28..a28cb35 100644 --- a/src/templates/mod.rs +++ b/src/templates/mod.rs @@ -32,11 +32,15 @@ pub(crate) async fn init() -> Result { info!("Loading templates. This may take a few seconds."); let elapsed = time_function(|| async { - VERIFY_EMAIL.set(parse_template("email/user/verify_email.txt".to_owned())).expect_or_log("Verify email template should only be initialized once"); + VERIFY_EMAIL + .set(parse_template("email/user/verify_email.txt".to_owned())) + .expect_or_log("Verify email template should only be initialized once"); // This additionally checks the templates for errors - TERA.set(init_tera()).expect_or_log("Tera should only be initialized once"); - }).await; + TERA.set(init_tera()) + .expect_or_log("Tera should only be initialized once"); + }) + .await; info!("Successfully loaded templates. 
Took {} seconds.", elapsed); @@ -47,47 +51,50 @@ pub(crate) async fn init() -> Result { use actix_web::rt::Runtime; use log::error; use notify::{Error as NotifyError, Event, RecommendedWatcher, RecursiveMode, Watcher}; - - let mut watcher = notify::recommended_watcher(|result: std::result::Result| { - let event = match result { - Ok(event) => event, - Err(err) => { - error!("Failed to unwrap file system notify event: {}", err); - return; - } - }; - if !event.kind.is_modify() { - return; - } + let mut watcher = + notify::recommended_watcher(|result: std::result::Result| { + let event = match result { + Ok(event) => event, + Err(err) => { + error!("Failed to unwrap file system notify event: {}", err); + return; + } + }; - for path in &event.paths { - if path.is_dir() { + if !event.kind.is_modify() { return; } - match path.file_name() { - Some(file_name) => match file_name.to_str() { - Some(file_name) => if !file_name.ends_with(".html") { - return - } - None => return + for path in &event.paths { + if path.is_dir() { + return; + } + + match path.file_name() { + Some(file_name) => match file_name.to_str() { + Some(file_name) => { + if !file_name.ends_with(".html") { + return; + } + } + None => return, + }, + None => return, } - None => return } - } - if let Ok(runtime) = Runtime::new() { - info!("Detected modification in templates directory, reloading..."); + if let Ok(runtime) = Runtime::new() { + info!("Detected modification in templates directory, reloading..."); - runtime.block_on(async { - match tera().write().await.full_reload() { - Ok(_) => info!("Successfully reloaded templates."), - Err(err) => error!("Failed to reload templates: {}", err) - } - }); - } - })?; + runtime.block_on(async { + match tera().write().await.full_reload() { + Ok(_) => info!("Successfully reloaded templates."), + Err(err) => error!("Failed to reload templates: {}", err), + } + }); + } + })?; watcher.watch(Path::new("templates/html"), RecursiveMode::Recursive)?; @@ -103,7 +110,7 @@ pub(crate) async fn init() -> Result { fn parse_template(template_path: String) -> Template { match plain::parse(template_path) { Ok(template) => template, - Err(err) => panic!("Failed to parse template: {}", err) + Err(err) => panic!("Failed to parse template: {}", err), } } @@ -118,7 +125,7 @@ pub(crate) async fn render(template: &str, context: &Context) -> Result fn init_tera() -> GlobalTera { let mut tera = match Tera::new("templates/html/**/*") { Ok(tera) => tera, - Err(err) => panic!("{}", err) + Err(err) => panic!("{}", err), }; tera.register_filter("human_prefix", filters::human_prefix); @@ -143,7 +150,7 @@ pub(crate) fn tera() -> &'static GlobalTera { macro_rules! template_context { ($input:expr) => { Some($input.iter().cloned().collect()) - } + }; } /// Renders a template and returns `Ok(HttpResponse)`. If an error occurs, returns `Err`. @@ -156,7 +163,12 @@ macro_rules! render_template { render_template!(actix_web::http::StatusCode::OK, $template_name, $context) }}; ($template_name:literal, $context:expr, $transaction:expr) => {{ - render_template!(actix_web::http::StatusCode::OK, $template_name, $context, $transaction) + render_template!( + actix_web::http::StatusCode::OK, + $template_name, + $context, + $transaction + ) }}; ($status:expr, $template_name:literal, $context:expr) => {{ if cfg!(debug_assertions) { @@ -167,7 +179,9 @@ macro_rules! 
render_template { Ok(actix_web::HttpResponseBuilder::new($status).body(template)) }}; ($status:expr, $template_name:literal, $context:expr, $transaction:expr) => {{ - let domain = $crate::config::get_optional_setting::("domain", &mut $transaction).await?.unwrap_or_default(); + let domain = $crate::config::get_optional_setting::("domain", &mut $transaction) + .await? + .unwrap_or_default(); $context.try_insert("domain", &domain)?; if cfg!(debug_assertions) { diff --git a/src/templates/plain.rs b/src/templates/plain.rs index bd8c531..93b2d60 100644 --- a/src/templates/plain.rs +++ b/src/templates/plain.rs @@ -27,7 +27,10 @@ pub(crate) fn parse(template_path: String) -> Result