commit 4ebb2a21d2
    initial

.gitignore (vendored) | new file, +7
@@ -0,0 +1,7 @@
/target
.env
config.toml
*.log
*.db
/bindings
.DS_Store

.vscode/settings.json (vendored) | new file, +18
@@ -0,0 +1,18 @@
{
    "cSpell.words": [
        "apikey",
        "axum",
        "chrono",
        "color",
        "Conn",
        "dotenv",
        "hmac",
        "oneshot",
        "openapi",
        "recv",
        "repr",
        "Servable",
        "sqlx",
        "utoipa"
    ]
}

ActiveDirecotry/Dockerfile | new file, +9
@@ -0,0 +1,9 @@
FROM ubuntu:22.04
ENV DEBIAN_FRONTEND noninteractive
# Install required packages
RUN apt-get update && apt-get -y install samba krb5-config winbind smbclient iproute2 openssl \
    && rm /etc/krb5.conf \
    && mkdir -p /opt/ad-scripts
WORKDIR /opt/ad-scripts
COPY *.sh /opt/ad-scripts
CMD chmod +x /opt/ad-scripts/*.sh && ./samba-ad-setup.sh && ./samba-ad-run.sh

ActiveDirecotry/docker-compose.yaml | new file, +17
@@ -0,0 +1,17 @@
# https://github.com/amardeep2006/ldap-activedirectory-samba/
version: "3.4"
services:
  ldap.example.org:
    restart: "no"
    hostname: "ldap.example.org"
    build:
      context: .
      dockerfile: Dockerfile
    privileged: true
    expose:
      - "636"
    ports:
      - "636:636"
    environment:
      SMB_ADMIN_PASSWORD: "admin123!"
      AD_DOMAIN: "DEV-AD"

ActiveDirecotry/samba-ad-run.sh | new file, +10
@@ -0,0 +1,10 @@
#!/bin/bash

set -e

[ -f /var/lib/samba/.setup ] || {
    >&2 echo "[ERROR] Samba is not set up yet, which should happen automatically. Look for errors!"
    exit 127
}

samba -i -s /var/lib/samba/private/smb.conf

ActiveDirecotry/samba-ad-setup.sh | new file, +31
@@ -0,0 +1,31 @@
#!/bin/bash

set -e

info () {
    echo "[INFO] $@"
}

info "Running setup"

# Check if samba is already set up
[ -f /var/lib/samba/.setup ] && info "Already setup..." && exit 0

info "Provisioning domain controller..."

info "Given admin password: ${SMB_ADMIN_PASSWORD}"

rm /etc/samba/smb.conf

# Setting up Samba AD server
samba-tool domain provision \
    --server-role=dc \
    --use-rfc2307 \
    --dns-backend=SAMBA_INTERNAL \
    --realm=$(hostname | cut -d '.' -f 2-) \
    --domain=${AD_DOMAIN} \
    --adminpass=${SMB_ADMIN_PASSWORD}

mv /etc/samba/smb.conf /var/lib/samba/private/smb.conf

touch /var/lib/samba/.setup

Cargo.lock (generated) | new file, +3600
(diff suppressed because it is too large)

Cargo.toml | new file, +66
@@ -0,0 +1,66 @@
[package]
name = "axum-api-test"
version = "0.1.0"
edition = "2021"

[dependencies]
# Logging
# ========================================
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = [
    "local-time",
    "env-filter",
] }
tracing-appender = "0.2.3"
tracing-rolling-file = "0.1.2"
error-stack = "0.5.0"

# CLI
# ========================================
dotenv = "0.15"
clap = { version = "4.5.23", features = ["derive"] }
config = "0.15.4"

# User Authentication
# ========================================
uuid = { version = "1.11.0", features = ["v4"] }
sha2 = "0.10.8"
hmac = "0.12.1"
# axum-jwt-login = { path = "../axum-login-jwt" }
axum-jwt-login = { version = "0.1.0", registry = "kellnr" }
rust-argon2 = "2.1.0"
rand = "0.8.5"
ldap3 = "0.11.5"

# Service
# ========================================
windows-service = "0.7.0"

axum = { version = "0.8.1", features = ["macros"] }
strum = { version = "0.26", features = ["derive"] }
# utoipa = { version = "5.3.0", features = ["axum_extras"] }
utoipa = { git = "https://github.com/juhaku/utoipa", features = [
    "axum_extras",
] }
# utoipa-axum = "0.1.3"
utoipa-axum = { git = "https://github.com/juhaku/utoipa" }
# utoipa-swagger-ui = { version = "8.1.0", features = ["axum"] }
# utoipa-redoc = { version = "*", features = ["axum"] }
# utoipa-scalar = { version = "*", features = ["axum"] }
utoipa-swagger-ui = { git = "https://github.com/juhaku/utoipa", features = [
    "axum",
] }
utoipa-redoc = { git = "https://github.com/juhaku/utoipa", features = ["axum"] }
utoipa-scalar = { git = "https://github.com/juhaku/utoipa", features = [
    "axum",
] }
ts-rs = { version = "10.1.0", features = ["chrono-impl"] }

# Utilities
# ========================================
serde = { version = "1.0.216", features = ["derive"] }
tokio = { version = "1.42.0", features = ["full"] }
tokio-util = { version = "0.7.13", features = ["rt"] }
once_cell = "1.20.2"
sqlx = { version = "0.8.3", features = ["runtime-tokio", "postgres", "chrono"] }
chrono = { version = "0.4.39", features = ["serde"] }

src/api/backend/ldap.rs | new file, +206
@@ -0,0 +1,206 @@
use std::collections::HashMap;

use ldap3::{Ldap, LdapConnAsync, LdapConnSettings, Scope, SearchEntry};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use utoipa::ToSchema;

use crate::{
    config::{Configuration, LDAP},
    errors::ApiError,
};

#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
#[ts(export)]
pub struct ActiveDirectoryUser {
    pub name: String,
    pub surname: String,
    pub email: String,
    pub id: String,
}

pub struct LDAPBackend {
    connection: Ldap,
    config: LDAP,
    bound: bool,
}

impl LDAPBackend {
    pub async fn from_config(config: &Configuration) -> Result<Self, ApiError> {
        // create connection to LDAP server
        let ldap_settings = LdapConnSettings::new().set_no_tls_verify(config.ldap.skip_tls_verify);
        let (connection, ldap) = LdapConnAsync::with_settings(ldap_settings, &config.ldap.server)
            .await
            .map_err(|e| ApiError::InternalError(format!("LDAP Server connection: {e}")))?;
        ldap3::drive!(connection);

        Ok(Self {
            config: config.ldap.clone(),
            connection: ldap,
            bound: false,
        })
    }

    pub async fn unbind(&mut self) {
        let _ = self.connection.unbind().await;
        self.bound = false;
    }

    pub async fn ad_bind(&mut self, user: &str, password: &str) -> Result<(), ApiError> {
        self.connection
            .simple_bind(&format!("{}\\{user}", self.config.ad_domain), &password)
            .await
            .map_err(|_e| ApiError::InvalidCredentials)?;

        self.bound = true;

        Ok(())
    }

    pub async fn get_user_detail(
        &mut self,
        user_id: &str,
    ) -> Result<HashMap<String, Vec<String>>, ApiError> {
        // check if elevated user has to be used
        if self.config.elevated_search {
            let id = self.config.elevated_user_id.clone();
            let password = self.config.elevated_user_pw.clone();

            self.ad_bind(&id, &password).await?;
        }
        let (search_result, _) = self
            .connection
            .search(
                &self.config.user_search_base,
                Scope::Subtree,
                // "(objectClass=*)",
                &format!("(&(ObjectCategory=Person)(sAMAccountName={user_id}))"),
                vec!["givenName", "sn", "mail"],
                // vec!["*"],
            )
            .await
            .map_err(|e| ApiError::InternalError(format!("LDAP search error: {e}")))?
            .success()
            .map_err(|e| ApiError::InternalError(format!("LDAP search error: {e}")))?;

        let attributes = if let Some(entry) = search_result.first().cloned() {
            SearchEntry::construct(entry).attrs
        } else {
            HashMap::new()
        };

        Ok(attributes)
    }

    pub async fn get_ad_user_list(&mut self) -> Result<Vec<ActiveDirectoryUser>, ApiError> {
        // check if elevated user has to be used
        if self.config.elevated_search {
            let id = self.config.elevated_user_id.clone();
            let password = self.config.elevated_user_pw.clone();

            self.ad_bind(&id, &password).await?;
        }

        let (search_result, _) = self
            .connection
            .search(
                &self.config.user_search_base,
                Scope::Subtree,
                &format!("(&(ObjectCategory=Person))"),
                vec!["sAMAccountName", "givenName", "sn", "mail"],
            )
            .await
            .map_err(|e| ApiError::InternalError(format!("LDAP search error: {e}")))?
            .success()
            .map_err(|e| ApiError::InternalError(format!("LDAP search error: {e}")))?;

        let result = search_result
            .into_iter()
            .map(|entry| {
                let user_info = SearchEntry::construct(entry).attrs;

                ActiveDirectoryUser {
                    name: user_info
                        .get("givenName")
                        .map(|v| v.first().cloned())
                        .flatten()
                        .unwrap_or_default(),
                    surname: user_info
                        .get("sn")
                        .map(|v| v.first().cloned())
                        .flatten()
                        .unwrap_or_default(),
                    email: user_info
                        .get("mail")
                        .map(|v| v.first().cloned())
                        .flatten()
                        .unwrap_or_default(),
                    id: user_info
                        .get("sAMAccountName")
                        .map(|v| v.first().cloned())
                        .flatten()
                        .unwrap_or_default(),
                }
            })
            .collect();

        Ok(result)
    }
}

#[cfg(test)]
mod test {
    use ldap3::{exop::WhoAmI, LdapConnAsync, LdapConnSettings, Scope, SearchEntry};

    #[tokio::test]
    async fn test_ldap() {
        let (connection, mut ldap) = LdapConnAsync::with_settings(
            LdapConnSettings::new().set_no_tls_verify(true),
            "ldaps://localhost:636",
        )
        .await
        .unwrap();
        ldap3::drive!(connection);

        // bind to server
        let res = ldap
            // .simple_bind("cn=read-only-admin,dc=example,dc=com", "password")
            .simple_bind(
                // "CN=Abel Austin,OU=Accounting,OU=Mylab Users,DC=mylab,DC=local",
                // "MYLAB\\A0H67123",
                // "cn=read-only-admin,dc=example,dc=com",
                // "DEV-AD\\Administrator",
                // "admin123!",
                "DEV-AD\\einstein",
                "einstein",
            )
            .await;

        println!("{res:?}");

        let (res, re) = ldap
            .search(
                // "CN=Abel Austin,OU=Accounting,OU=Mylab Users,DC=mylab,DC=local",
                "OU=DevUsers,DC=example,DC=org",
                Scope::Subtree,
                "(objectClass=*)",
                // "(&(ObjectCategory=Person)(sAMAccountName=A0H67123))",
                vec!["givenName", "sn"],
            )
            .await
            .unwrap()
            .success()
            .unwrap();

        for entry in res {
            println!("{:?}", SearchEntry::construct(entry));
        }

        let test = ldap.extended(WhoAmI).await.unwrap().0;
        println!("{test:?}");
        // let whoami: ldap3::exop::WhoAmIResp = test.parse();
        // println!("{whoami:?}");

        let _ = ldap.unbind().await;
    }
}
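
Usage sketch (not part of the commit): how a caller elsewhere in the crate could list Active Directory users through this backend. The bind credentials are placeholders mirroring the test above, and configuration loading is assumed to happen elsewhere.

use crate::api::backend::ldap::{ActiveDirectoryUser, LDAPBackend};
use crate::{config::Configuration, errors::ApiError};

async fn list_ad_users(config: &Configuration) -> Result<Vec<ActiveDirectoryUser>, ApiError> {
    let mut ldap = LDAPBackend::from_config(config).await?;
    // placeholder account; ad_bind prefixes the configured AD domain itself
    ldap.ad_bind("einstein", "einstein").await?;
    let users = ldap.get_ad_user_list().await?;
    ldap.unbind().await;
    Ok(users)
}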

src/api/backend/mod.rs | new file, +70
@@ -0,0 +1,70 @@
use axum_jwt_login::{AuthBackend, UserPermissions};
use ldap::LDAPBackend;
use sqlx::PgPool;

use crate::{
    api::routes::users::{models::UserStatus, sql::get_users},
    config::Configuration,
    errors::ApiError,
};

use super::routes::{auth::models::Credentials, users::models::User};

pub mod ldap;

#[derive(Debug, Clone)]
pub struct ApiBackend {
    pool: PgPool,
    config: Configuration,
}

impl ApiBackend {
    pub fn new(pool: PgPool, config: &Configuration) -> Self {
        Self {
            pool,
            config: config.clone(),
        }
    }

    pub fn pool(&self) -> &PgPool {
        &self.pool
    }

    pub fn config(&self) -> &Configuration {
        &self.config
    }
}

impl AuthBackend<User> for ApiBackend {
    type Credentials = Credentials;
    type Error = ApiError;

    async fn authenticate(
        &self,
        Credentials { id, password }: Self::Credentials,
    ) -> Result<Option<User>, Self::Error> {
        // get user from Database
        let user = get_users(&self.pool, Some(UserStatus::Active), Some(id)).await?;
        let user = user.first().ok_or(ApiError::InvalidCredentials)?;

        // authenticate user
        if user.active_directory_auth {
            // authenticate against LDAP (AD) server
            // --
            let mut ldap = LDAPBackend::from_config(&self.config).await?;
            ldap.ad_bind(&user.id(), &password).await?;

            // update user information
            let details = ldap.get_user_detail(&user.id()).await?;
            user.update_with_ad_details(&self.pool, details).await?;

            // terminate connection
            ldap.unbind().await;
        } else {
            // verify_encoded returns Ok(false) for a wrong password, so the flag must be checked
            let password_ok = argon2::verify_encoded(&user.password, password.as_bytes())
                .map_err(|_| ApiError::InvalidCredentials)?;
            if !password_ok {
                return Err(ApiError::InvalidCredentials);
            }
        }

        Ok(Some(user.clone()))
    }
}

src/api/description.rs | new file, +41
@@ -0,0 +1,41 @@
use utoipa::{
    openapi::security::{ApiKey, ApiKeyValue, HttpAuthScheme, HttpBuilder, SecurityScheme},
    Modify, OpenApi,
};

pub const AUTH_TAG: &str = "Authentication";
pub const USERS_TAG: &str = "Users";
pub const ORDER_TAG: &str = "order";
pub const API_KEY_TAG: &str = "API Keys";

#[derive(OpenApi)]
#[openapi(
    modifiers(&SecurityAddon),
    tags(
        (name = AUTH_TAG, description = "API Authentication endpoints"),
        (name = ORDER_TAG, description = "Order API endpoints")
    ),
)]
pub struct ApiDocumentation;

struct SecurityAddon;

impl Modify for SecurityAddon {
    fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) {
        if let Some(components) = openapi.components.as_mut() {
            components.add_security_scheme(
                "api_key",
                SecurityScheme::ApiKey(ApiKey::Header(ApiKeyValue::new("x-api-key"))),
            );
            components.add_security_scheme(
                "user_auth",
                SecurityScheme::Http(
                    HttpBuilder::new()
                        .scheme(HttpAuthScheme::Bearer)
                        .bearer_format("JWT")
                        .build(),
                ),
            );
        }
    }
}
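
For orientation, a hedged sketch of how a client would supply the two security schemes registered above. The header name and bearer format come from the SecurityAddon; the token values are placeholders.

use axum::http::{header::AUTHORIZATION, HeaderMap, HeaderValue};

fn auth_headers(api_key: &str, jwt: &str) -> HeaderMap {
    let mut headers = HeaderMap::new();
    // "api_key" scheme: ApiKey::Header(ApiKeyValue::new("x-api-key"))
    headers.insert("x-api-key", HeaderValue::from_str(api_key).expect("valid header value"));
    // "user_auth" scheme: HTTP Bearer carrying a JWT
    headers.insert(
        AUTHORIZATION,
        HeaderValue::from_str(&format!("Bearer {jwt}")).expect("valid header value"),
    );
    headers
}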

src/api/mod.rs | new file, +202
@@ -0,0 +1,202 @@
use std::{future::IntoFuture, net::Ipv4Addr, time::Duration};

use axum::http::HeaderName;
use axum_jwt_login::{AuthSessionWithApiKeyBuilder, JWTKeyPair};
use backend::ApiBackend;
use error_stack::{Report, ResultExt};
use routes::api_keys::models::ApiKey;
use sqlx::PgPool;
use tokio::net::TcpListener;
use tokio_util::sync::CancellationToken;
use tracing::{info, level_filters::LevelFilter, Level};
use tracing_rolling_file::{RollingConditionBase, RollingFileAppenderBase};
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt};

use crate::APP_NAME;
use crate::{config::Configuration, errors::AppError, ROOT_PATH};

pub mod backend;
mod description;
pub mod routes;

pub async fn start(
    config: &Configuration,
    cancellation_token: CancellationToken,
) -> Result<(), Report<AppError>> {
    // Set Report Colour Mode to NONE
    Report::set_color_mode(error_stack::fmt::ColorMode::None);

    let level_filter = LevelFilter::from_level(match config.debug {
        true => Level::DEBUG,
        false => Level::INFO,
    });

    // Prepare logging to file
    let file_appender = RollingFileAppenderBase::new(
        ROOT_PATH.with_file_name(format!("{}.log", env!("CARGO_PKG_NAME"))),
        RollingConditionBase::new().max_size(1024 * 1024 * 2),
        5,
    )
    .change_context(AppError)?;
    let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);

    // prepare initialization of logging
    let log_layers = tracing_subscriber::registry().with(level_filter).with(
        fmt::Layer::default()
            .with_target(false)
            .with_ansi(false)
            .with_writer(non_blocking),
    );

    // also log to console in debug mode
    #[cfg(debug_assertions)]
    let stdout_log = tracing_subscriber::fmt::layer().pretty();
    #[cfg(debug_assertions)]
    let log_layers = log_layers.with(stdout_log);

    // Initialize logging
    log_layers.init();

    // Initialize database pool
    let pool = PgPool::connect(&config.database_url)
        .await
        .change_context(AppError)?;
    // Get API Keys from database
    let api_keys = ApiKey::get_all_with_secret_attached(&pool, config)
        .await
        .change_context(AppError)?;

    // Initialize API Backend
    let backend = ApiBackend::new(pool.clone(), config);
    // Create Session storage for API
    let session = AuthSessionWithApiKeyBuilder::new()
        .backend(backend)
        .api_header_name(HeaderName::from_static("x-api-key"))
        .initial_api_keys(api_keys)
        .jwt_key_pair(JWTKeyPair::from_secret(&config.token_secret))
        .session_length(Duration::from_secs(config.session_length))
        .build()
        .change_context(AppError)?;

    // Create routes
    let router = routes::create_routes(session);

    // Define listener
    let port = config.port;
    let listener = TcpListener::bind((Ipv4Addr::LOCALHOST, port))
        .await
        .change_context(AppError)?;

    // Serve API
    let server = axum::serve(listener, router.into_make_service())
        .with_graceful_shutdown(shutdown_signal(cancellation_token));

    tokio::spawn(server.into_future());

    info!("{APP_NAME} up and running on port '{port}'.");

    Ok(())
}

async fn shutdown_signal(stop_signal: CancellationToken) {
    // Wait for shutdown event.
    stop_signal.cancelled().await;

    info!("Shutting down {APP_NAME}...");
}

// // Set Report Colour Mode to NONE
// Report::set_color_mode(error_stack::fmt::ColorMode::None);

// // Enable the `INFO` level for anything in `darl` as default
// let level_filter =
//     filter::Targets::new().with_target(env!("CARGO_PKG_NAME"), DEFAULT_LOG_LEVEL_FILTER);
// let (level_filter, tracing_target_reload_handle) = reload::Layer::new(level_filter);

// // Prepare logging to file
// let file_appender = RollingFileAppenderBase::new(
//     ROOT_PATH.with_file_name(format!("{}.log", env!("CARGO_PKG_NAME"))),
//     RollingConditionBase::new().max_size(1024 * 1024 * 2),
//     5,
// )
// .change_context(ServiceError::Starting)?;
// let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);

// // Prepare live logging to config webserver
// let (log_receiver, live_log_layer) = LiveLogLayer::new();

// // prepare initialization of logging
// let log_layers = tracing_subscriber::registry()
//     .with(level_filter)
//     // .with(filter::LevelFilter::DEBUG)
//     .with(live_log_layer)
//     .with(
//         fmt::Layer::default()
//             .with_target(false)
//             .with_ansi(false)
//             .with_writer(non_blocking),
//     );

// // also log to console in debug mode
// #[cfg(debug_assertions)]
// let stdout_log = tracing_subscriber::fmt::layer().pretty();
// #[cfg(debug_assertions)]
// let log_layers = log_layers.with(stdout_log);

// // Initialize logging
// log_layers.init();

// // Initialize local database
// let local_database = LocalDatabase::init()
//     .await
//     .change_context(ServiceError::Starting)?;

// // Load configuration from config files
// let (config, external_database) = Configuration::initialize(&local_database)
//     .await
//     .change_context(ServiceError::Starting)?;
// let standalone_external_db = StandaloneExternalDatabase::from(&external_database);

// // change log level to configured value
// if let Err(error) = tracing_target_reload_handle.modify(|filter| {
//     *filter = filter::Targets::new().with_target(env!("CARGO_PKG_NAME"), &config.log_level)
// }) {
//     error!("{error}");
// }

// // prepare and start connections
// let connections = MachineConnections::init(&local_database, &standalone_external_db, &config)
//     .await
//     .change_context(ServiceError::Starting)?;

// // start config server
// ConfigServer::start(
//     config.extended_config.webserver.config_server_port,
//     config.opc_configuration.clone(),
//     &local_database,
//     standalone_external_db,
//     connections,
//     tracing_target_reload_handle,
//     log_receiver,
// )
// .await;

// // start webserver
// WebServer::start(&config, &local_database).await;

// // initialize Logging to external database
// external_database.start_writer().await;

// info!("{APP_NAME} service is now running...");

// // block thread
// loop {
//     // Poll shutdown event.
//     if (stop_signal.recv().await).is_some() {
//         // Break the loop either upon stop or channel disconnect
//         info!("Shutting down {APP_NAME} service");
//         break;
//     };
// }

// Ok(())
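
A possible way to drive start() and its CancellationToken from a caller (a sketch, not part of this commit; the surrounding main/service wiring and a loaded Configuration are assumed):

use tokio_util::sync::CancellationToken;

async fn run_api(config: &crate::config::Configuration) -> Result<(), error_stack::Report<crate::errors::AppError>> {
    let token = CancellationToken::new();
    let shutdown = token.clone();
    tokio::spawn(async move {
        // translate Ctrl+C into the cancellation signal that shutdown_signal() waits on
        let _ = tokio::signal::ctrl_c().await;
        shutdown.cancel();
    });
    crate::api::start(config, token).await
}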

src/api/routes/api_keys/mod.rs | new file, +145
@@ -0,0 +1,145 @@
use axum::{debug_handler, extract::Query, Extension, Json};
use models::ApiKey;
use serde::Deserialize;
use utoipa::IntoParams;
use utoipa_axum::{router::OpenApiRouter, routes};

use crate::{
    api::{
        backend::ApiBackend,
        description::API_KEY_TAG,
        routes::{
            users::permissions::{Permission, PermissionDetail},
            User,
        },
    },
    errors::ApiError,
    permission_required,
};

use super::AuthBackendType;

pub mod models;
pub mod sql;

// expose the OpenAPI to parent module
pub fn router() -> OpenApiRouter {
    let read = OpenApiRouter::new()
        .routes(routes!(get_api_keys))
        .route_layer(permission_required!(Permission::Read(
            PermissionDetail::APIKeys
        )));
    let write = OpenApiRouter::new()
        .routes(routes!(create_api_key, update_api_key, delete_api_key))
        .route_layer(permission_required!(Permission::Write(
            PermissionDetail::APIKeys
        )));

    OpenApiRouter::new().merge(read).merge(write)
}

#[debug_handler]
#[utoipa::path(
    get,
    path = "/apikeys",
    summary = "Get all API Keys",
    description = "Get a list of all configured API Keys.",
    responses(
        (status = OK, body = Vec<ApiKey>, description = "List of API Keys"),
    ),
    security(
        ("user_auth" = ["read:apikeys",]),
    ),
    tag = API_KEY_TAG)]
pub async fn get_api_keys(
    Extension(backend): Extension<ApiBackend>,
) -> Result<Json<Vec<ApiKey>>, ApiError> {
    Ok(Json(sql::get_api_keys(backend.pool()).await?))
}

#[debug_handler]
#[utoipa::path(
    post,
    path = "/apikeys",
    summary = "Create new API Key",
    description = "Create a new API Key.",
    request_body(content = ApiKey, description = "API Key details", content_type = "application/json"),
    responses(
        (status = OK, description = "API Key successfully created (API Key Secret in Body)", body = String),
    ),
    security(
        ("user_auth" = ["write:apikeys",]),
    ),
    tag = API_KEY_TAG)]
pub async fn create_api_key(
    auth_session: AuthBackendType,
    Json(api_key): Json<ApiKey>,
) -> Result<String, ApiError> {
    let backend = auth_session.backend();

    // create new API key
    let (key_secret, key) = ApiKey::create(
        &api_key.name,
        api_key.auth_required,
        api_key.permissions.0,
        backend.config(),
    )?;

    // insert API Key into database
    sql::create_api_key(backend.pool(), &key).await?;

    // add API Key to session
    auth_session.add_api_key(key).await;

    // Return key secret in response
    Ok(key_secret)
}

#[debug_handler]
#[utoipa::path(
    put,
    path = "/apikeys",
    summary = "Update API Key",
    description = "Update an API Key.",
    request_body(content = ApiKey, description = "API Key details", content_type = "application/json"),
    responses(
        (status = OK, description = "API Key successfully updated"),
    ),
    security(
        ("user_auth" = ["write:apikeys",]),
    ),
    tag = API_KEY_TAG)]
pub async fn update_api_key(
    Extension(backend): Extension<ApiBackend>,
    Json(api_key): Json<ApiKey>,
) -> Result<(), ApiError> {
    sql::update_api_key(backend.pool(), &api_key).await?;
    Ok(())
}

#[derive(Debug, Deserialize, IntoParams)]
pub struct DeleteQueryParameters {
    key_id: String,
}

#[debug_handler]
#[utoipa::path(
    delete,
    path = "/apikeys",
    summary = "Delete API Key",
    description = "Delete an API Key.",
    params(DeleteQueryParameters),
    responses(
        (status = OK, description = "API Key successfully deleted"),
    ),
    security(
        ("user_auth" = ["write:apikeys",]),
    ),
    tag = API_KEY_TAG)]
pub async fn delete_api_key(
    Extension(backend): Extension<ApiBackend>,
    Query(params): Query<DeleteQueryParameters>,
) -> Result<(), ApiError> {
    sql::delete_api_key(backend.pool(), &params.key_id).await?;
    Ok(())
}

src/api/routes/api_keys/models.rs | new file, +143
@@ -0,0 +1,143 @@
use std::collections::HashSet;

use axum_jwt_login::ApiKey as ApiKeyTrait;
use chrono::NaiveDateTime;
use hmac::{Hmac, Mac};
use serde::{Deserialize, Serialize};
use sha2::Sha512;
use sqlx::PgPool;
use ts_rs::TS;
use utoipa::ToSchema;
use uuid::Uuid;

use crate::{
    api::routes::users::permissions::{Permission, PermissionContainer},
    config::Configuration,
    errors::ApiError,
    utils::create_random,
};

const KEY_LENGTH: usize = 40;

#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
#[ts(export)]
pub struct ApiKey {
    pub id: String,
    #[serde(skip)] // Don't leak Hash
    pub hash: Vec<u8>,
    pub name: String,
    pub auth_required: bool,
    #[schema(inline)]
    pub permissions: PermissionContainer,
    #[serde(skip)] // Don't leak secret
    pub api_config_secret: Option<String>,
    #[serde(default)]
    #[schema(value_type = String, read_only)]
    pub creation_date: Option<NaiveDateTime>,
    #[serde(default)]
    #[schema(value_type = String, read_only)]
    pub last_change: Option<NaiveDateTime>,
}

impl ApiKey {
    pub async fn get_all_with_secret_attached(
        pool: &PgPool,
        config: &Configuration,
    ) -> Result<Vec<Self>, ApiError> {
        let mut keys = super::sql::get_api_keys(pool).await?;

        for key in keys.iter_mut() {
            key.api_config_secret = Some(config.token_secret.clone());
        }

        Ok(keys)
    }

    pub fn create(
        name: &str,
        requires_auth: bool,
        permissions: HashSet<Permission>,
        config: &Configuration,
    ) -> Result<(String, Self), ApiError> {
        // create uuid
        let uuid = Uuid::new_v4().simple();
        // create API Key secret part
        let key: String = create_random(KEY_LENGTH);
        // calculate API Key signature
        let mut mac = Hmac::<Sha512>::new_from_slice(config.token_secret.as_bytes())?;
        mac.update(key.as_bytes());
        let signature = mac.finalize();

        // Return Api Key Secret and Api Key
        Ok((
            key,
            Self {
                name: name.to_string(),
                auth_required: requires_auth,
                id: uuid.to_string(),
                hash: signature.into_bytes().as_slice().to_vec(),
                permissions: PermissionContainer(permissions),
                api_config_secret: Some(config.token_secret.clone()),
                creation_date: None,
                last_change: None,
            },
        ))
    }

    pub fn validate(&self, key_secret_part: &str) -> Result<(), ApiError> {
        // calculate API Key signature
        let mut mac = Hmac::<Sha512>::new_from_slice(
            self.api_config_secret
                .clone()
                .ok_or(ApiError::InternalError(
                    "Missing API Config Secret".to_string(),
                ))?
                .as_bytes(),
        )?;
        mac.update(key_secret_part.as_bytes());

        match mac.verify_slice(&self.hash) {
            Ok(_) => Ok(()),
            Err(_e) => Err(ApiError::AccessDenied),
        }
    }
}

impl ApiKeyTrait for ApiKey {
    type Permission = Permission;

    fn id(&self) -> String {
        self.id.clone()
    }
    fn validate(&self, key_secret_part: &str) -> bool {
        self.validate(key_secret_part).is_ok()
    }
    fn requires_user_auth(&self) -> bool {
        self.auth_required
    }
    fn get_permissions(&self) -> HashSet<Self::Permission> {
        self.permissions.0.clone()
    }
}

#[cfg(test)]
mod test {
    use std::collections::HashSet;

    use crate::config::Configuration;

    use super::ApiKey;

    #[tokio::test]
    async fn test_api_key_creation_and_validation() {
        let config = Configuration {
            token_secret: "abcdefghijk".to_string(),
            ..Default::default()
        };
        let (key, api_key) =
            ApiKey::create("name", true, HashSet::new(), &config).expect("Error creating API Key");
        println!("{key}, {api_key:?}");
        assert_eq!(Ok(()), api_key.validate(&key));
        assert_ne!(Ok(()), api_key.validate("124"));
    }
}

src/api/routes/api_keys/sql.rs | new file, +135
@@ -0,0 +1,135 @@
use sqlx::{PgPool, Postgres, Transaction};

use crate::errors::ApiError;

use super::models::ApiKey;

pub async fn get_api_keys(pool: &PgPool) -> Result<Vec<ApiKey>, ApiError> {
    Ok(sqlx::query_as!(
        ApiKey,
        r#"SELECT
            APIKEYS."KeyID" as id,
            APIKEYS."Name" as name,
            APIKEYS."Hash" as hash,
            APIKEYS."UserAuthRequired" as auth_required,
            APIKEYS."CreationDate" as "creation_date?",
            APIKEYS."LastChanged" as "last_change?",
            NULL as api_config_secret,
            array_remove(ARRAY_AGG(APIKEY_PERMISSIONS."Permission"), NULL) AS permissions
        FROM
            apikeys
            LEFT JOIN APIKEY_PERMISSIONS ON APIKEY_PERMISSIONS."KeyID" = APIKEYS."KeyID"
        GROUP BY APIKEYS."KeyID""#
    )
    .fetch_all(pool)
    .await?)
}

pub async fn create_api_key(pool: &PgPool, api_key: &ApiKey) -> Result<(), ApiError> {
    // start transaction
    let mut transaction = pool.begin().await?;

    // create api_key
    sqlx::query!(
        r#"INSERT INTO apikeys
            ("KeyID", "Name", "UserAuthRequired", "Hash")
        VALUES ($1, $2, $3, $4)"#,
        api_key.id,
        api_key.name,
        api_key.auth_required,
        api_key.hash,
    )
    .execute(&mut *transaction)
    .await?;

    // update permissions
    update_permissions(&mut transaction, &api_key).await?;

    // commit transaction
    transaction.commit().await?;

    Ok(())
}

pub async fn update_api_key(pool: &PgPool, api_key: &ApiKey) -> Result<(), ApiError> {
    // start transaction
    let mut transaction = pool.begin().await?;

    // update api_key
    sqlx::query!(
        r#"UPDATE apikeys SET
            "Name" = $2,
            "UserAuthRequired" = $3
        WHERE "KeyID" = $1"#,
        api_key.id,
        api_key.name,
        api_key.auth_required,
    )
    .execute(&mut *transaction)
    .await?;

    // update permissions
    update_permissions(&mut transaction, &api_key).await?;

    // commit transaction
    transaction.commit().await?;

    Ok(())
}

pub async fn update_permissions(
    transaction: &mut Transaction<'static, Postgres>,
    api_key: &ApiKey,
) -> Result<(), ApiError> {
    // delete all permissions for API Key
    sqlx::query!(
        r#"DELETE FROM apikey_permissions
        WHERE "KeyID" = $1"#,
        api_key.id,
    )
    .execute(&mut **transaction)
    .await?;

    // insert permissions
    for permission in &api_key.permissions.0 {
        sqlx::query!(
            r#"INSERT INTO apikey_permissions
                ("KeyID", "Permission")
            VALUES ($1, $2)"#,
            api_key.id,
            permission.to_string()
        )
        .execute(&mut **transaction)
        .await?;
    }

    Ok(())
}

pub async fn delete_api_key(pool: &PgPool, key_id: &str) -> Result<(), ApiError> {
    // start transaction
    let mut transaction = pool.begin().await?;

    // Delete permissions
    sqlx::query!(
        r#"DELETE FROM apikey_permissions
        WHERE "KeyID" = $1"#,
        key_id,
    )
    .execute(&mut *transaction)
    .await?;

    // Delete API Key
    sqlx::query!(
        r#"DELETE FROM apikeys
        WHERE "KeyID" = $1"#,
        key_id,
    )
    .execute(&mut *transaction)
    .await?;

    // commit transaction
    transaction.commit().await?;

    Ok(())
}

src/api/routes/auth/mod.rs | new file, +62
@@ -0,0 +1,62 @@
use axum::{debug_handler, Json};
use models::Credentials;
use utoipa_axum::{router::OpenApiRouter, routes};

use crate::{api::description::AUTH_TAG, errors::ApiError};

use super::AuthBackendType;

pub mod models;

// expose the OpenAPI to parent module
pub fn router() -> OpenApiRouter {
    OpenApiRouter::new()
        .routes(routes!(authorize))
        .routes(routes!(logout))
}

#[debug_handler]
#[utoipa::path(
    post,
    path = "/login",
    summary = "Authenticate as user",
    description = "Authenticate as user and receive a JWT auth token.",
    request_body(content = Credentials, description = "User credentials", content_type = "application/json"),
    responses(
        (status = OK, body = String, description = "Successfully logged in (JWT Token in body)"),
        (status = UNAUTHORIZED, description = "Invalid credentials or unauthorized user")
    ),
    tag = AUTH_TAG)]
pub async fn authorize(
    mut auth_session: AuthBackendType,
    Json(credentials): Json<Credentials>,
) -> Result<String, ApiError> {
    let token = match auth_session.authenticate(credentials).await {
        Ok(Some(_user)) => {
            if let Some(token) = auth_session.get_auth_token() {
                token
            } else {
                return Err(ApiError::InvalidCredentials);
            }
        }
        Ok(None) => return Err(ApiError::InvalidCredentials),
        Err(_) => return Err(ApiError::InvalidCredentials),
    };

    Ok(token)
}

#[debug_handler]
#[utoipa::path(
    post,
    path = "/logout",
    summary = "Logout",
    description = "Log the currently logged-in user out.",
    responses(
        (status = OK, description = "Logout successful")
    ),
    tag = AUTH_TAG)]
pub async fn logout(mut auth_session: AuthBackendType) -> Result<(), ApiError> {
    auth_session.logout().await?;
    Ok(())
}

src/api/routes/auth/models.rs | new file, +8
@@ -0,0 +1,8 @@
use serde::Deserialize;
use utoipa::ToSchema;

#[derive(Debug, Deserialize, ToSchema)]
pub struct Credentials {
    pub id: String,
    pub password: String,
}
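
Illustration (a hedged sketch, not part of the commit): the JSON document the /api/login endpoint deserializes into Credentials. The field names come from the struct above, the values are placeholders, and serde_json is assumed to be available (axum already pulls it in transitively).

fn example_login_payload() -> serde_json::Value {
    // field names match the Credentials struct above; values are placeholders
    serde_json::json!({
        "id": "jdoe",
        "password": "hunter2",
    })
}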

src/api/routes/mod.rs | new file, +64
@@ -0,0 +1,64 @@
pub mod api_keys;
pub mod auth;
pub mod users;

use api_keys::models::ApiKey as APIKey;
use axum::{Extension, Router};
use axum_jwt_login::AuthSessionWithApiKey;
use users::models::User;
use utoipa::OpenApi;
use utoipa_axum::router::OpenApiRouter;
use utoipa_redoc::{Redoc, Servable};
use utoipa_scalar::{Scalar, Servable as ScalarServable};
use utoipa_swagger_ui::SwaggerUi;

use super::{backend::ApiBackend, description::ApiDocumentation};

const API_BASE: &str = "/api";

type AuthBackendType = AuthSessionWithApiKey<User, ApiBackend, APIKey>;

#[macro_export]
macro_rules! login_required {
    () => {
        axum_jwt_login::login_required!(User, ApiBackend, APIKey)
    };
}

#[macro_export]
macro_rules! permission_required {
    ($($perm:expr),+ $(,)?) => {
        axum_jwt_login::permission_required!(
            User,
            ApiBackend,
            crate::api::routes::APIKey,
            $($perm),+
        )
    };
}

pub fn create_routes(session: AuthBackendType) -> Router {
    let backend = session.backend();
    let (router, api) = OpenApiRouter::with_openapi(ApiDocumentation::openapi())
        // .routes(routes!(health))
        .nest(API_BASE, auth::router())
        .nest(API_BASE, users::router())
        .nest(API_BASE, api_keys::router())
        // .nest(
        //     "/api/order",
        //     // order::router().route_layer(crate::login_required!(AuthenticationBackend<ApiKey>)),
        //     order::router().route_layer(login_required!(User, auth_backend::AuthenticationBackend)),
        // )
        // .routes(routes!(
        //     inner::secret_handlers::get_secret,
        //     inner::secret_handlers::post_secret
        // ))
        // .nest("/api", users::router())
        // // .layer(auth_layer)
        .layer(session.into_layer())
        .layer(Extension(backend))
        .split_for_parts();

    router
        .merge(SwaggerUi::new("/swagger-ui").url("/apidoc/openapi.json", api.clone()))
        .merge(Redoc::with_url("/redoc", api.clone()))
        .merge(Scalar::with_url("/scalar", api))
}

src/api/routes/users/mod.rs | new file, +159
@@ -0,0 +1,159 @@
use axum::{debug_handler, Extension, Json};
use axum_jwt_login::UserPermissions;
use models::User;
use permissions::{Permission, PermissionDetail};
use utoipa_axum::{router::OpenApiRouter, routes};

use crate::{
    api::{
        backend::{
            ldap::{ActiveDirectoryUser, LDAPBackend},
            ApiBackend,
        },
        description::USERS_TAG,
        routes::auth::models::Credentials,
    },
    errors::ApiError,
    permission_required,
    utils::create_random,
};

pub mod models;
pub mod permissions;
pub mod sql;

// expose the OpenAPI to parent module
pub fn router() -> OpenApiRouter {
    let read = OpenApiRouter::new()
        .routes(routes!(get_users))
        .route_layer(permission_required!(Permission::Read(
            PermissionDetail::Users
        )));
    let write = OpenApiRouter::new()
        .routes(routes!(create_user, update_user))
        .routes(routes!(get_ad_users))
        .route_layer(permission_required!(Permission::Write(
            PermissionDetail::Users
        )));

    OpenApiRouter::new().merge(read).merge(write)
}

#[debug_handler]
#[utoipa::path(
    get,
    path = "/users",
    summary = "Get all Users",
    description = "Get a list of all users.",
    responses(
        (status = OK, body = Vec<User>, description = "List of users"),
    ),
    security(
        ("user_auth" = ["read:users",]),
    ),
    tag = USERS_TAG)]
pub async fn get_users(
    Extension(backend): Extension<ApiBackend>,
) -> Result<Json<Vec<User>>, ApiError> {
    Ok(Json(sql::get_users(backend.pool(), None, None).await?))
}

#[debug_handler]
#[utoipa::path(
    put,
    path = "/users",
    summary = "Change User details",
    description = "Update user information / permissions / groups",
    request_body(content = User, description = "User details", content_type = "application/json"),
    responses(
        (status = OK, description = "User successfully updated"),
    ),
    security(
        ("user_auth" = ["write:users"]),
    ),
    tag = USERS_TAG)]
pub async fn update_user(
    Extension(backend): Extension<ApiBackend>,
    Json(user): Json<User>,
) -> Result<(), ApiError> {
    sql::update_user(backend.pool(), &user).await?;

    Ok(())
}

#[debug_handler]
#[utoipa::path(
    post,
    path = "/users",
    summary = "Create a new User",
    description = "Creates a new user with the given information",
    request_body(content = User, description = "User details", content_type = "application/json"),
    responses(
        (status = OK, description = "User successfully created (Assigned Password in body)", body = String),
    ),
    security(
        ("user_auth" = ["write:users",]),
    ),
    tag = USERS_TAG)]
pub async fn create_user(
    Extension(backend): Extension<ApiBackend>,
    Json(user): Json<User>,
) -> Result<String, ApiError> {
    // create password if not Active Directory user
    let (password, hash) = match user.active_directory_auth {
        true => (String::new(), None),
        false => {
            let salt = create_random(20);
            let argon_config = argon2::Config::default();

            let password = create_random(10);
            let password_hash =
                argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &argon_config)?;

            (password, Some(password_hash))
        }
    };
    // create user
    sql::create_new_user(backend.pool(), &user, hash).await?;
    // send created password back to frontend
    Ok(password)
}

#[debug_handler]
#[utoipa::path(
    get,
    path = "/users/available_ad_users",
    summary = "Get Active Directory Users",
    description = "Get all available users from the Active Directory that are not already registered with this API.",
    responses(
        (status = OK, body = Vec<ActiveDirectoryUser>, description = "List of AD users"),
    ),
    security(
        ("user_auth" = ["write:users",]),
    ),
    tag = USERS_TAG)]
pub async fn get_ad_users(
    Extension(backend): Extension<ApiBackend>,
    Json(credentials): Json<Option<Credentials>>,
) -> Result<Json<Vec<ActiveDirectoryUser>>, ApiError> {
    let api_user_ids: Vec<String> = sql::get_users(backend.pool(), None, None)
        .await?
        .iter()
        .map(|user| user.id())
        .collect();
    let mut ldap = LDAPBackend::from_config(backend.config()).await?;
    // bind to AD user if credentials are given
    if let Some(credentials) = credentials {
        ldap.ad_bind(&credentials.id, &credentials.password).await?;
    }
    let ad_users = ldap
        .get_ad_user_list()
        .await?
        .into_iter()
        .filter(|entry| !api_user_ids.contains(&entry.id))
        .collect();
    // disconnect from AD server
    ldap.unbind().await;

    Ok(Json(ad_users))
}

src/api/routes/users/models.rs | new file, +118
@@ -0,0 +1,118 @@
use std::collections::{HashMap, HashSet};

use axum_jwt_login::UserPermissions;
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use ts_rs::TS;
use utoipa::ToSchema;

use crate::errors::ApiError;

use super::{
    permissions::{Permission, PermissionContainer},
    sql,
};

#[derive(Debug, Clone, Copy, Serialize, Deserialize, sqlx::Type, TS, ToSchema)]
#[repr(i16)]
pub enum UserStatus {
    Deleted = -1,
    Deactivated = 0,
    Active = 1,
}

#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
pub struct GroupContainer(pub HashSet<i32>);

impl From<Option<Vec<i32>>> for GroupContainer {
    fn from(value: Option<Vec<i32>>) -> Self {
        let set = match value {
            Some(values) => HashSet::from_iter(values),
            None => HashSet::new(),
        };

        Self(set)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
#[ts(export)]
pub struct User {
    pub user_id: String,
    pub active_directory_auth: bool,
    #[serde(skip)] // do not leak password hashes
    pub password: String,
    pub name: String,
    pub surname: String,
    pub email: String,
    #[schema(inline)]
    pub groups: GroupContainer,
    #[schema(inline)]
    pub group_permissions: PermissionContainer,
    #[schema(inline)]
    pub permissions: PermissionContainer,
    pub status_flag: UserStatus,
    #[serde(default)]
    #[schema(value_type = String, read_only)]
    pub creation_date: NaiveDateTime,
    #[serde(default)]
    #[schema(value_type = String, read_only)]
    pub last_change: NaiveDateTime,
}

impl UserPermissions for User {
    type Error = ApiError;
    type Permission = Permission;
    type Id = String;

    fn id(&self) -> Self::Id {
        self.user_id.clone()
    }

    fn get_user_permissions(&self) -> Result<HashSet<Self::Permission>, Self::Error> {
        Ok(self.permissions.0.iter().cloned().collect())
    }
    fn get_group_permissions(&self) -> Result<HashSet<Self::Permission>, Self::Error> {
        Ok(self.group_permissions.0.iter().cloned().collect())
    }
}

impl User {
    pub async fn update_with_ad_details(
        &self,
        pool: &PgPool,
        details: HashMap<String, Vec<String>>,
    ) -> Result<(), ApiError> {
        let create_error = |key: &str| {
            ApiError::InternalError(format!(
                "Update User Information: Information '{key}' is not present"
            ))
        };

        // get field values
        let name = details
            .get("givenName")
            .ok_or(create_error("Name"))?
            .first()
            .ok_or(create_error("Name"))?;
        let surname = details
            .get("sn")
            .ok_or(create_error("Surname"))?
            .first()
            .ok_or(create_error("Surname"))?;
        let email = details
            .get("mail")
            .ok_or(create_error("Email"))?
            .first()
            .ok_or(create_error("Email"))?;

        if name != &self.name || surname != &self.surname || email != &self.email {
            let mut transaction = pool.begin().await?;
            sql::update_user_details(&mut transaction, &self.id(), name, surname, email).await?;
            transaction.commit().await?;
        }

        Ok(())
    }
}
102
src/api/routes/users/permissions.rs
Normal file
@ -0,0 +1,102 @@
use std::{collections::HashSet, convert::Infallible, fmt::Display, str::FromStr};

use serde::{Deserialize, Serialize};
use strum::EnumString;
use ts_rs::TS;
use utoipa::ToSchema;

#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash, TS, ToSchema)]
pub enum Permission {
    Read(PermissionDetail),
    Write(PermissionDetail),
    None,
}

impl FromStr for Permission {
    type Err = Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some((permission, detail)) = s.split_once(':') {
            match permission {
                "Read" => Ok(Self::Read(
                    PermissionDetail::from_str(detail).unwrap_or_default(),
                )),
                "Write" => Ok(Self::Write(
                    PermissionDetail::from_str(detail).unwrap_or_default(),
                )),
                _ => Ok(Self::None),
            }
        } else {
            Ok(Self::None)
        }
    }
}

impl Display for Permission {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let string = match self {
            Permission::Read(detail) => format!("Read:{detail}"),
            Permission::Write(detail) => format!("Write:{detail}"),
            Permission::None => "None".to_string(),
        };
        f.write_str(&string)
    }
}

#[derive(
    Debug,
    Default,
    Clone,
    Copy,
    Serialize,
    Deserialize,
    PartialEq,
    Eq,
    Hash,
    EnumString,
    strum::Display,
    TS,
    ToSchema,
)]
pub enum PermissionDetail {
    Users,
    APIKeys,
    #[default]
    None,
}

#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
pub struct PermissionContainer(pub HashSet<Permission>);

impl From<Option<Vec<String>>> for PermissionContainer {
    fn from(value: Option<Vec<String>>) -> Self {
        let set = match value {
            Some(values) => HashSet::from_iter(
                values
                    .iter()
                    .map(|s| Permission::from_str(s).unwrap_or(Permission::None)),
            ),
            None => HashSet::new(),
        };

        Self(set)
    }
}

#[cfg(test)]
mod test {
    use std::str::FromStr;

    use crate::api::routes::users::permissions::PermissionDetail;

    use super::Permission;

    #[tokio::test]
    async fn test_permissions() {
        let permission = Permission::Read(PermissionDetail::Users);
        assert_eq!("Read:Users", permission.to_string());

        let parsed = Permission::from_str("Write:Users").unwrap();
        assert_eq!(parsed, Permission::Write(PermissionDetail::Users));
    }
}
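As a quick illustration of how the permission strings stored per user and group map back onto the enum above, here is a minimal sketch (not part of the commit); note that any unrecognised prefix deliberately degrades to Permission::None rather than failing:

use std::str::FromStr;

// "Read:Users" round-trips through Display / FromStr
let read_users = Permission::from_str("Read:Users").unwrap();
assert_eq!(read_users, Permission::Read(PermissionDetail::Users));
assert_eq!(read_users.to_string(), "Read:Users");
// unknown verbs collapse to None instead of returning an error
assert_eq!(Permission::from_str("Delete:Users").unwrap(), Permission::None);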
184
src/api/routes/users/sql.rs
Normal file
@ -0,0 +1,184 @@
use sqlx::{PgPool, Postgres, Transaction};

use crate::errors::ApiError;

use super::models::{User, UserStatus};

pub async fn get_users(
    pool: &PgPool,
    filter_status: Option<UserStatus>,
    filter_id: Option<String>,
) -> Result<Vec<User>, ApiError> {
    Ok(sqlx::query_as!(
        User,
        r#"SELECT
            USERS."UserID" as user_id,
            USERS."ActiveDirectoryAuth" as active_directory_auth,
            USERS."Password" as password,
            USERS."Name" as name,
            USERS."Surname" as surname,
            USERS."Email" as email,
            USERS."StatusFlag" as "status_flag: UserStatus",
            USERS."CreationDate" as "creation_date",
            USERS."LastChanged" as "last_change",
            array_remove(ARRAY_AGG(USERS_GROUPS."GroupID"), NULL) AS groups,
            array_remove(ARRAY_AGG(USER_PERMISSIONS."Permission"), NULL) AS permissions,
            array_remove(ARRAY_AGG(GROUP_PERMISSIONS."Permission"), NULL) AS group_permissions
        FROM
            users
            LEFT JOIN PUBLIC.USER_PERMISSIONS ON USER_PERMISSIONS."UserID" = USERS."UserID"
            LEFT JOIN USERS_GROUPS ON USERS."UserID" = USERS_GROUPS."UserID"
            LEFT JOIN GROUP_PERMISSIONS ON GROUP_PERMISSIONS."GroupID" = USERS_GROUPS."GroupID"
        WHERE
            ($1::smallint IS NULL OR USERS."StatusFlag" = $1)
            AND ($2::varchar IS NULL OR USERS."UserID" = $2)
        GROUP BY USERS."UserID""#,
        filter_status.map(|s| s as i16),
        filter_id
    )
    .fetch_all(pool)
    .await?)
}

pub async fn update_user(pool: &PgPool, user: &User) -> Result<(), ApiError> {
    let mut transaction = pool.begin().await?;

    // update general flags
    sqlx::query!(
        r#"UPDATE users SET
            "ActiveDirectoryAuth" = $2,
            "StatusFlag" = $3,
            "LastChanged" = NOW()
        WHERE "UserID" = $1"#,
        user.user_id,
        user.active_directory_auth,
        user.status_flag as i16,
    )
    .execute(&mut *transaction)
    .await?;

    // update user information
    update_user_details(
        &mut transaction,
        &user.user_id,
        &user.name,
        &user.surname,
        &user.email,
    )
    .await?;
    // update permissions
    update_user_permissions(&mut transaction, &user).await?;
    // update groups
    update_user_groups(&mut transaction, user).await?;

    // commit transaction
    transaction.commit().await?;

    Ok(())
}

pub async fn update_user_details<'a>(
    pool: &'a mut Transaction<'static, Postgres>,
    user_id: &String,
    name: &String,
    surname: &String,
    email: &String,
) -> Result<(), ApiError> {
    sqlx::query!(
        r#"UPDATE users SET
            "Name" = $2,
            "Surname" = $3,
            "Email" = $4
        WHERE "UserID" = $1"#,
        user_id,
        name,
        surname,
        email
    )
    .execute(&mut **pool)
    .await?;

    Ok(())
}

pub async fn update_user_permissions<'a>(
    pool: &'a mut Transaction<'static, Postgres>,
    user: &User,
) -> Result<(), ApiError> {
    // delete all permissions for user
    sqlx::query!(
        r#"DELETE FROM user_permissions
        WHERE "UserID" = $1"#,
        user.user_id,
    )
    .execute(&mut **pool)
    .await?;

    // insert permissions
    for permission in &user.permissions.0 {
        sqlx::query!(
            r#"INSERT INTO user_permissions
            ("UserID", "Permission")
            VALUES ($1, $2)"#,
            user.user_id,
            permission.to_string()
        )
        .execute(&mut **pool)
        .await?;
    }

    Ok(())
}

pub async fn update_user_groups<'a>(
    pool: &'a mut Transaction<'static, Postgres>,
    user: &User,
) -> Result<(), ApiError> {
    // delete all group assignments for user
    sqlx::query!(
        r#"DELETE FROM users_groups
        WHERE "UserID" = $1"#,
        user.user_id,
    )
    .execute(&mut **pool)
    .await?;

    // insert group assignments
    for group_id in &user.groups.0 {
        sqlx::query!(
            r#"INSERT INTO users_groups
            ("UserID", "GroupID")
            VALUES ($1, $2)"#,
            user.user_id,
            group_id
        )
        .execute(&mut **pool)
        .await?;
    }

    Ok(())
}

pub async fn create_new_user(
    pool: &PgPool,
    user: &User,
    password: Option<String>,
) -> Result<(), ApiError> {
    let mut transaction = pool.begin().await?;

    sqlx::query!(
        r#"INSERT INTO users
        ("UserID", "Password")
        VALUES ($1, $2)"#,
        user.user_id,
        password
    )
    .execute(&mut *transaction)
    .await?;

    // commit transaction
    transaction.commit().await?;

    Ok(())
}
69
src/authentication/api_key.rs
Normal file
@ -0,0 +1,69 @@
use rand::{distributions::Alphanumeric, Rng};
use sha2::{Digest, Sha512};
use uuid::Uuid;

pub(crate) const KEY_LENGTH: usize = 40;

pub trait ApiKeyInfo {
    fn id(&self) -> String;
    fn validate(&self, hash: &str) -> bool;
    fn requires_auth(&self) -> bool;
}

impl ApiKeyInfo for ApiKey {
    fn id(&self) -> String {
        self.id.to_string()
    }

    fn validate(&self, hash: &str) -> bool {
        let mut sha512 = Sha512::new();
        sha512.update(hash);

        format!("{:X}", sha512.finalize()) == self.hash
    }

    fn requires_auth(&self) -> bool {
        self.auth_required
    }
}

#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub(crate) struct ApiKey {
    pub(crate) id: String,
    pub(crate) key: Option<String>,
    pub(crate) hash: String,
    pub(crate) name: String,
    pub(crate) auth_required: bool,
}

impl ApiKey {
    pub(crate) fn create(name: &str, requires_auth: bool) -> Self {
        let uuid = Uuid::new_v4().simple();
        let key: String = create_random(KEY_LENGTH);
        let mut sha512 = Sha512::new();
        sha512.update(key.clone());

        ApiKey {
            id: uuid.to_string(),
            key: Some(key.to_owned()),
            hash: format!("{:X}", sha512.finalize()),
            auth_required: requires_auth,
            name: name.to_string(),
        }
    }

    pub(crate) fn validate(&self, password: &str) -> bool {
        let mut sha512 = Sha512::new();
        sha512.update(password);

        format!("{:X}", sha512.finalize()) == self.hash
    }
}

pub(crate) fn create_random(len: usize) -> String {
    rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(len)
        .map(char::from)
        .collect()
}
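A short sketch of the intended lifecycle of this struct, assuming the client later presents the key as "<id>.<secret>" (the format the authentication layer splits on and the CLI prints); the name is invented for illustration:

let api_key = ApiKey::create("ci-pipeline", false);
// the plaintext secret only exists on this freshly created value
let secret = api_key.key.clone().unwrap();
// the server keeps only the hex-encoded SHA-512 hash and compares against it
assert!(api_key.validate(&secret));
assert!(!api_key.validate("wrong-secret"));
println!("header value a client would send: {}.{secret}", api_key.id);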
27
src/authentication/extract.rs
Normal file
@ -0,0 +1,27 @@
use axum::{
    async_trait,
    extract::FromRequestParts,
    http::{request::Parts, StatusCode},
};

use super::AuthenticationBackend;

#[async_trait]
impl<S, T> FromRequestParts<S> for AuthenticationBackend<T>
where
    S: Send + Sync,
    T: Send + Sync + Clone + 'static,
{
    type Rejection = (StatusCode, &'static str);

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        parts
            .extensions
            .get::<AuthenticationBackend<T>>()
            .cloned()
            .ok_or((
                StatusCode::INTERNAL_SERVER_ERROR,
                "Can't extract auth session. Is `AuthenticationLayer` enabled?",
            ))
    }
}
112
src/authentication/jwt.rs
Normal file
@ -0,0 +1,112 @@
use std::time::Duration;

use chrono::{DateTime, TimeZone, Utc};
use jsonwebtoken::{decode, Algorithm, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

use super::user::User;

// Session length in seconds -> 12 hours
pub(crate) const DEFAULT_SESSION_LENGTH: std::time::Duration = Duration::from_secs(12 * 3600);

#[derive(Clone)]
pub struct JwtAuthorizer {
    encoding_key: EncodingKey,
    decoding_key: DecodingKey,
    algorithm: Algorithm,
}

// Here we've implemented `Debug` manually to avoid accidentally logging the keys.
impl std::fmt::Debug for JwtAuthorizer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("JwtAuthorizer")
            .field("encoding_key", &"[redacted]")
            .field("decoding_key", &"[redacted]")
            .field("algorithm", &self.algorithm)
            .finish()
    }
}

impl JwtAuthorizer {
    pub fn new(encoding_key: EncodingKey, decoding_key: DecodingKey, algorithm: Algorithm) -> Self {
        Self {
            encoding_key,
            decoding_key,
            algorithm,
        }
    }

    pub fn encode(&self, user: User) -> Result<String, jsonwebtoken::errors::Error> {
        let claims = RegisteredClaims::new(user);
        jsonwebtoken::encode(&Header::default(), &claims, &self.encoding_key)
    }

    pub fn decode(&self, token: &str) -> Result<User, jsonwebtoken::errors::Error> {
        decode::<RegisteredClaims<User>>(
            &token,
            &self.decoding_key,
            &Validation::new(self.algorithm),
        )
        .map(|data| data.claims.data)
    }
}

/// Claims mentioned in the JWT specifications.
///
/// <https://www.rfc-editor.org/rfc/rfc7519#section-4.1>
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct RegisteredClaims<T> {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub iss: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sub: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exp: Option<NumericDate>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub nbf: Option<NumericDate>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub iat: Option<NumericDate>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jti: Option<String>,
    pub data: T,
}

impl<T> RegisteredClaims<T> {
    pub fn new(data: T) -> Self {
        let now = Utc::now();
        let exp = now + DEFAULT_SESSION_LENGTH;
        Self {
            iss: Some(env!("CARGO_PKG_NAME").to_string()),
            sub: Some("User token".to_string()),
            exp: Some(exp.into()),
            nbf: Some(now.into()),
            iat: Some(now.into()),
            jti: None,
            data,
        }
    }
}

/// The number of seconds from 1970-01-01T00:00:00Z UTC until the specified UTC date/time ignoring leap seconds.
/// (<https://www.rfc-editor.org/rfc/rfc7519#section-2>)
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, Debug)]
pub struct NumericDate(pub i64);

/// accesses the underlying value
impl From<NumericDate> for i64 {
    fn from(t: NumericDate) -> Self {
        t.0
    }
}

impl From<DateTime<Utc>> for NumericDate {
    fn from(t: DateTime<Utc>) -> Self {
        Self(t.timestamp())
    }
}

impl From<NumericDate> for DateTime<Utc> {
    fn from(t: NumericDate) -> Self {
        Utc.timestamp_opt(t.0, 0).unwrap()
    }
}
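A minimal usage sketch of the authorizer with a symmetric key; the secret and the HS256 choice are placeholders for illustration (the backend builder further down defaults to RS256 when no algorithm is configured):

use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey};

let secret = b"an-example-hmac-secret";
let authorizer = JwtAuthorizer::new(
    EncodingKey::from_secret(secret),
    DecodingKey::from_secret(secret),
    Algorithm::HS256,
);

// encode embeds the user in the `data` claim next to iss/sub/exp/nbf/iat
let token = authorizer.encode(User {}).expect("encoding failed");
// decode verifies signature and expiry and hands the embedded user back
let _user: User = authorizer.decode(&token).expect("token rejected");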
191
src/authentication/layer.rs
Normal file
@ -0,0 +1,191 @@
use std::{
    future::Future,
    pin::Pin,
    task::{Context, Poll},
};

use axum::{
    extract::Request,
    http::{self, header::AUTHORIZATION, HeaderMap, HeaderName, Response},
};
use tower_layer::Layer;
use tower_service::Service;
use tracing::Instrument;

use super::{api_key::ApiKeyInfo, AuthenticationBackend, AuthenticationData, UserAuthData};

#[derive(Debug, Clone)]
pub struct AuthenticationLayer<T> {
    backend: AuthenticationBackend<T>,
}

impl<T> AuthenticationLayer<T> {
    pub fn new(backend: AuthenticationBackend<T>) -> Self {
        Self { backend }
    }
}

impl<S, T> Layer<S> for AuthenticationLayer<T>
where
    T: Clone,
{
    type Service = AuthenticationService<S, T>;

    fn layer(&self, service: S) -> Self::Service {
        AuthenticationService {
            backend: self.backend.clone(),
            service,
        }
    }
}

// This service implements the authentication behavior around the wrapped service.
#[derive(Debug, Clone)]
pub struct AuthenticationService<S, T> {
    backend: AuthenticationBackend<T>,
    service: S,
}

impl<ReqBody, ResBody, S, T> Service<Request<ReqBody>> for AuthenticationService<S, T>
where
    S: Service<Request<ReqBody>, Response = Response<ResBody>> + Clone + Send + 'static,
    S::Future: Send + 'static,
    ReqBody: Send + 'static,
    ResBody: Default + Send,
    T: ApiKeyInfo + Send + Sync + Clone + 'static,
{
    type Response = S::Response;
    type Error = S::Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;

    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.service.poll_ready(cx)
    }

    fn call(&mut self, mut req: Request<ReqBody>) -> Self::Future {
        let span = tracing::info_span!("call", user.id = tracing::field::Empty);

        let mut backend = self.backend.clone();

        // Because the inner service can panic until ready, we need to ensure we only
        // use the ready service.
        //
        // See: https://docs.rs/tower/latest/tower/trait.Service.html#be-careful-when-cloning-inner-services
        let clone = self.service.clone();
        let mut inner = std::mem::replace(&mut self.service, clone);

        Box::pin(
            async move {
                // check for api key if api key requirement is enabled
                let bearer_required = if let Some(ref header) = backend.api_key_header {
                    match extract_api_key(req.headers(), header) {
                        Some((id, hash)) => {
                            // get api key information
                            if let Some(key) = backend.session_store.get_api_key(&id).await {
                                // validate api key
                                if !key.validate(&hash) {
                                    tracing::error!("API Key invalid");
                                    let mut res = Response::default();
                                    *res.status_mut() = http::StatusCode::UNAUTHORIZED;
                                    return Ok(res);
                                }
                                // check if further authentication is required
                                if key.requires_auth() {
                                    true
                                } else {
                                    // add API Key to authenticated information
                                    backend.authenticated_user =
                                        Some(AuthenticationData::ApiKey(key));
                                    false
                                }
                            } else {
                                let mut res = Response::default();
                                *res.status_mut() = http::StatusCode::UNAUTHORIZED;
                                return Ok(res);
                            }
                        }
                        None => {
                            let mut res = Response::default();
                            *res.status_mut() = http::StatusCode::UNAUTHORIZED;
                            return Ok(res);
                        }
                    }
                } else {
                    true
                };

                // Authenticate with bearer if required
                if bearer_required {
                    // try to extract Authentication Bearer from headers
                    let bearer = extract_bearer(req.headers());
                    // check if bearer is present
                    if let Some(bearer) = bearer {
                        // check if bearer belongs to active session
                        if backend.session_store.includes(&bearer).await {
                            // try to decode bearer
                            match backend.jwt_authorizer.decode(&bearer) {
                                Ok(user) => {
                                    backend.authenticated_user =
                                        Some(AuthenticationData::User(UserAuthData {
                                            user,
                                            token: bearer,
                                        }));

                                    // tracing::Span::current().record("user.id", user.id().to_string());
                                }
                                Err(err) => {
                                    // remove token from session storage
                                    backend.session_store.remove(&bearer).await;
                                    // log error
                                    tracing::error!("Error decoding JWT token: {err}");
                                    // return internal server error
                                    let mut res = Response::default();
                                    *res.status_mut() = http::StatusCode::INTERNAL_SERVER_ERROR;
                                    return Ok(res);
                                }
                            }
                        }
                    }
                }

                req.extensions_mut().insert(backend);

                inner.call(req).await
            }
            .instrument(span),
        )
    }
}

fn extract_bearer(headers: &HeaderMap) -> Option<String> {
    // Check that it's a well-formed bearer and return it
    headers
        .get(AUTHORIZATION)
        .map(|header| {
            let split = header.to_str().unwrap_or("").split_once(' ');
            let bearer = match split {
                // Found proper bearer
                Some((name, contents)) if name == "Bearer" => Some(contents.to_string()),
                // Found nothing
                _ => None,
            };
            bearer
        })
        .flatten()
}

fn extract_api_key(headers: &HeaderMap, name: &HeaderName) -> Option<(String, String)> {
    // Check that it's a well-formed "<id>.<secret>" api key and return it
    headers
        .get(name)
        .map(|header| {
            let split = header.to_str().unwrap_or("").split_once(".");
            let api_key = match split {
                Some((id, hash)) => Some((id.to_string(), hash.to_string())),
                _ => None,
            };
            api_key
        })
        .flatten()
}
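For reference, the layer above accepts two header shapes per request. A hedged sketch of what a client would put on the wire; the x-api-key header name is an assumption here, it is whatever HeaderName the backend was configured with:

use axum::http::{header::AUTHORIZATION, Request};

// Bearer session token, as parsed by extract_bearer()
let jwt_token = "header.payload.signature"; // placeholder value
let with_jwt = Request::builder()
    .uri("/api/users")
    .header(AUTHORIZATION, format!("Bearer {jwt_token}"))
    .body(())
    .unwrap();

// API key, as parsed by extract_api_key(): "<key id>.<plaintext secret>"
let with_key = Request::builder()
    .uri("/api/users")
    .header("x-api-key", "a1b2c3d4.fortyCharAlphanumericSecret")
    .body(())
    .unwrap();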
76
src/authentication/middleware.rs
Normal file
@ -0,0 +1,76 @@
/// Login predicate middleware.
///
/// Requires that the user is authenticated.
#[macro_export]
macro_rules! login_required {
    ($backend_type:ty) => {{
        async fn is_authenticated(
            auth_session: $crate::axum_test::AuthenticationBackend<$backend_type>,
        ) -> bool {
            auth_session.is_authenticated()
        }

        $crate::predicate_required!(is_authenticated, axum::http::StatusCode::UNAUTHORIZED)
    }};
}

/// Permission predicate middleware.
///
/// Requires that the specified permissions, either user or group or both, are
/// all assigned to the user.
#[macro_export]
macro_rules! permission_required {
    ($backend_type:ty, login_url = $login_url:expr, redirect_field = $redirect_field:expr, $($perm:expr),+ $(,)?) => {{
        use $crate::AuthzBackend;

        async fn is_authorized(auth_session: $crate::axum_test::AuthSession<$backend_type>) -> bool {
            if let Some(ref user) = auth_session.user {
                let mut has_all_permissions = true;
                $(
                    has_all_permissions = has_all_permissions &&
                        auth_session.backend.has_perm(user, $perm.into()).await.unwrap_or(false);
                )+
                has_all_permissions
            } else {
                false
            }
        }

        $crate::predicate_required!(
            is_authorized,
            login_url = $login_url,
            redirect_field = $redirect_field
        )
    }};
}

/// Predicate middleware.
///
/// Can be specified with a login URL and next redirect field or an alternative
/// which implements [`IntoResponse`](axum::response::IntoResponse).
///
/// When the predicate passes, the request processes normally. On failure,
/// either a redirect to the specified login URL is issued or the alternative is
/// used as the response.
#[macro_export]
macro_rules! predicate_required {
    ($predicate:expr, $alternative:expr) => {{
        use axum::{
            extract::Request,
            middleware::{from_fn, Next},
            response::IntoResponse,
        };

        from_fn(
            |auth_session: $crate::axum_test::AuthenticationBackend<_>,
             req: Request,
             next: Next| async move {
                if $predicate(auth_session).await {
                    next.run(req).await
                } else {
                    $alternative.into_response()
                }
            },
        )
    }};
}
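Assuming the $crate::axum_test:: paths in these macros are updated to wherever the authentication module actually lives in this crate, a protected router would use them roughly like this; a sketch only, not part of the commit (ApiKey stands for the backend's api-key type):

use axum::{routing::get, Router};

// routes inside `protected` only run when is_authenticated() is true,
// everything else stays public
let protected = Router::new()
    .route("/users", get(|| async { "users" }))
    .route_layer(login_required!(ApiKey));

let app: Router = Router::new()
    .route("/health", get(|| async { "ok" }))
    .merge(protected);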
123
src/authentication/mod.rs
Normal file
@ -0,0 +1,123 @@
pub mod api_key;
mod extract;
mod jwt;
pub mod layer;
mod middleware;
mod sessions;
mod user;

// use api_key::ApiKey;
use axum::http::HeaderName;
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey};
use jwt::JwtAuthorizer;
use sessions::SessionStore;
use user::User;

#[derive(Debug, Clone)]
pub struct AuthenticationBackend<T> {
    sql_pool: (),
    session_store: SessionStore<T>,
    jwt_authorizer: JwtAuthorizer,
    api_key_header: Option<HeaderName>,
    authenticated_user: Option<AuthenticationData<T>>,
}

impl<T> AuthenticationBackend<T> {
    pub fn is_authenticated(&self) -> bool {
        self.authenticated_user.is_some()
    }
}

#[derive(Debug, Clone)]
pub enum AuthenticationData<T> {
    User(UserAuthData),
    ApiKey(T),
}

#[derive(Debug, Clone)]
pub struct UserAuthData {
    user: User,
    token: String,
}

#[derive(Clone, Default)]
pub struct AuthenticationBackendBuilder<ApiKey> {
    pool: Option<()>,
    api_header_key: Option<HeaderName>,
    loaded_api_keys: Option<Vec<ApiKey>>,
    jwt_decode_key: Option<DecodingKey>,
    jwt_encoding_key: Option<EncodingKey>,
    jwt_algorithm: Option<Algorithm>,
}

impl<ApiKey> AuthenticationBackendBuilder<ApiKey> {
    pub fn new() -> Self {
        Self {
            pool: None,
            api_header_key: None,
            loaded_api_keys: None,
            jwt_decode_key: None,
            jwt_encoding_key: None,
            jwt_algorithm: None,
        }
    }
    pub fn pool(self, pool: ()) -> Self {
        let mut me = self;
        me.pool = Some(pool);
        me
    }

    pub fn use_api_key(self, key_header_name: HeaderName) -> Self {
        let mut me = self;
        me.api_header_key = Some(key_header_name);
        me
    }

    pub fn api_keys(self, keys: Vec<ApiKey>) -> Self {
        let mut me = self;
        me.loaded_api_keys = Some(keys);
        me
    }

    pub fn jwt_decoding_key(self, key: DecodingKey) -> Self {
        let mut me = self;
        me.jwt_decode_key = Some(key);
        me
    }

    pub fn jwt_encoding_key(self, key: EncodingKey) -> Self {
        let mut me = self;
        me.jwt_encoding_key = Some(key);
        me
    }

    pub fn jwt_algorithm(self, algorithm: Algorithm) -> Self {
        let mut me = self;
        me.jwt_algorithm = Some(algorithm);
        me
    }

    pub fn build(self) -> Result<AuthenticationBackend<ApiKey>, String> {
        if self.pool.is_none() {
            return Err("Missing Pool".to_string());
        }
        if self.jwt_decode_key.is_none() {
            return Err("Missing JWT Decoding key".to_string());
        }
        if self.jwt_encoding_key.is_none() {
            return Err("Missing JWT Encoding key".to_string());
        }

        Ok(AuthenticationBackend {
            sql_pool: self.pool.unwrap(),
            session_store: SessionStore::new(self.loaded_api_keys),
            jwt_authorizer: JwtAuthorizer::new(
                self.jwt_encoding_key.unwrap(),
                self.jwt_decode_key.unwrap(),
                self.jwt_algorithm.unwrap_or(Algorithm::RS256),
            ),
            api_key_header: self.api_header_key,
            authenticated_user: None,
        })
    }
}
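Putting the builder and the layer together, a minimal wiring sketch under stated assumptions: the HMAC secret, header name and route are placeholders, and the pool is still the () stub the struct currently carries:

use axum::http::HeaderName;
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey};

let backend = AuthenticationBackendBuilder::<api_key::ApiKey>::new()
    .pool(()) // placeholder until a real PgPool is threaded through
    .use_api_key(HeaderName::from_static("x-api-key"))
    .jwt_encoding_key(EncodingKey::from_secret(b"change-me"))
    .jwt_decoding_key(DecodingKey::from_secret(b"change-me"))
    .jwt_algorithm(Algorithm::HS256)
    .build()
    .expect("incomplete authentication configuration");

// every request now passes through AuthenticationService before the routes
let app = axum::Router::new()
    .route("/health", axum::routing::get(|| async { "ok" }))
    .layer(layer::AuthenticationLayer::new(backend));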
54
src/authentication/sessions.rs
Normal file
@ -0,0 +1,54 @@
use std::{
    collections::{HashMap, HashSet},
    sync::Arc,
};

use tokio::sync::Mutex;

// use super::ApiKey;

#[derive(Debug, Clone)]
pub struct SessionStore<T> {
    active_sessions: Arc<Mutex<HashSet<String>>>,
    active_api_keys: Option<Arc<Mutex<HashMap<String, T>>>>,
}

impl<T> SessionStore<T> {
    pub fn new(initial_api_keys: Option<Vec<T>>) -> Self {
        Self {
            active_sessions: Arc::new(Mutex::new(HashSet::new())),
            active_api_keys: None, //initial_api_keys.map(|keys| {
            //     Arc::new(Mutex::new(HashMap::from_iter(
            //         keys.into_iter().map(|key| (key.id.clone(), key)),
            //     )))
            // }),
        }
    }

    pub async fn insert(&self, token: impl Into<String>) {
        let mut sessions = self.active_sessions.lock().await;
        sessions.insert(token.into());
    }

    pub async fn includes(&self, token: &str) -> bool {
        let sessions = self.active_sessions.lock().await;
        sessions.contains(token)
    }

    pub async fn remove(&self, token: &str) -> bool {
        let mut sessions = self.active_sessions.lock().await;
        sessions.remove(token)
    }

    pub async fn get_api_key(&self, key: &str) -> Option<T>
    where
        T: Clone,
    {
        if let Some(ref key_store) = self.active_api_keys {
            let lock = key_store.lock().await;
            lock.get(key).cloned()
        } else {
            None
        }
    }
}
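A small round-trip of the session half of the store as a sketch; note that the api-key half currently always answers None because active_api_keys is hard-wired to None above:

#[tokio::test]
async fn session_round_trip() {
    let store: SessionStore<()> = SessionStore::new(None);

    store.insert("token-abc").await;
    assert!(store.includes("token-abc").await);

    assert!(store.remove("token-abc").await);
    assert!(!store.includes("token-abc").await);
}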
4
src/authentication/user.rs
Normal file
@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User {}
310
src/cli.rs
Normal file
@ -0,0 +1,310 @@
use std::{collections::HashSet, path::PathBuf};

use clap::{command, Parser, Subcommand};
use error_stack::{Report, ResultExt};
use sqlx::PgPool;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio_util::sync::CancellationToken;

use crate::{
    api::{
        self,
        routes::api_keys::{models::ApiKey, sql::create_api_key},
    },
    config::Configuration,
    errors::AppError,
    APP_NAME, ROOT_PATH,
};

#[derive(Parser, Debug)]
#[command(name = APP_NAME, version = "1.0", about = "A tool with subcommands")]
pub struct Cli {
    #[arg(short, long, value_name = "FILE", help = "Configuration file location")]
    config: Option<PathBuf>,

    /// Subcommands for specific actions
    #[command(subcommand)]
    command: Option<Commands>,
}

#[derive(Debug, Subcommand)]
enum Commands {
    Serve {
        /// Port to listen on
        #[arg(short, long)]
        port: Option<u16>,
    },
    Migrate,
    #[command(subcommand)]
    Service(ServiceCommands),
    #[command(subcommand)]
    Create(CreateCommands),
}

#[derive(Debug, Subcommand)]
enum ServiceCommands {
    Install,
    Uninstall,
    Run,
}

#[derive(Debug, Subcommand)]
enum CreateCommands {
    ApiKey {
        /// name of API Key
        #[arg(short, long)]
        name: String,
        /// whether the API Key additionally requires user authentication
        #[arg(short, long, default_value = "true")]
        requires_auth: bool,
    },
}

impl Cli {
    pub fn config(&self) -> PathBuf {
        match self.config.as_ref() {
            Some(path) => path.clone(),
            None => ROOT_PATH.with_file_name("config.toml"),
        }
    }

    pub async fn handle(
        &self,
        config: &mut Configuration,
    ) -> Result<DaemonStatus, Report<AppError>> {
        match &self.command {
            Some(command) => match command {
                Commands::Serve { port } => {
                    if let Some(port) = port {
                        config.port = *port;
                    }
                    start_service(config, None).await
                }
                Commands::Migrate => {
                    let pool = PgPool::connect(&config.database_url)
                        .await
                        .change_context(AppError)?;
                    sqlx::migrate!("src/migrations")
                        .run(&pool)
                        .await
                        .change_context(AppError)?;

                    Ok(DaemonStatus::NotRunning)
                }
                Commands::Service(service_commands) => match service_commands {
                    ServiceCommands::Install => {
                        // TODO do things
                        Ok(DaemonStatus::NotRunning)
                    }
                    ServiceCommands::Uninstall => {
                        // TODO do things
                        Ok(DaemonStatus::NotRunning)
                    }
                    ServiceCommands::Run => {
                        // TODO do things
                        // create shutdown signals
                        // let (shutdown_send, mut shutdown_recv) = mpsc::unbounded_channel();
                        // Register generated `ffi_service_main` with the system and start the service, blocking
                        // this thread until the service is stopped.
                        #[cfg(windows)]
                        let _ = service_dispatcher::start(env!("CARGO_PKG_NAME"), ffi_service_main);
                        Ok(DaemonStatus::NotRunning)
                    }
                },
                Commands::Create(create_commands) => match create_commands {
                    CreateCommands::ApiKey {
                        name,
                        requires_auth,
                    } => {
                        // create API Key
                        let (key_secret, key) =
                            ApiKey::create(name, *requires_auth, HashSet::new(), config)
                                .change_context(AppError)?;

                        // Add API Key to Database
                        let pool = PgPool::connect(&config.database_url)
                            .await
                            .change_context(AppError)?;
                        create_api_key(&pool, &key).await.change_context(AppError)?;

                        // print API key secret to console
                        println!("Created API Key: {}.{key_secret}", key.id);

                        Ok(DaemonStatus::NotRunning)
                    }
                },
            },
            None => start_service(config, None).await,
        }
    }
}

pub enum DaemonStatus {
    NotRunning,
    Running((CancellationToken, Option<UnboundedReceiver<()>>)),
}

async fn start_service(
    config: &Configuration,
    shutdown_signal: Option<UnboundedReceiver<()>>,
) -> Result<DaemonStatus, Report<AppError>> {
    // create cancellation token
    let cancellation_token = CancellationToken::new();

    api::start(config, cancellation_token.clone()).await?;

    Ok(DaemonStatus::Running((cancellation_token, shutdown_signal)))
}

#[cfg(windows)]
mod windows {
    use error_stack::{Report, ResultExt};
    use std::{ffi::OsString, thread, time::Duration};
    use windows_service::{
        service::{
            ServiceAccess, ServiceErrorControl, ServiceInfo, ServiceStartType, ServiceState,
            ServiceType,
        },
        service_manager::{ServiceManager, ServiceManagerAccess},
    };

    use crate::errors::AppError;
    use crate::APP_NAME;

    pub fn install() -> Result<(), Report<AppError>> {
        let manager_access = ServiceManagerAccess::CONNECT | ServiceManagerAccess::CREATE_SERVICE;
        let service_manager = ServiceManager::local_computer(None::<&str>, manager_access)
            .change_context(AppError)?;

        // This example installs the service defined in `examples/ping_service.rs`.
        // In the real world code you would set the executable path to point to your own binary
        // that implements windows service.
        let service_binary_path = ::std::env::current_exe()
            .change_context(AppError)?
            .with_file_name(format!("{}.exe", env!("CARGO_PKG_NAME")));

        let service_info = ServiceInfo {
            name: OsString::from(env!("CARGO_PKG_NAME")),
            display_name: OsString::from(&format!("{APP_NAME}")),
            service_type: ServiceType::OWN_PROCESS,
            start_type: ServiceStartType::AutoStart,
            error_control: ServiceErrorControl::Normal,
            executable_path: service_binary_path,
            launch_arguments: vec!["service run".into()],
            dependencies: vec![],
            account_name: None, // run as System
            account_password: None,
        };
        let service = service_manager
            .create_service(&service_info, ServiceAccess::CHANGE_CONFIG)
            .into_report()
            .change_context(AppError)?;

        service
            .set_description(format!("{APP_NAME}"))
            .into_report()
            .change_context(AppError)?;
        Ok(())
    }

    pub fn remove() -> Result<(), Report<AppError>> {
        let manager_access = ServiceManagerAccess::CONNECT;
        let service_manager = ServiceManager::local_computer(None::<&str>, manager_access)
            .change_context(AppError)?;

        let service_access =
            ServiceAccess::QUERY_STATUS | ServiceAccess::STOP | ServiceAccess::DELETE;
        let service = service_manager
            .open_service(env!("CARGO_PKG_NAME"), service_access)
            .change_context(AppError)?;

        let service_status = service.query_status().change_context(AppError)?;
        if service_status.current_state != ServiceState::Stopped {
            service.stop().change_context(AppError)?;
            // Wait for service to stop
            thread::sleep(Duration::from_secs(1));
        }

        service.delete().into_report().change_context(AppError)?;

        Ok(())
    }

    // Service entry function which is called on background thread by the system with service
    // parameters. There is no stdout or stderr at this point so make sure to configure the log
    // output to file if needed.
    pub fn service_main(_arguments: Vec<OsString>) {
        if let Err(_e) = run_service() {
            // Handle the error, by logging or something.
        }
    }

    fn run_service() -> Result<(), Report<AppError>> {
        // Create a channel to be able to poll a stop event from the service worker loop.
        let (shutdown_tx, shutdown_rx) = tokio::sync::mpsc::channel(1);

        // Define system service event handler that will be receiving service events.
        let event_handler = move |control_event| -> ServiceControlHandlerResult {
            match control_event {
                // Notifies a service to report its current status information to the service
                // control manager. Always return NoError even if not implemented.
                ServiceControl::Interrogate => ServiceControlHandlerResult::NoError,

                // Handle stop
                ServiceControl::Stop => {
                    shutdown_tx.try_send(()).unwrap();
                    ServiceControlHandlerResult::NoError
                }

                _ => ServiceControlHandlerResult::NotImplemented,
            }
        };

        // Register system service event handler.
        // The returned status handle should be used to report service status changes to the system.
        let status_handle =
            service_control_handler::register(env!("CARGO_PKG_NAME"), event_handler)
                .change_context(AppError)?;

        // Tell the system that service is running
        status_handle
            .set_service_status(ServiceStatus {
                service_type: ServiceType::OWN_PROCESS,
                current_state: ServiceState::Running,
                controls_accepted: ServiceControlAccept::STOP,
                exit_code: ServiceExitCode::Win32(0),
                checkpoint: 0,
                wait_hint: Duration::default(),
                process_id: None,
            })
            .change_context(AppError)?;

        // run service - blocking thread
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap();

        rt.block_on(async move {
            if let Err(e) = execute(shutdown_rx).await {
                error!("{e:?}");
            }
        });

        // Tell the system that service has stopped.
        status_handle
            .set_service_status(ServiceStatus {
                service_type: ServiceType::OWN_PROCESS,
                current_state: ServiceState::Stopped,
                controls_accepted: ServiceControlAccept::empty(),
                exit_code: ServiceExitCode::Win32(0),
                checkpoint: 0,
                wait_hint: Duration::default(),
                process_id: None,
            })
            .into_report()
            .change_context(ServiceError::Starting)?;

        Ok(())
    }
}
88
src/config.rs
Normal file
@ -0,0 +1,88 @@
use std::path::PathBuf;

use config::{Config, Environment, File};
use error_stack::{Report, ResultExt};
use serde::Deserialize;

use crate::errors::AppError;

fn default_port() -> u16 {
    8080
}

#[derive(Debug, Deserialize, Default, Clone)]
#[allow(unused)]
pub struct ConfigInfo {
    pub location: Option<String>,
}

#[derive(Debug, Deserialize, Default, Clone)]
pub struct LDAP {
    pub server: String,
    #[serde(default)]
    pub skip_tls_verify: bool,
    #[serde(default)]
    pub ad_domain: String,
    #[serde(default)]
    pub user_search_base: String,
    #[serde(default)]
    pub elevated_search: bool,
    #[serde(default)]
    pub elevated_user_id: String,
    #[serde(default)]
    pub elevated_user_pw: String,
}

#[derive(Debug, Deserialize, Default, Clone)]
#[allow(unused)]
pub struct Configuration {
    #[serde(default)]
    pub config: ConfigInfo,
    #[serde(default)]
    pub debug: bool,
    #[serde(default)]
    pub token_secret: String,
    #[serde(default)]
    pub session_length: u64,
    #[serde(default = "default_port")]
    pub port: u16,
    #[serde(default)]
    #[serde(alias = "DATABASE_URL")]
    pub database_url: String,
    #[serde(default)]
    pub ldap: LDAP,
}

impl Configuration {
    pub fn new(location: PathBuf) -> Result<Self, Report<AppError>> {
        let s = Config::builder()
            .add_source(File::from(location.clone()))
            .add_source(Environment::default())
            .set_override("config.location", location.to_str().unwrap_or("default"))
            .change_context(AppError)?
            .build()
            .change_context(AppError)?;

        let settings = s.try_deserialize().change_context(AppError)?;

        Ok(settings)
    }

    pub fn check(&self) -> Result<(), Report<AppError>> {
        // check if elevated LDAP user is set if activated
        if self.ldap.elevated_search {
            if self.ldap.elevated_user_id == "" {
                return Err(Report::new(AppError).attach_printable(
                    "LDAP: Search with elevated user is activated but no user is configured",
                ));
            }

            if self.ldap.elevated_user_pw == "" {
                return Err(Report::new(AppError).attach_printable(
                    "LDAP: Search with elevated user is activated but no password is configured",
                ));
            }
        }
        Ok(())
    }
}
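To make the expected file shape concrete, here is a sketch that feeds an inline TOML document through the same config-crate pipeline; the key names mirror the struct fields above, the values are invented placeholders:

use config::{Config, File, FileFormat};

let sample = r#"
    debug = true
    token_secret = "change-me"
    session_length = 43200
    port = 8080
    database_url = "postgres://postgres:postgres@localhost/app"

    [ldap]
    server = "ldap://ldap.example.org:389"
    ad_domain = "DEV-AD"
    user_search_base = "OU=Users,DC=example,DC=org"
    elevated_search = false
"#;

let parsed: Configuration = Config::builder()
    .add_source(File::from_str(sample, FileFormat::Toml))
    .build()
    .expect("invalid sample")
    .try_deserialize()
    .expect("sample does not match Configuration");
assert_eq!(parsed.port, 8080);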
124
src/errors.rs
Normal file
@ -0,0 +1,124 @@
use std::fmt::{self, Display};

use axum::{
    http::StatusCode,
    response::{IntoResponse, Response},
    Json,
};
use error_stack::Context;
use serde::Serialize;
use sha2::digest::InvalidLength;
use tracing::error;

use crate::api::{backend::ApiBackend, routes::users::models::User};

#[derive(Debug)]
pub struct AppError;

impl fmt::Display for AppError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.write_str("Error on executing Application")
    }
}

impl Context for AppError {}

#[derive(Debug, Serialize)]
pub struct ErrorInfo<'a> {
    error_message: &'a str,
    debug_info: Option<&'a String>,
}

#[derive(Debug, PartialEq)]
pub enum ApiError {
    SQLQueryError(String),
    InvalidCredentials,
    InternalError(String),
    AccessDenied,
}

impl ApiError {
    fn as_error_info(&self) -> (StatusCode, ErrorInfo) {
        let (status, error_message, debug_info) = match self {
            Self::InvalidCredentials => (StatusCode::UNAUTHORIZED, "Invalid credentials", None),
            Self::SQLQueryError(error) => (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Database query error",
                Some(error),
            ),
            Self::InternalError(error) => (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Internal Server Error",
                Some(error),
            ),
            Self::AccessDenied => (StatusCode::FORBIDDEN, "Access Denied", None),
            // ApiError::WrongCredentials => (StatusCode::UNAUTHORIZED, "Wrong credentials"),
            // ApiError::MissingCredentials => (StatusCode::BAD_REQUEST, "Missing credentials"),
            // ApiError::TokenCreation => (StatusCode::INTERNAL_SERVER_ERROR, "Token creation error"),
            // ApiError::InvalidToken => (StatusCode::BAD_REQUEST, "Invalid token"),
            // ApiError::InvalidApiKey => (StatusCode::BAD_REQUEST, "Invalid api key"),
            // ApiError::InternalServerError => {
            //     (StatusCode::INTERNAL_SERVER_ERROR, "Unspecified error")
            // }
            // ApiError::AccessDenied => (StatusCode::UNAUTHORIZED, "Access denied"),
            // ApiError::InvalidPermissions => (StatusCode::BAD_REQUEST, "Invalid permissions"),
        };

        (
            status,
            ErrorInfo {
                error_message,
                debug_info,
            },
        )
    }
}

impl IntoResponse for ApiError {
    fn into_response(self) -> Response {
        let (status, error_info) = self.as_error_info();
        let body = Json(error_info);
        (status, body).into_response()
    }
}

impl From<sqlx::Error> for ApiError {
    fn from(error: sqlx::Error) -> Self {
        error!("{error}");
        Self::SQLQueryError(error.to_string())
    }
}

impl From<axum_jwt_login::Error<User, ApiBackend>> for ApiError {
    fn from(value: axum_jwt_login::Error<User, ApiBackend>) -> Self {
        match value {
            axum_jwt_login::Error::Jwt(error) => ApiError::InternalError(error.to_string()),
            axum_jwt_login::Error::Backend(error) => error,
            _ => unreachable!(),
        }
    }
}

impl From<argon2::Error> for ApiError {
    fn from(_: argon2::Error) -> Self {
        Self::InvalidCredentials
    }
}

impl From<InvalidLength> for ApiError {
    fn from(value: InvalidLength) -> Self {
        Self::InternalError(format!("Invalid HMac Key length: {value}"))
    }
}

impl Display for ApiError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let error_info = self.as_error_info().1;
        f.write_str(&format!(
            "{}: {}",
            error_info.error_message,
            error_info.debug_info.unwrap_or(&"".to_string())
        ))
    }
}

impl std::error::Error for ApiError {}
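Because ApiError implements IntoResponse, handlers can return it directly and axum serializes the ErrorInfo body for them; a short sketch of what that looks like from a route (the handler itself is invented):

use axum::Json;

async fn get_user_name() -> Result<Json<String>, ApiError> {
    // a missing permission surfaces as 403 with {"error_message": "Access Denied", ...}
    Err(ApiError::AccessDenied)
}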
55
src/ldap_test.rs
Normal file
@ -0,0 +1,55 @@
use ldap3::exop::WhoAmI;
use ldap3::{LdapConnAsync, Scope, SearchEntry};

pub async fn try_authentication() -> ldap3::result::Result<()> {
    println!("Hello, world!");

    // let (conn, mut ldap) = LdapConnAsync::new("ldap://ldap.forumsys.com:389").await?;
    let (conn, mut ldap) = LdapConnAsync::new("ldap://192.168.10.200:389").await?;
    ldap3::drive!(conn);

    let res = ldap
        // .simple_bind("cn=read-only-admin,dc=example,dc=com", "password")
        .simple_bind(
            // "CN=Abel Austin,OU=Accounting,OU=Mylab Users,DC=mylab,DC=local",
            "MYLAB\\A0H67123",
            "Passwort123",
        )
        .await?;
    println!("{res:?}");
    // let (res, re) = ldap
    //     .search(
    //         "ou=mathematicians,dc=example,dc=com",
    //         Scope::Subtree,
    //         "(objectClass=*)",
    //         vec!["*"],
    //     )
    //     .await?
    //     .success()?;

    // for entry in res {
    //     println!("{:?}", SearchEntry::construct(entry));
    // }

    let (res, re) = ldap
        .search(
            // "CN=Abel Austin,OU=Accounting,OU=Mylab Users,DC=mylab,DC=local",
            "OU=Mylab Users,DC=mylab,DC=local",
            Scope::Subtree,
            // "(objectClass=*)",
            "(&(ObjectCategory=Person)(sAMAccountName=A0H67123))",
            vec!["givenName", "sn"],
        )
        .await?
        .success()?;

    for entry in res {
        println!("{:?}", SearchEntry::construct(entry));
    }

    let test = ldap.extended(WhoAmI).await?.0;
    let whoami: ldap3::exop::WhoAmIResp = test.parse();
    println!("{whoami:?}");

    Ok(ldap.unbind().await?)
}
96
src/main.rs
Normal file
@ -0,0 +1,96 @@
use std::{path::PathBuf, time::Duration};

use clap::Parser;
use error_stack::Report;
use errors::AppError;
use once_cell::sync::Lazy;
use tokio::signal;
use tokio_util::task::TaskTracker;
use tracing::{error, info};

pub mod api;
mod cli;
mod config;
mod errors;
mod utils;

pub static ROOT_PATH: Lazy<PathBuf> = Lazy::new(|| match std::env::current_exe() {
    #[cfg(not(debug_assertions))]
    Ok(path) => path,
    #[cfg(debug_assertions)]
    Ok(_) => PathBuf::from("./"),
    Err(e) => {
        error!("Exiting on error for getting root path: {e:?}");
        panic!("Exiting on error for getting root path: {e:?}");
    }
});

pub const APP_NAME: &str = "Generic API Service";

#[tokio::main]
async fn main() -> Result<(), Report<AppError>> {
    dotenv::dotenv().ok();

    // prepare CLI
    let cli = cli::Cli::parse();

    // load config file
    let mut config = config::Configuration::new(cli.config())?;
    config.check()?;

    println!("{config:?}");

    // handle CLI input
    let daemon = cli.handle(&mut config).await?;

    match daemon {
        cli::DaemonStatus::NotRunning => {}
        cli::DaemonStatus::Running((cancellation_token, shutdown_signal)) => {
            // create task tracker
            let tracker = TaskTracker::new();

            // terminate signal
            #[cfg(unix)]
            let terminate = async {
                signal::unix::signal(signal::unix::SignalKind::terminate())
                    .expect("Failed to install termination signal handler")
                    .recv()
                    .await;
            };
            #[cfg(not(unix))]
            let terminate = std::future::pending::<()>();

            let shutdown_signal = async {
                match shutdown_signal {
                    Some(mut signal) => signal.recv().await,
                    None => std::future::pending::<Option<()>>().await,
                };
            };

            // wait for shutdown signals
            tokio::select! {
                _ = signal::ctrl_c() => {},
                _ = terminate => {},
                _ = shutdown_signal => {},
            }

            // send shutdown signal to application and wait
            cancellation_token.cancel();

            // close task tracker
            tracker.close();

            // Wait for everything to finish.
            tracker.wait().await;
            tokio::time::sleep(Duration::from_millis(200)).await;

            info!("{APP_NAME} gracefully shut down.");
        }
    };

    // let _ = axum_test::start_axum().await.unwrap();

    // let _ = ldap_test::try_authentication().await.unwrap();

    Ok(())
}
37
src/migrations/01_api_keys.sql
Normal file
37
src/migrations/01_api_keys.sql
Normal file
@ -0,0 +1,37 @@
-- Table: public.apikeys

-- DROP TABLE IF EXISTS public.apikeys;

CREATE TABLE IF NOT EXISTS public.apikeys
(
    "KeyID" character varying(32) COLLATE pg_catalog."default" NOT NULL,
    "Name" character varying(255) COLLATE pg_catalog."default" NOT NULL,
    "UserAuthRequired" boolean NOT NULL DEFAULT true,
    "Hash" bytea NOT NULL,
    "CreationDate" timestamp without time zone NOT NULL DEFAULT now(),
    "LastChanged" timestamp without time zone NOT NULL DEFAULT now(),
    CONSTRAINT apikeys_pkey PRIMARY KEY ("KeyID")
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.apikeys
    OWNER to postgres;

COMMENT ON COLUMN public.apikeys."KeyID"
    IS 'UUID of API Key';

COMMENT ON COLUMN public.apikeys."Name"
    IS 'Name/Description of API Key';

COMMENT ON COLUMN public.apikeys."Hash"
    IS 'Hashed value of API Key';

COMMENT ON COLUMN public.apikeys."UserAuthRequired"
    IS 'Indication if this api key requires additional user authentication';

COMMENT ON COLUMN public.apikeys."CreationDate"
    IS 'Time of creation';

COMMENT ON COLUMN public.apikeys."LastChanged"
    IS 'Time of last modification';
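For orientation, a Rust-side lookup against this table might look roughly like the following. This is a sketch only: it assumes the sqlx Postgres pool suggested by the project's dependencies, and the helper name is made up for illustration.

// Hypothetical helper: fetch the stored hash for a given key id.
async fn fetch_key_hash(
    pool: &sqlx::PgPool,
    key_id: &str,
) -> Result<Option<Vec<u8>>, sqlx::Error> {
    let row: Option<(Vec<u8>,)> =
        sqlx::query_as(r#"SELECT "Hash" FROM public.apikeys WHERE "KeyID" = $1"#)
            .bind(key_id)
            .fetch_optional(pool)
            .await?;
    Ok(row.map(|(hash,)| hash))
}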
19
src/migrations/02_users.sql
Normal file
@ -0,0 +1,19 @@
CREATE TABLE public.users
(
    "UserID" character varying(10) NOT NULL,
    "ActiveDirectoryAuth" boolean NOT NULL DEFAULT false,
    "Name" character varying(250) NOT NULL DEFAULT '',
    "Surname" character varying(250) NOT NULL DEFAULT '',
    "Email" character varying(500) NOT NULL DEFAULT '',
    "Password" character varying(255) NOT NULL DEFAULT '',
    "CreationDate" timestamp without time zone NOT NULL DEFAULT NOW(),
    "LastChanged" timestamp without time zone NOT NULL DEFAULT NOW(),
    "StatusFlag" smallint NOT NULL,
    PRIMARY KEY ("UserID")
);

ALTER TABLE IF EXISTS public.users
    OWNER to postgres;

COMMENT ON TABLE public.users
    IS 'Table containing user information';
12
src/migrations/03_groups.sql
Normal file
@ -0,0 +1,12 @@
CREATE TABLE public.groups
(
    "GroupID" serial NOT NULL,
    "GroupName" character varying(255) NOT NULL DEFAULT '',
    PRIMARY KEY ("GroupID")
);

ALTER TABLE IF EXISTS public.groups
    OWNER to postgres;

COMMENT ON TABLE public.groups
    IS 'Group information';
18
src/migrations/04_user_permissions.sql
Normal file
@ -0,0 +1,18 @@
CREATE TABLE IF NOT EXISTS public.user_permissions
(
    "UserID" character varying(10) COLLATE pg_catalog."default" NOT NULL,
    "Permission" character varying(250) COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT "DistinctUserPermission" UNIQUE ("UserID", "Permission"),
    CONSTRAINT "UserID" FOREIGN KEY ("UserID")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.user_permissions
    OWNER to postgres;

COMMENT ON TABLE public.user_permissions
    IS 'Contains the permissions for every user';
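As a rough illustration of how this relation would be consulted from the service (same sqlx assumption as above; the helper name is illustrative and not part of this commit):

// Hypothetical check: does the user hold the given permission?
async fn user_has_permission(
    pool: &sqlx::PgPool,
    user_id: &str,
    permission: &str,
) -> Result<bool, sqlx::Error> {
    let row: Option<(String,)> = sqlx::query_as(
        r#"SELECT "Permission" FROM public.user_permissions
           WHERE "UserID" = $1 AND "Permission" = $2"#,
    )
    .bind(user_id)
    .bind(permission)
    .fetch_optional(pool)
    .await?;
    Ok(row.is_some())
}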
18
src/migrations/05_group_permssions.sql
Normal file
@ -0,0 +1,18 @@
CREATE TABLE IF NOT EXISTS public.group_permissions
(
    "GroupID" integer NOT NULL,
    "Permission" character varying(250) COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT "DistinctGroupPermission" UNIQUE ("GroupID", "Permission"),
    CONSTRAINT "GroupID" FOREIGN KEY ("GroupID")
        REFERENCES public.groups ("GroupID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.group_permissions
    OWNER to postgres;

COMMENT ON TABLE public.group_permissions
    IS 'Group -> Permission relation';
6
src/utils.rs
Normal file
@ -0,0 +1,6 @@
pub(crate) fn create_random(len: usize) -> String {
    rand::Rng::sample_iter(rand::thread_rng(), &rand::distributions::Alphanumeric)
        .take(len)
        .map(char::from)
        .collect()
}
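create_random is presumably the helper used to mint plaintext key or secret material (the hashed value then landing in apikeys."Hash" above). A hypothetical call site, assuming the function is reachable as crate::utils::create_random:

// Hypothetical usage: mint a 32-character alphanumeric secret.
fn example_secret() -> String {
    let secret = crate::utils::create_random(32);
    debug_assert_eq!(secret.len(), 32);
    secret
}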