15 Commits
test ... main

Author SHA1 Message Date
dfee02629d optimize release build
All checks were successful
Backend Actions / check (push) Successful in 1m21s
Frontend Actions / check (push) Successful in 1m33s
Backend Actions / build (push) Successful in 3m33s
Frontend Actions / build (push) Successful in 1m27s
Frontend Actions / test (push) Successful in 1m10s
Backend Actions / test (push) Successful in 5m28s
2025-08-04 09:49:52 +01:00
2914abbcd0 fix frontend lints
Some checks failed
Backend Actions / check (push) Successful in 2m27s
Backend Actions / test (push) Failing after 2m37s
Backend Actions / build (push) Successful in 3m46s
Frontend Actions / check (push) Successful in 1m24s
Frontend Actions / build (push) Successful in 1m22s
Frontend Actions / test (push) Successful in 46s
2025-08-01 16:15:31 +01:00
70334ea0ae add prepared sqlx files
Some checks failed
Backend Actions / check (push) Has started running
Backend Actions / build (push) Has started running
Backend Actions / test (push) Has been cancelled
Frontend Actions / build (push) Has been cancelled
Frontend Actions / test (push) Has been cancelled
Frontend Actions / check (push) Has been cancelled
2025-08-01 16:14:33 +01:00
c96b2adada fix tests and properly integrate sqlx
Some checks failed
Backend Actions / check (push) Failing after 3m26s
Frontend Actions / check (push) Failing after 3m17s
Backend Actions / test (push) Failing after 3m20s
Frontend Actions / test (push) Successful in 51s
Frontend Actions / build (push) Successful in 56s
Backend Actions / build (push) Failing after 10m57s
2025-08-01 15:21:39 +01:00
6ec6aa2aa7 add database url for sqlx
All checks were successful
Backend Actions / check (push) Successful in 38s
Backend Actions / build (push) Successful in 1m11s
Frontend Actions / check (push) Successful in 1m36s
Backend Actions / test (push) Successful in 2m22s
Frontend Actions / build (push) Successful in 1m28s
Frontend Actions / test (push) Successful in 46s
2025-07-28 02:01:47 +01:00
46cb354c9a run in container
All checks were successful
Backend Actions / build (push) Successful in 1m8s
Backend Actions / check (push) Successful in 1m34s
Frontend Actions / check (push) Successful in 1m37s
Frontend Actions / build (push) Successful in 1m33s
Backend Actions / test (push) Successful in 3m41s
Frontend Actions / test (push) Successful in 1m5s
2025-07-28 01:57:13 +01:00
bfda9d3735 fix lints 2025-07-28 01:31:22 +01:00
84bbbab6ea add prepared sqlx files
Some checks failed
Backend Actions / build (push) Successful in 52s
Backend Actions / check (push) Failing after 1m7s
Frontend Actions / check (push) Successful in 1m35s
Frontend Actions / build (push) Successful in 1m36s
Backend Actions / test (push) Failing after 2m1s
Frontend Actions / test (push) Successful in 52s
2025-07-28 01:30:32 +01:00
8a6f8b5875 add database service to backend test
Some checks failed
Backend Actions / check (push) Failing after 2m15s
Backend Actions / build (push) Failing after 3m15s
Backend Actions / test (push) Failing after 3m22s
Frontend Actions / check (push) Successful in 1m16s
Frontend Actions / build (push) Has been cancelled
Frontend Actions / test (push) Has been cancelled
2025-07-28 01:01:08 +01:00
79e43f19df add database connection to backend 2025-07-28 00:56:41 +01:00
20f64cd35d Add request id header to incoming requests
All checks were successful
Backend Actions / check (push) Successful in 1m1s
Backend Actions / build (push) Successful in 1m51s
Frontend Actions / check (push) Successful in 59s
Backend Actions / test (push) Successful in 2m20s
Frontend Actions / build (push) Successful in 59s
Frontend Actions / test (push) Successful in 55s
2025-07-25 23:23:16 +01:00
6bd6dbad38 Add workflow for ui
All checks were successful
Backend Actions / check (push) Successful in 12s
Backend Actions / build (push) Successful in 22s
Backend Actions / test (push) Successful in 32s
Frontend Actions / check (push) Successful in 1m2s
Frontend Actions / build (push) Successful in 1m1s
Frontend Actions / test (push) Successful in 51s
2025-07-25 23:03:58 +01:00
df81605ecc run prettier on ui 2025-07-25 22:39:10 +01:00
8c12ca0024 add ui testing
All checks were successful
Backend Actions / check (push) Successful in 13s
Backend Actions / build (push) Successful in 28s
Backend Actions / test (push) Successful in 37s
2025-07-25 17:28:41 +01:00
70835f04fa add ui docker build
All checks were successful
Backend Actions / check (push) Successful in 28s
Backend Actions / build (push) Successful in 27s
Backend Actions / test (push) Successful in 35s
2025-07-25 17:12:13 +01:00
40 changed files with 3110 additions and 157 deletions

View File

@ -41,7 +41,22 @@ jobs:
run: cargo build --release --locked
test:
runs-on: rust-nextest
runs-on: ubuntu-latest
container: git.molloy.xyz/fergus-molloy/ubuntu:rust-nextest
services:
postgres:
image: postgres:17-alpine
ports:
- "5432:5432"
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DATABASE: nuchat_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
defaults:
run:
working-directory: ./backend
@ -59,6 +74,10 @@ jobs:
run: cargo build --locked --bin nuchat
- name: Run Tests
run: ./scripts/test.sh
env:
POSTGRES_URL: "postgresql://postgres:postgres@postgres:5432"
DATABASE_URL: "postgresql://postgres:postgres@postgres:5432"
SKIP_DOCKER: "1"
- name: Upload Test Logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
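Note on the service wiring above: because the test job itself runs in a container, the Postgres service is reached by its service name rather than localhost, which is what the POSTGRES_URL and DATABASE_URL values encode. A minimal connection sketch under that assumption, mirroring what main.rs does later in this diff (nuchat_dev is the database the test script creates):

use sqlx::{Pool, Postgres};

// Sketch only: resolve the database the way the CI env vars describe it.
// Inside the job container the hostname is the service name "postgres".
#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let base = std::env::var("POSTGRES_URL")
        .unwrap_or_else(|_| "postgresql://postgres:postgres@postgres:5432".into());
    let pool = Pool::<Postgres>::connect(&format!("{base}/nuchat_dev")).await?;
    sqlx::query("SELECT 1").execute(&pool).await?; // basic connectivity check
    Ok(())
}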

View File

@ -0,0 +1,72 @@
name: Frontend Actions
run-name: ${{ gitea.actor }} is running frontend actions
on: [push]
jobs:
check:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./ui
steps:
- uses: actions/setup-node@v4
with:
node-version: '24'
- uses: actions/checkout@v4
- name: Load Cache
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: ${{ runner.os }}-node-check-${{ hashFiles('ui/package-lock.json') }}
- name: Install Dependencies
run: npm ci
- name: Run Eslint
run: npm run lint
- name: Run Prettier
run: npm run fmt -- . --check
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./ui
steps:
- uses: actions/setup-node@v4
with:
node-version: '24'
- uses: actions/checkout@v4
- name: Load Cache
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: ${{ runner.os }}-node-build-${{ hashFiles('ui/package-lock.json') }}
- run: node --version
- name: Install Dependencies
run: npm ci
- name: Build UI
run: npm run build
test:
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./ui
steps:
- uses: actions/setup-node@v4
with:
node-version: '24'
- uses: actions/checkout@v4
- name: Load Cache
uses: actions/cache@v4
with:
path: |
ui/node_modules
key: ${{ runner.os }}-node-test-${{ hashFiles('ui/package-lock.json') }}
- run: node --version
- name: Install Dependencies
run: npm ci
- name: Run tests
run: npm run test

View File

@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO messages (id, contents, created_at) VALUES($1, $2, current_timestamp) RETURNING id",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": [
false
]
},
"hash": "060e058f46bf96f6505fb8a1d1b305c062c5c8a7ada56b34d6c9c229de3b9b34"
}
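These .sqlx/*.json files appear to be the offline query metadata written by `cargo sqlx prepare`; when no live DATABASE_URL is available at compile time, the sqlx query macros validate queries against this JSON instead. As a hedged sketch, the metadata above describes an insert like the one in backend/src/router/messages.rs further down:

use sqlx::PgPool;
use uuid::Uuid;

// Sketch: the same INSERT the metadata above describes; in offline mode
// the macro checks the query string and parameter types against the JSON.
async fn insert_message(pool: &PgPool, contents: &str) -> Result<Uuid, sqlx::Error> {
    let id = Uuid::now_v7();
    let row = sqlx::query!(
        "INSERT INTO messages (id, contents, created_at) VALUES($1, $2, current_timestamp) RETURNING id",
        id,
        contents
    )
    .fetch_one(pool)
    .await?;
    Ok(row.id)
}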

View File

@ -0,0 +1,32 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id, contents, created_at FROM messages",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "contents",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "created_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false
]
},
"hash": "5af6cb153161fced4a308b34b5c303f6907a9e06021bbeca31d41de236514589"
}

View File

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "select exists(SELECT datname FROM pg_catalog.pg_database WHERE datname = $1);",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Name"
]
},
"nullable": [
null
]
},
"hash": "6060467ee8046709f486ab35233e43eb849de082aa86c4d877b0c0818e27c104"
}

View File

@ -0,0 +1,34 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id, name, created_at FROM servers WHERE id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "created_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "66b1dcdcbc9eae32237b1f712c32498003efb42d843b5bb7d33668d5cc3199a7"
}

View File

@ -0,0 +1,34 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id, contents, created_at FROM messages WHERE id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "contents",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "created_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "a86c7ef23b4f356ffccb0be2af40e6fbbde85dd57fd2a37b6654279c55d441e3"
}

View File

@ -0,0 +1,32 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id, name, created_at FROM servers",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "created_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false
]
},
"hash": "cc8f315cf11481ed3ef0cf8f08c54fe508833afc74b18b4b47a7b7ee217a2439"
}

View File

@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO servers (id, name, created_at) VALUES($1, $2, current_timestamp) RETURNING id",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": [
false
]
},
"hash": "d6d5a09a7a6849b4610269cdc1a121caedd1b8853f7de70b02a8ed5a9c5615aa"
}

backend/Cargo.lock (generated, 1012 changed lines)

File diff suppressed because it is too large.

View File

@ -4,14 +4,18 @@ version = "0.1.0"
edition = "2024"
[dependencies]
axum = "0.8.4"
axum = { version = "0.8.4", features = [] }
chrono = { version = "0.4.41", features = ["serde"] }
clap = { version = "4.5.41", features = ["derive"] }
futures = "0.3.31"
http = "1.3.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.141"
sqlx = { version = "0.8.6", features = ["postgres", "macros", "runtime-tokio", "uuid", "chrono"] }
tap = "1.0.1"
tokio = { version = "1.0", features = ["full"] }
tower = { version = "0.5.2", features = ["full"] }
tower-http = { version = "0.6.6", features = ["timeout", "trace", "auth"] }
tower-http = { version = "0.6.6", features = ["timeout", "trace", "auth", "request-id"] }
tower-http-util = "0.1.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
@ -31,3 +35,9 @@ name ="nuchat"
default = [ "shutdown" ]
all = ["shutdown"]
shutdown = []
[profile.release]
codegen-units = 1
lto = "fat"
panic = "abort"
strip = "symbols"

View File

@ -0,0 +1,7 @@
-- Add migration script here
CREATE TABLE servers (
id uuid NOT NULL,
PRIMARY KEY(id),
name TEXT NOT NULL,
created_at timestamptz NOT NULL
)

View File

@ -0,0 +1,7 @@
-- Add migration script here
CREATE TABLE messages (
id uuid NOT NULL,
PRIMARY KEY(id),
contents TEXT NOT NULL,
created_at timestamptz NOT NULL
)
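In this branch the migrations are applied externally with sqlx-cli (see scripts/test.sh below, which runs sqlx migrate run). An alternative, shown only as a hedged sketch and not used here, is to embed the ./migrations directory in the binary and apply it at startup:

// Sketch only: embed ./migrations at compile time and apply them after
// connecting; the repository instead runs `sqlx migrate run` from test.sh.
sqlx::migrate!("./migrations")
    .run(&pool)
    .await
    .expect("failed to run database migrations");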

View File

@ -1,11 +1,34 @@
#!/usr/bin/env bash
POSTGRES_URL=${POSTGRES_URL:-"postgresql://postgres:postgres@localhost:5432"}
if ! command -v cargo-nextest > /dev/null 2>&1; then
echo "Command not found cargo-nextest"
echo "Try installing with cargo install cargo-nextest"
exit 1
fi
if ! command -v sqlx > /dev/null 2>&1; then
echo "Command not found sqlx"
echo "Try installing with cargo install sqlx-cli"
exit 1
fi
export DATABASE_URL="$POSTGRES_URL/nuchat_dev"
if [ -z "$SKIP_DOCKER" ]; then
# force restart database so no connections
# prevent database from being dropped
docker compose -f ../docker-compose.yml down
docker compose -f ../docker-compose.yml up -d db
sleep 1
fi
# recreate database and tables
sqlx database drop -y
sqlx database create
sqlx migrate run
if [ ! -d logs ]; then
mkdir logs
fi
@ -14,7 +37,8 @@ fi
curl -s -X POST localhost:7001/admin/shutdown 2>&1 > /dev/null
# start server
cargo run -- --port 7001 2>&1 > logs/nuchat.log &
cargo run -- --port 7001 --postgres-url "$POSTGRES_URL" --database "nuchat_dev" 2>&1 > logs/nuchat.log &
sleep 1
# run tests
cargo nextest run --color=always 2>&1 | tee logs/test-output.log
cargo nextest run --color=always --no-fail-fast 2>&1 | tee logs/test-output.log

View File

@ -3,4 +3,6 @@ pub struct Config {
pub port: u32,
pub host: String,
pub admin_secret: Option<String>,
pub postgres_url: String,
pub database_name: String,
}
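The hunk does not show the struct's derives, but the router tests later in this diff call Config::default(), so presumably a derived (or hand-written) Default exists. A hedged sketch of what would satisfy those tests:

// Assumed, not visible in the diff: the tests only need *some* Config,
// so a derived Default (zero port, empty strings, no secret) is enough.
#[derive(Clone, Default)]
pub struct Config {
    pub port: u32,
    pub host: String,
    pub admin_secret: Option<String>,
    pub postgres_url: String,
    pub database_name: String,
}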

View File

@ -1,5 +1,7 @@
mod config;
mod router;
mod state;
pub use config::Config;
pub use router::app;
pub use state::{AppState, NuState};

View File

@ -1,8 +1,12 @@
use std::sync::mpsc;
use clap::Parser;
use nuchat::AppState;
use nuchat::Config;
use nuchat::NuState;
use nuchat::app;
use sqlx::Pool;
use sqlx::Postgres;
use tokio::net::TcpListener;
use tokio::signal;
use tracing::info;
@ -22,6 +26,14 @@ struct Args {
/// Admin secret to use, leave blank to disable
#[arg(long)]
admin_secret: Option<String>,
/// postgres base url, should contain user and host info
#[arg(long, default_value = "postgres://postgres:postgres@localhost:5432")]
postgres_url: String,
/// name of database to use
#[arg(long, default_value = "nuchat_dev")]
database: String,
}
#[tokio::main]
@ -37,16 +49,26 @@ async fn main() {
.with(tracing_subscriber::fmt::layer().with_target(false))
.init();
let database_url = format!("{}/{}", config.0.postgres_url, config.0.database_name);
info!("Connecting to database: {database_url}");
let pool = Pool::<Postgres>::connect(&database_url)
.await
.expect("Could not connect to database");
let listener = TcpListener::bind(format!("{}:{}", config.0.host, config.0.port))
.await
.unwrap();
tracing::debug!("listening on {}", listener.local_addr().unwrap());
let (app, rx) = app(&config.0);
axum::serve(listener, app)
let state = AppState::new(NuState::new(pool.clone(), config.0));
let (app, rx) = app(&state);
axum::serve(listener, app.with_state(state))
.with_graceful_shutdown(shutdown_signal(rx))
.await
.unwrap();
pool.close().await;
info!("Server stopped");
}
@ -91,6 +113,8 @@ impl BinConfig {
port: args.port,
host: args.host,
admin_secret: args.admin_secret,
postgres_url: args.postgres_url,
database_name: args.database,
})
}
}

View File

@ -1,33 +1,64 @@
mod admin;
mod healthcheck;
mod messages;
mod servers;
use std::sync::mpsc;
use std::time::Duration;
use crate::config;
use crate::AppState;
use axum::extract::Request;
use axum::routing::get;
use axum::{Router, body::Body};
use http::{HeaderName, HeaderValue};
use tower::ServiceBuilder;
use tower_http::request_id::{MakeRequestId, RequestId, SetRequestIdLayer};
use tower_http::timeout::TimeoutLayer;
use tower_http::trace::TraceLayer;
use tracing::Level;
use uuid::Uuid;
pub fn app(config: &config::Config) -> (Router, mpsc::Receiver<bool>) {
#[derive(Clone)]
struct RequestIdLayer;
impl MakeRequestId for RequestIdLayer {
fn make_request_id<B>(&mut self, _: &http::Request<B>) -> Option<RequestId> {
let id = Uuid::now_v7().to_string();
Some(RequestId::new(id.parse().unwrap()))
}
}
pub fn app(state: &AppState) -> (Router<AppState>, mpsc::Receiver<bool>) {
let (tx, rx) = mpsc::channel();
(
Router::new()
.with_state(state.clone())
.route("/healthcheck", get(healthcheck::healthcheck))
.route("/forever", get(std::future::pending::<()>))
.nest("/admin", admin::router(tx, config))
.route(
"/servers",
get(servers::get_servers).post(servers::create_server),
)
.route("/servers/{id}", get(servers::get_server_by_id))
.route(
"/messages",
get(messages::get_messages).post(messages::create_message),
)
.route("/messages/{id}", get(messages::get_message_by_id))
.nest("/admin", admin::router(tx, state))
.layer(
ServiceBuilder::new()
.layer(SetRequestIdLayer::new(
HeaderName::from_static("x-request-id"),
RequestIdLayer,
))
.layer(
TraceLayer::new_for_http().make_span_with(|req: &Request<Body>| {
let default = HeaderValue::from_static("<missing>");
let req_id = req.headers().get("x-request-id").unwrap_or(&default);
tracing::span!(
Level::DEBUG,
"request",
trace_id = Uuid::now_v7().to_string(),
req_id = req_id.to_str().unwrap(),
method = format!("{}", req.method()),
uri = format!("{}", req.uri()),
)
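The MakeRequestId impl above stamps every request with a UUIDv7 x-request-id before the trace layer opens its span. A hypothetical sanity check (not part of this diff) that the generated id is a valid UUID could look like this:

#[cfg(test)]
mod request_id_test {
    use super::*;
    use tower_http::request_id::MakeRequestId;

    #[test]
    fn generated_request_id_is_a_uuid() {
        // RequestIdLayer is the private maker defined above in router.rs.
        let mut maker = RequestIdLayer;
        let req = http::Request::new(());
        let id = maker.make_request_id(&req).expect("an id is always generated");
        let value = id.header_value().to_str().unwrap();
        assert!(uuid::Uuid::parse_str(value).is_ok());
    }
}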

View File

@ -7,13 +7,17 @@ use http::StatusCode;
use tower_http::validate_request::ValidateRequestHeaderLayer;
use tracing::{info, warn};
pub fn router(tx: mpsc::Sender<bool>, config: &crate::Config) -> Router {
let r = Router::new().route("/", get(async || StatusCode::OK));
use crate::AppState;
pub fn router(tx: mpsc::Sender<bool>, state: &AppState) -> Router<AppState> {
let r = Router::new()
.with_state(state.clone())
.route("/", get(async || StatusCode::OK));
let r = add_shutdown_endpoint(r, tx);
if let Some(secret) = config.admin_secret.clone() {
if let Some(secret) = &state.config.admin_secret {
info!("Enabled admin authorization");
r.layer(ValidateRequestHeaderLayer::bearer(&secret))
r.layer(ValidateRequestHeaderLayer::bearer(secret))
} else {
warn!("Admin authorization disabled");
r
@ -21,7 +25,7 @@ pub fn router(tx: mpsc::Sender<bool>, config: &crate::Config) -> Router {
}
#[cfg(feature = "shutdown")]
fn add_shutdown_endpoint(r: Router, tx: mpsc::Sender<bool>) -> Router {
fn add_shutdown_endpoint(r: Router<AppState>, tx: mpsc::Sender<bool>) -> Router<AppState> {
r.route(
"/shutdown",
post(async move || {
@ -44,17 +48,21 @@ fn add_shutdown_endpoint(r: Router, _: mpsc::Sender<bool>) -> Router {
mod test {
use axum::{body::Body, http::Request};
use http::header;
use sqlx::PgPool;
use tower::ServiceExt;
use crate::config;
use crate::{config, state::NuState};
use super::*;
#[tokio::test]
async fn test_authorization_disables_when_no_secret_set() {
#[sqlx::test]
async fn test_authorization_disables_when_no_secret_set(pool: PgPool) {
let (tx, _) = mpsc::channel();
let resp = router(tx, &config::Config::default())
let state = AppState::new(NuState::new(pool, config::Config::default()));
let resp = router(tx, &state)
.with_state(state)
.oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
.await
.unwrap();
@ -62,8 +70,8 @@ mod test {
assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_authorization_unauthorized_no_bearer_token() {
#[sqlx::test]
async fn test_authorization_unauthorized_no_bearer_token(pool: PgPool) {
let (tx, _) = mpsc::channel();
let conf = config::Config {
@ -71,7 +79,10 @@ mod test {
..Default::default()
};
let resp = router(tx, &conf)
let state = AppState::new(NuState::new(pool, conf));
let resp = router(tx, &state)
.with_state(state)
.oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
.await
.unwrap();
@ -79,8 +90,8 @@ mod test {
assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_authorization_unauthorized_invalid_bearer_token() {
#[sqlx::test]
async fn test_authorization_unauthorized_invalid_bearer_token(pool: PgPool) {
let (tx, _) = mpsc::channel();
let conf = config::Config {
@ -88,7 +99,10 @@ mod test {
..Default::default()
};
let resp = router(tx, &conf)
let state = AppState::new(NuState::new(pool, conf));
let resp = router(tx, &state)
.with_state(state)
.oneshot(
Request::builder()
.uri("/")
@ -102,8 +116,8 @@ mod test {
assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_authorization_authorized_valid_bearer_token() {
#[sqlx::test]
async fn test_authorization_authorized_valid_bearer_token(pool: PgPool) {
let (tx, _) = mpsc::channel();
let conf = config::Config {
@ -111,7 +125,10 @@ mod test {
..Default::default()
};
let resp = router(tx, &conf)
let state = AppState::new(NuState::new(pool, conf));
let resp = router(tx, &state)
.with_state(state)
.oneshot(
Request::builder()
.uri("/")

View File

@ -1,6 +1,78 @@
use axum::extract::{self, State};
use axum::response::Json;
use http::StatusCode;
use serde_json::{Value, json};
use tracing::error;
pub async fn healthcheck() -> Json<Value> {
Json(json!({"healthy": true}))
use crate::AppState;
pub async fn healthcheck(State(s): extract::State<AppState>) -> Result<Json<Value>, StatusCode> {
sqlx::query!(
"select exists(SELECT datname FROM pg_catalog.pg_database WHERE datname = $1);",
s.config.database_name
)
.fetch_one(&s.db)
.await
.and_then(|x| x.exists.ok_or(sqlx::Error::RowNotFound))
.and_then(|db| {
if db {
Ok(Json(json!({"healthy": db})))
} else {
error!("Could not find configured database in postgres");
Err(sqlx::Error::RowNotFound)
}
})
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
}
#[cfg(test)]
mod test {
use axum::{Router, body::Body, routing::get};
use http::Request;
use sqlx::PgPool;
use tower::ServiceExt;
use crate::{Config, NuState};
use super::*;
#[sqlx::test]
async fn healthcheck_passes_with_db_connection(pool: PgPool) {
let state = AppState::new(NuState::new(
pool,
Config {
database_name: String::from("nuchat_dev"),
..Config::default()
},
));
let resp = Router::new()
.route("/", get(healthcheck))
.with_state(state)
.oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::OK);
}
#[sqlx::test]
async fn healthcheck_fails_db_doesnt_exist(pool: PgPool) {
let state = AppState::new(NuState::new(
pool,
Config {
database_name: String::from("asdfasdfasdf"),
..Config::default()
},
));
let resp = Router::new()
.route("/", get(healthcheck))
.with_state(state)
.oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
}
}

View File

@ -0,0 +1,68 @@
use axum::{
Form, Json,
body::Body,
extract::{Path, State},
response::{IntoResponse, Response},
};
use http::StatusCode;
use sqlx::types::chrono;
use tracing::info;
use uuid::Uuid;
use crate::AppState;
#[derive(serde::Deserialize, serde::Serialize, Debug)]
pub struct Message {
pub contents: String,
pub id: Uuid,
pub created_at: chrono::DateTime<chrono::Utc>,
}
#[derive(serde::Deserialize)]
pub struct CreateMessage {
pub contents: String,
}
pub async fn get_messages(State(s): State<AppState>) -> Result<Json<Vec<Message>>, StatusCode> {
sqlx::query_as!(Message, r#"SELECT id, contents, created_at FROM messages"#)
.fetch_all(&s.db)
.await
.map(Json)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
}
pub async fn create_message(
State(s): State<AppState>,
Form(message): Form<CreateMessage>,
) -> Result<Response<Body>, StatusCode> {
info!("Creating new message with name: {}", message.contents);
let id = Uuid::now_v7();
sqlx::query!(
r"INSERT INTO messages (id, contents, created_at) VALUES($1, $2, current_timestamp) RETURNING id",
id,
message.contents
).fetch_one(&s.db).await
.map(|row| {
let mut resp = Json(row.id).into_response();
let status = resp.status_mut();
*status = StatusCode::CREATED;
info!("Successfully created message Message[{}, {}]", row.id, message.contents);
resp
}).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
}
pub async fn get_message_by_id(
Path(id): Path<Uuid>,
State(s): State<AppState>,
) -> Result<Json<Message>, StatusCode> {
sqlx::query_as!(
Message,
r#"SELECT id, contents, created_at FROM messages WHERE id = $1"#,
id
)
.fetch_optional(&s.db)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
.and_then(|mayber_message| mayber_message.map(Json).ok_or(StatusCode::NOT_FOUND))
}
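A hedged sketch of how these handlers could be exercised, reusing the sqlx::test plus oneshot pattern the admin tests above already use (test name and form payload are illustrative, not from the diff):

#[cfg(test)]
mod test {
    use axum::{Router, body::Body, routing::get};
    use http::{Request, header};
    use sqlx::PgPool;
    use tower::ServiceExt;

    use crate::{AppState, Config, NuState};

    use super::*;

    #[sqlx::test]
    async fn create_then_list_messages(pool: PgPool) {
        let state = AppState::new(NuState::new(pool, Config::default()));
        let app = Router::new()
            .route("/messages", get(get_messages).post(create_message))
            .with_state(state);

        // POST a form body, matching the Form<CreateMessage> extractor above.
        let resp = app
            .clone()
            .oneshot(
                Request::builder()
                    .method("POST")
                    .uri("/messages")
                    .header(header::CONTENT_TYPE, "application/x-www-form-urlencoded")
                    .body(Body::from("contents=hello"))
                    .unwrap(),
            )
            .await
            .unwrap();
        assert_eq!(resp.status(), StatusCode::CREATED);

        // The freshly inserted row should now come back from GET /messages.
        let resp = app
            .oneshot(Request::builder().uri("/messages").body(Body::empty()).unwrap())
            .await
            .unwrap();
        assert_eq!(resp.status(), StatusCode::OK);
    }
}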

View File

@ -0,0 +1,68 @@
use axum::{
Form, Json,
body::Body,
extract::{Path, State},
response::{IntoResponse, Response},
};
use http::StatusCode;
use sqlx::types::chrono;
use tracing::info;
use uuid::Uuid;
use crate::AppState;
#[derive(serde::Deserialize, serde::Serialize, Debug)]
pub struct Server {
pub name: String,
pub id: Uuid,
pub created_at: chrono::DateTime<chrono::Utc>,
}
#[derive(serde::Deserialize)]
pub struct CreateServer {
pub name: String,
}
pub async fn get_servers(State(s): State<AppState>) -> Result<Json<Vec<Server>>, StatusCode> {
sqlx::query_as!(Server, r#"SELECT id, name, created_at FROM servers"#)
.fetch_all(&s.db)
.await
.map(Json)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
}
pub async fn create_server(
State(s): State<AppState>,
Form(server): Form<CreateServer>,
) -> Result<Response<Body>, StatusCode> {
info!("Creating new server with name: {}", server.name);
let id = Uuid::now_v7();
sqlx::query!(
r"INSERT INTO servers (id, name, created_at) VALUES($1, $2, current_timestamp) RETURNING id",
id,
server.name
).fetch_one(&s.db).await
.map(|row| {
let mut resp = Json(row.id).into_response();
let status = resp.status_mut();
*status = StatusCode::CREATED;
info!("Successfully created server Server[{}, {}]", row.id, server.name);
resp
}).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
}
pub async fn get_server_by_id(
Path(id): Path<Uuid>,
State(s): State<AppState>,
) -> Result<Json<Server>, StatusCode> {
sqlx::query_as!(
Server,
r#"SELECT id, name, created_at FROM servers WHERE id = $1"#,
id
)
.fetch_optional(&s.db)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
.and_then(|mayber_server| mayber_server.map(Json).ok_or(StatusCode::NOT_FOUND))
}

backend/src/state.rs (new file, 19 lines)
View File

@ -0,0 +1,19 @@
use std::sync::Arc;
use sqlx::PgPool;
use crate::Config;
pub type AppState = Arc<NuState>;
#[derive(Clone)]
pub struct NuState {
pub db: sqlx::PgPool,
pub config: Config,
}
impl NuState {
#[must_use]
pub fn new(db: PgPool, config: Config) -> Self {
Self { db, config }
}
}

View File

@ -6,11 +6,20 @@ services:
command: --host 0.0.0.0
depends_on:
- db
frontend:
build: ./ui
ports:
- "3000:3000"
command: --host 0.0.0.0
depends_on:
- db
db:
image: postgres:17-alpine
environment:
POSTGRES_USER: $POSTGRES_USER
POSTGRES_PASSWORD: $POSTGRES_PASSWORD
POSTGRES_DB: nuchat
POSTGRES_DB: nuchat_dev
ports:
- "5432:5432"

ui/.dockerignore (new file, 24 lines)
View File

@ -0,0 +1,24 @@
# Nuxt dev/build outputs
.output
.data
.nuxt
.nitro
.cache
dist
# Node dependencies
node_modules
# Logs
logs
*.log
# Misc
.DS_Store
.fleet
.idea
# Local env files
.env
.env.*
!.env.example

ui/Dockerfile (new file, 15 lines)
View File

@ -0,0 +1,15 @@
FROM node:24-alpine AS base
FROM base AS build
WORKDIR /app
COPY package.json package-lock.json ./
RUN npm ci
COPY . .
RUN npm run build
FROM base AS prod
WORKDIR /app
COPY --from=build /app/.output ./ui
ENV PORT=3000
ENTRYPOINT [ "node", "/app/ui/server/index.mjs" ]

View File

@ -1,6 +1,8 @@
<template>
<div>
<NuxtRouteAnnouncer />
<NuxtWelcome />
<NuxtLayout>
<NuxtPage />
</NuxtLayout>
</div>
</template>

View File

@ -0,0 +1 @@
@import "tailwindcss";

View File

@ -0,0 +1,7 @@
<template>
<div class="bg-sky-900 text-white p-4 mt-auto">
<div class="text-center text-sm opacity-75">
© 2025 NuChat. All rights reserved.
</div>
</div>
</template>

View File

@ -0,0 +1,5 @@
<template>
<div class="text-2xl bold bg-sky-900 text-white p-2">
<NuxtLink to="/">NuChat</NuxtLink>
</div>
</template>

View File

@ -0,0 +1,14 @@
<template>
<RouterLink
:to="`/servers/${id}`"
class="block p-3 hover:bg-sky-100 rounded-lg transition-colors duration-200 border border-transparent hover:border-sky-300"
>
<slot />
</RouterLink>
</template>
<script setup lang="ts">
defineProps<{
id: string | number;
}>();
</script>

View File

@ -0,0 +1,38 @@
<template>
<div class="h-full border-2 border-sky-300 flex flex-col p-4 bg-gray-50">
<h2 class="text-lg font-semibold text-gray-800 mb-4">Servers</h2>
<div class="space-y-2">
<ServerLink
v-for="server in serversWithFallback"
:id="server.id"
:key="server.id"
class="text-gray-700 hover:text-sky-800"
>
{{ server.name }}
</ServerLink>
</div>
</div>
</template>
<script setup lang="ts">
interface Server {
id: string;
name: string;
}
const { data: servers, error } = await useFetch<Server[]>("/api/servers");
if (error.value) {
console.error("Failed to fetch servers:", error.value);
}
const serversWithFallback = computed(() => {
return (
servers.value || [
{ id: "1", name: "General" },
{ id: "2", name: "Gaming" },
{ id: "3", name: "Tech Talk" },
]
);
});
</script>

View File

@ -0,0 +1,12 @@
<template>
<div>
<AppHeader />
<div class="grid grid-cols-12">
<ServerSidebar class="col-span-2" />
<div class="col-span-10">
<slot />
</div>
</div>
<AppFooter />
</div>
</template>

View File

@ -0,0 +1,14 @@
// @noErrors
import { it, expect } from "vitest";
// ---cut---
// tests/components/SomeComponents.nuxt.spec.ts
import { mountSuspended } from "@nuxt/test-utils/runtime";
import Index from "~/pages/index.vue";
// tests/App.nuxt.spec.ts
it("can also mount an app", async () => {
const component = await mountSuspended(Index, { route: "/test" });
expect(component.html()).toMatchInlineSnapshot(`
"<h1>Hello World</h1>"
`);
});

ui/app/pages/index.vue (new file, 3 lines)
View File

@ -0,0 +1,3 @@
<template>
<h1>Hello World</h1>
</template>

View File

@ -1,6 +1,9 @@
// @ts-check
import withNuxt from './.nuxt/eslint.config.mjs'
import withNuxt from "./.nuxt/eslint.config.mjs";
export default withNuxt(
// Your custom configs here
)
export default withNuxt({
rules: {
"prefer-const": "warn",
"no-unexpected-multiline": 0,
},
});

View File

@ -1,6 +1,20 @@
import tailwindcss from "@tailwindcss/vite";
// https://nuxt.com/docs/api/configuration/nuxt-config
export default defineNuxtConfig({
compatibilityDate: '2025-07-15',
compatibilityDate: "2025-07-15",
devtools: { enabled: true },
modules: ['@nuxt/eslint', '@nuxt/icon', '@nuxt/test-utils']
})
modules: [
"@nuxt/eslint",
"@nuxt/icon",
"@nuxt/test-utils",
"@nuxt/test-utils/module",
],
css: ["~/assets/css/main.css"],
vite: {
plugins: [tailwindcss()],
},
nitro: {
preset: "node-server",
},
});

ui/package-lock.json (generated, 1340 changed lines)

File diff suppressed because it is too large.

View File

@ -7,14 +7,33 @@
"dev": "nuxt dev",
"generate": "nuxt generate",
"preview": "nuxt preview",
"postinstall": "nuxt prepare"
"postinstall": "nuxt prepare",
"test": "vitest run",
"watch": "vitest watch",
"lint": "eslint",
"fmt": "prettier"
},
"dependencies": {
"@nuxt/eslint": "1.6.0",
"@nuxt/icon": "^1.15.0",
"@nuxt/test-utils": "^3.19.2",
"@tailwindcss/vite": "^4.1.11",
"nuxt": "^4.0.1",
"tailwindcss": "^4.1.11",
"vue": "^3.5.17",
"vue-router": "^4.5.1"
},
"devDependencies": {
"@eslint/css": "^0.10.0",
"@eslint/js": "^9.32.0",
"@nuxt/test-utils": "^3.19.2",
"@vue/test-utils": "^2.4.6",
"eslint": "^9.32.0",
"eslint-plugin-vue": "^10.3.0",
"globals": "^16.3.0",
"happy-dom": "^18.0.1",
"playwright-core": "^1.54.1",
"prettier": "3.6.2",
"typescript-eslint": "^8.38.0",
"vitest": "^3.2.4"
}
}

ui/vitest.config.ts (new file, 7 lines)
View File

@ -0,0 +1,7 @@
import { defineVitestConfig } from "@nuxt/test-utils/config";
export default defineVitestConfig({
test: {
environment: "nuxt",
},
});