Started milestone 2

This commit is contained in:
sam
2026-05-03 15:53:25 +02:00
parent 43483c2145
commit 041955345b
18 changed files with 4616 additions and 60 deletions

4223
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -7,8 +7,8 @@
- [x] Create crates: `cargo new --lib core_protocol`, `cargo new --bin server_node`, `cargo new --bin client_node`. - [x] Create crates: `cargo new --lib core_protocol`, `cargo new --bin server_node`, `cargo new --bin client_node`.
- [x] Add strict lints (`#![forbid(unsafe_code)]`, etc.) to the root workspace or individual `lib.rs`/`main.rs` files. - [x] Add strict lints (`#![forbid(unsafe_code)]`, etc.) to the root workspace or individual `lib.rs`/`main.rs` files.
- [x] **Dependencies (`core_protocol`):** Add `serde`, `bincode`, `uuid`, `chrono`, `thiserror`, `secrecy` (for zeroing sensitive keys). - [x] **Dependencies (`core_protocol`):** Add `serde`, `bincode`, `uuid`, `chrono`, `thiserror`, `secrecy` (for zeroing sensitive keys).
- [x] **Dependencies (`server_node`):** Add `tokio` (full), `tracing`, `tracing-subscriber`, `anyhow`, `dashmap`. - [x] **Dependencies (`server_node`):** Add `tokio` (full), `tracing`, `tracing-subscriber`, `anyhow`, `dashmap`, `tokio-util`, `tokio-serde`, `futures`.
- [x] **Dependencies (`client_node`):** Add `tokio` (rt-multi-thread), `tracing`, `tracing-subscriber`, `anyhow`. - [x] **Dependencies (`client_node`):** Add `tokio` (rt-multi-thread), `tracing`, `tracing-subscriber`, `anyhow`, `tokio-util`, `tokio-serde`, `futures`.
### 2. Protocol Definitions (`core_protocol`) ### 2. Protocol Definitions (`core_protocol`)
- [x] Create `src/tcp_events.rs`. Define `enum TcpEvent { AuthRequest { username: String, ... }, AuthResponse { session_token: u32, ... }, ChannelJoin { ... }, ChatMessage { ... } }` with `#[derive(Serialize, Deserialize)]`. - [x] Create `src/tcp_events.rs`. Define `enum TcpEvent { AuthRequest { username: String, ... }, AuthResponse { session_token: u32, ... }, ChannelJoin { ... }, ChatMessage { ... } }` with `#[derive(Serialize, Deserialize)]`.
@@ -23,7 +23,7 @@
### 4. Login Logic & State ### 4. Login Logic & State
- [x] **Server State:** Create `server_node/src/state.rs`. Define a `DashMap<u32, UserState>` to store active session tokens. - [x] **Server State:** Create `server_node/src/state.rs`. Define a `DashMap<u32, UserState>` to store active session tokens.
- [x] **Authentication Flow:** Client sends `TcpEvent::AuthRequest`. Server generates a random `u32` session token, stores it in `DashMap`, and returns `TcpEvent::AuthResponse`. - [x] **Authentication Flow:** Client sends `TcpEvent::AuthRequest`. Server generates a sequential `u32` session token (via `AtomicU32`), stores it in `DashMap`, and returns `TcpEvent::AuthResponse`.
- [x] **Validation:** Ensure the server actively drops the connection if the client sends invalid or excessively large payloads. - [x] **Validation:** Ensure the server actively drops the connection if the client sends invalid or excessively large payloads.
### 5. Observability (Logging) ### 5. Observability (Logging)

View File

@@ -10,14 +10,14 @@ voice_app/
│ ├── Standards/ # Strict rules for code quality and testing │ ├── Standards/ # Strict rules for code quality and testing
│ └── Concept/ # UI mockups and aesthetic references │ └── Concept/ # UI mockups and aesthetic references
├── core_protocol/ # Shared binary logic and network Enums ├── core_protocol/ # Shared binary logic and network Enums
│ ├── Cargo.toml # deps: serde, bincode, uuid, chrono │ ├── Cargo.toml # deps: serde, bincode, uuid, chrono, thiserror, secrecy
│ └── src/ │ └── src/
│ ├── lib.rs # Exports modules │ ├── lib.rs # Exports modules
│ ├── tcp_events.rs # Enums for reliable Auth/Chat/Admin events │ ├── tcp_events.rs # Enums for reliable Auth/Chat/Admin events
│ ├── udp_packets.rs # Structs for VoiceFrame headers and payload │ ├── udp_packets.rs # Structs for VoiceFrame headers and payload
│ └── constants.rs # Fixed specs: 48kHz, 20ms frame (960 samples) │ └── constants.rs # Fixed specs: 48kHz, 20ms frame (960 samples)
├── server_node/ # Headless relay, DB host, and Web Admin dashboard ├── server_node/ # Headless relay, DB host, and Web Admin dashboard
│ ├── Cargo.toml # deps: tokio, sqlx, axum, rust-embed, argon2, jwt │ ├── Cargo.toml # deps: tokio, sqlx, axum, rust-embed, argon2, jwt, dashmap, tracing, anyhow, tokio-util, tokio-serde
│ ├── migrations/ # SQLx .sql scripts for SQLite schema persistence │ ├── migrations/ # SQLx .sql scripts for SQLite schema persistence
│ ├── web_dashboard/ # Admin UI source (HTML/CSS/JS) for embedding │ ├── web_dashboard/ # Admin UI source (HTML/CSS/JS) for embedding
│ └── src/ │ └── src/
@@ -30,24 +30,28 @@ voice_app/
│ ├── telemetry.rs # Prometheus /metrics endpoint for server health │ ├── telemetry.rs # Prometheus /metrics endpoint for server health
│ └── auth_service.rs # Argon2 hashing and JWT token generation │ └── auth_service.rs # Argon2 hashing and JWT token generation
├── client_node/ # Desktop application, audio engine, and plugin host ├── client_node/ # Desktop application, audio engine, and plugin host
│ ├── Cargo.toml # deps: eframe, cpal, audiopus, webrtc-dsp, extism │ ├── Cargo.toml # deps: tokio, eframe, cpal, audiopus, webrtc-dsp, extism, tracing, anyhow, tokio-util, tokio-serde
│ └── src/ │ └── src/
│ ├── main.rs # Entry: Initializes eframe and background Tokio runtime │ ├── main.rs # Entry: Initializes eframe and background Tokio runtime
│ ├── app_state.rs # Actor Pattern bridge for non-blocking UI/Net │ ├── app_state.rs # Actor Pattern bridge for non-blocking UI/Net
│ ├── ui/ # egui Graphical Interface │ ├── ui/ # egui Graphical Interface
│ │ ├── mod.rs # Module exports
│ │ ├── layout.rs # Main window shell │ │ ├── layout.rs # Main window shell
│ │ ├── side_panel.rs # Channel tree and connection UI │ │ ├── side_panel.rs # Channel tree and connection UI
│ │ └── chat_area.rs # Text messages and system logs │ │ └── chat_area.rs # Text messages and system logs
│ ├── network/ # Internet connectivity modules │ ├── network/ # Internet connectivity modules
│ │ ├── mod.rs # Module exports
│ │ ├── control.rs # TCP: TLS, Heartbeats, Auto-reconnect │ │ ├── control.rs # TCP: TLS, Heartbeats, Auto-reconnect
│ │ ├── voice.rs # UDP: Jitter buffer (40ms), Seq ordering, Decryption │ │ ├── voice.rs # UDP: Jitter buffer (40ms), Seq ordering, Decryption
│ │ └── chaos.rs # UDP Middleware: Artificial packet loss/latency injection │ │ └── chaos.rs # UDP Middleware: Artificial packet loss/latency injection
│ ├── audio/ # Real-time pipeline with 20ms frames │ ├── audio/ # Real-time pipeline with 20ms frames
│ │ ├── mod.rs # Module exports
│ │ ├── capture.rs # Microphone -> Lock-free Ringbuffer │ │ ├── capture.rs # Microphone -> Lock-free Ringbuffer
│ │ ├── dsp.rs # Noise suppression & Echo cancellation (WebRTC) │ │ ├── dsp.rs # Noise suppression & Echo cancellation (WebRTC)
│ │ ├── codec.rs # Opus Encoding/Decoding (48kbps VBR) │ │ ├── codec.rs # Opus Encoding/Decoding (48kbps VBR)
│ │ └── playback.rs # Ringbuffer -> Speaker output │ │ └── playback.rs # Ringbuffer -> Speaker output
│ └── plugins/ # Wasm Extension Sandbox │ └── plugins/ # Wasm Extension Sandbox
│ ├── mod.rs # Module exports
│ ├── runtime.rs # Extism Wasm runtime initialization │ ├── runtime.rs # Extism Wasm runtime initialization
│ └── hooks.rs # Event triggers: OnVoice, OnMessage, OnJoin │ └── hooks.rs # Event triggers: OnVoice, OnMessage, OnJoin
├── tests/ # Integration and Load Testing ├── tests/ # Integration and Load Testing

View File

@@ -6,9 +6,17 @@ edition = "2024"
[dependencies] [dependencies]
anyhow = "1.0.102" anyhow = "1.0.102"
core_protocol = { version = "0.1.0", path = "../core_protocol" } core_protocol = { version = "0.1.0", path = "../core_protocol" }
cpal = "0.17.3"
eframe = "0.34.1"
egui = "0.34.1"
futures = "0.3.32" futures = "0.3.32"
hound = "3.5.1"
rdev = "0.5.3"
ringbuf = "0.5.0"
tokio = { version = "1.52.1", features = ["rt-multi-thread", "net", "macros"] } tokio = { version = "1.52.1", features = ["rt-multi-thread", "net", "macros"] }
tokio-serde = { version = "0.9.0", features = ["bincode"] } tokio-serde = { version = "0.9.0", features = ["bincode"] }
tokio-util = { version = "0.7.18", features = ["codec"] } tokio-util = { version = "0.7.18", features = ["codec"] }
tracing = "0.1.44" tracing = "0.1.44"
tracing-subscriber = "0.3.23" tracing-subscriber = "0.3.23"
webrtc-audio-processing = { version = "2.0.4", features = ["bundled"] }
webrtc-audio-processing-config = "2.0.4"

View File

@@ -0,0 +1,59 @@
//! Headless and hardware-backed audio capture logic.
//!
//! This module abstracts `cpal` to capture microphone input cleanly,
//! pushing raw samples directly to a lock-free ringbuffer.
//!
//! The `cpal` data callback is kept strictly lock-free and allocation-free
//! to satisfy real-time audio constraints.
use anyhow::{Result, anyhow};
use cpal::StreamConfig;
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use ringbuf::HeapProd;
use ringbuf::traits::Producer;
use tracing::{error, info};
use super::{INPUT_CHANNELS, SAMPLE_RATE};
/// Builds and starts a `cpal` input stream on the default microphone.
///
/// Raw `f32` samples delivered by the hardware callback are pushed straight
/// into `producer`; the callback itself performs no locking and no
/// allocation to satisfy real-time audio constraints.
///
/// # Errors
/// Returns an error if no default input device exists, or if the stream
/// cannot be built or started.
pub fn start_audio_capture(mut producer: HeapProd<f32>) -> Result<cpal::Stream> {
    let host = cpal::default_host();
    let Some(device) = host.default_input_device() else {
        return Err(anyhow!("No default input device found"));
    };
    // cpal 0.17 deprecates `name()` in favor of `description()`.
    let device_label = match device.description() {
        Ok(desc) => desc.name().to_string(),
        Err(_) => "Unknown".to_string(),
    };
    info!("Using input device: {}", device_label);
    let stream_config = StreamConfig {
        channels: INPUT_CHANNELS,
        // cpal 0.17 changed SampleRate from a tuple struct to a plain u32 alias.
        sample_rate: SAMPLE_RATE,
        buffer_size: cpal::BufferSize::Default,
    };
    // STRICT RULE: no locks, no allocations inside the data callback —
    // only a lock-free ringbuffer push.
    let on_data = move |samples: &[f32], _: &cpal::InputCallbackInfo| {
        let _ = producer.push_slice(samples);
    };
    let on_error = move |err| {
        error!("An error occurred on the audio capture stream: {}", err);
    };
    // None = default timeout.
    let stream = device.build_input_stream(&stream_config, on_data, on_error, None)?;
    stream.play()?;
    Ok(stream)
}

View File

@@ -0,0 +1,108 @@
//! DSP and Voice Activity Detection (VAD) thread.
//!
//! Pulls audio from the lock-free ringbuffer, applies WebRTC noise suppression
//! and echo cancellation, then checks for voice activity before signalling
//! the UI via a `tokio::sync::watch` channel.
//!
//! This thread is a dedicated `std::thread` (not a Tokio task) because
//! real-time audio processing must never be at the mercy of a cooperative
//! async scheduler.
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::time::Duration;
use ringbuf::HeapCons;
use ringbuf::traits::{Consumer, Observer};
use tokio::sync::watch;
use tracing::info;
use webrtc_audio_processing::Processor;
use webrtc_audio_processing_config::{
Config, EchoCanceller, NoiseSuppression, NoiseSuppressionLevel,
};
use super::{FRAME_SIZE, SAMPLE_RATE};
/// RMS threshold below which a frame is considered silence.
/// This provides a simple amplitude-based VAD since the WebRTC v2 API
/// removed the standalone voice detection configuration.
/// NOTE(review): 0.01 looks like an empirically chosen cutoff for
/// normalized f32 samples in [-1.0, 1.0] — confirm against real mic levels.
const VAD_RMS_THRESHOLD: f32 = 0.01;
/// Spawns the dedicated background DSP thread.
///
/// The thread drains 960-sample (20 ms @ 48 kHz) frames from `consumer`,
/// runs them through the WebRTC APM (noise suppression + echo
/// cancellation), applies an RMS-based VAD, and publishes the combined
/// PTT-and-voice state on `active_speaker_tx` whenever it changes.
pub fn spawn_dsp_thread(
    mut consumer: HeapCons<f32>,
    ptt_flag: Arc<AtomicBool>,
    active_speaker_tx: watch::Sender<bool>,
) {
    thread::spawn(move || {
        info!("DSP thread started.");
        let ap = match Processor::new(SAMPLE_RATE) {
            Ok(processor) => processor,
            Err(e) => {
                tracing::error!("Failed to initialize WebRTC APM: {:?}", e);
                return;
            }
        };
        ap.set_config(Config {
            echo_canceller: Some(EchoCanceller::default()),
            noise_suppression: Some(NoiseSuppression {
                level: NoiseSuppressionLevel::High,
                analyze_linear_aec_output: false,
            }),
            ..Default::default()
        });
        // Mono capture: a single channel holding FRAME_SIZE samples.
        let mut channels = vec![vec![0.0f32; FRAME_SIZE]];
        loop {
            // Park briefly until a full 20ms frame is available.
            if consumer.occupied_len() < FRAME_SIZE {
                thread::sleep(Duration::from_millis(2));
                continue;
            }
            let _ = consumer.pop_slice(&mut channels[0]);
            let ptt_down = ptt_flag.load(Ordering::Relaxed);
            // Run the WebRTC DSP pipeline on the capture frame.
            if let Err(e) = ap.process_capture_frame(&mut channels) {
                tracing::warn!("APM processing failed: {:?}", e);
            }
            // Simple RMS-based VAD since webrtc-audio-processing v2
            // removed the dedicated VoiceDetection config field.
            let voiced = compute_rms(&channels[0]) > VAD_RMS_THRESHOLD;
            let should_transmit = ptt_down && voiced;
            // Only publish on state changes to avoid needless UI repaints.
            if *active_speaker_tx.borrow() != should_transmit {
                let _ = active_speaker_tx.send(should_transmit);
            }
        }
    });
}
/// Returns the Root Mean Square (RMS) amplitude of `samples`.
///
/// An empty slice yields `0.0`. This serves as a lightweight silence
/// gate: frames whose RMS is below a threshold are treated as non-speech.
fn compute_rms(samples: &[f32]) -> f32 {
    let count = samples.len();
    if count == 0 {
        return 0.0;
    }
    let energy = samples.iter().fold(0.0f32, |acc, &s| acc + s * s);
    // FRAME_SIZE (960) is well within f32's 23-bit mantissa.
    #[allow(clippy::cast_precision_loss)]
    let mean = energy / count as f32;
    mean.sqrt()
}

View File

@@ -0,0 +1,19 @@
//! Audio pipeline root: shared configuration constants plus the capture
//! and DSP submodules.
//!
//! Defines strict mathematical constraints required for Opus encoding and
//! decoding (48 kHz mono, 20 ms / 960-sample frames).
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::pedantic)]
#![deny(clippy::unwrap_used, clippy::expect_used)]
/// Microphone capture via `cpal` into a lock-free ringbuffer.
pub mod capture;
/// Dedicated DSP/VAD thread consuming the capture ringbuffer.
pub mod dsp;
/// The strict sample rate required across the entire DSP pipeline.
pub const SAMPLE_RATE: u32 = 48_000;
/// The number of channels for microphone capture (Mono).
pub const INPUT_CHANNELS: u16 = 1;
/// The exact number of samples required per frame for Opus (20ms).
pub const FRAME_SIZE: usize = 960;

View File

@@ -0,0 +1,34 @@
//! Global Hotkey listener.
//!
//! Uses `rdev` to capture global PTT events across the OS without requiring window focus.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::pedantic)]
#![deny(clippy::unwrap_used, clippy::expect_used)]
use rdev::{Event, EventType, Key, listen};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use tracing::info;
/// Spawns a background thread listening for the global push-to-talk key.
///
/// Pressing `V` anywhere in the OS (no window focus required) sets
/// `ptt_flag`; releasing it clears the flag. The listener runs for the
/// lifetime of the process.
pub fn spawn_hotkey_listener(ptt_flag: Arc<AtomicBool>) {
    thread::spawn(move || {
        info!("Hotkey listener started. Press 'V' to talk.");
        let handler = move |event: Event| {
            // Map the PTT key transitions to the new flag state;
            // every other event is ignored.
            let new_state = match event.event_type {
                EventType::KeyPress(Key::KeyV) => Some(true),
                EventType::KeyRelease(Key::KeyV) => Some(false),
                _ => None,
            };
            if let Some(state) = new_state {
                ptt_flag.store(state, Ordering::Relaxed);
            }
        };
        if let Err(e) = listen(handler) {
            tracing::error!("Error listening to global hotkeys: {:?}", e);
        }
    });
}

View File

@@ -1,24 +1,76 @@
//! Client Node entry point. //! Client Node entry point.
//! //!
//! This module initializes the desktop client application, sets up the Tokio //! This module initializes the desktop client application, sets up the Tokio
//! background thread for networking, and eventually binds to the UI framework. //! background thread for networking, and eventually binds to the UI framework.
#![forbid(unsafe_code)] #![forbid(unsafe_code)]
#![deny(clippy::all, clippy::pedantic)] #![deny(clippy::all, clippy::pedantic)]
#![deny(clippy::unwrap_used, clippy::expect_used)] #![deny(clippy::unwrap_used, clippy::expect_used)]
mod audio;
mod hotkey;
mod network; mod network;
mod ui;
use anyhow::Result;
use eframe::egui;
use ringbuf::HeapRb;
use ringbuf::traits::Split;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use tokio::sync::watch;
use tracing::{error, info}; use tracing::{error, info};
#[tokio::main] fn main() -> Result<()> {
async fn main() -> anyhow::Result<()> {
tracing_subscriber::fmt::init(); tracing_subscriber::fmt::init();
info!("Starting client node..."); info!("Starting client node...");
if let Err(e) = network::control::connect_and_auth("TestUser").await { // Setup communication channels
error!("Connection error: {:?}", e); let (active_speaker_tx, active_speaker_rx) = watch::channel(false);
} let ptt_flag = Arc::new(AtomicBool::new(false));
// Setup lock-free ringbuffer for audio capture (4096 capacity)
let audio_rb = HeapRb::<f32>::new(4096);
let (producer, consumer) = audio_rb.split();
// Spawn DSP and audio capture threads
audio::dsp::spawn_dsp_thread(consumer, ptt_flag.clone(), active_speaker_tx);
let _stream = audio::capture::start_audio_capture(producer).map_err(|e| {
error!("Failed to start audio capture: {:?}", e);
e
});
// Spawn Global Hotkey listener
hotkey::spawn_hotkey_listener(ptt_flag);
// Spawn custom tokio runtime for network background tasks
std::thread::spawn(move || {
let Ok(rt) = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
else {
tracing::error!("Failed to build tokio runtime");
return;
};
rt.block_on(async {
if let Err(e) = network::control::connect_and_auth("TestUser").await {
error!("Connection error: {:?}", e);
}
});
});
let options = eframe::NativeOptions {
viewport: egui::ViewportBuilder::default().with_inner_size([800.0, 600.0]),
..Default::default()
};
eframe::run_native(
"Voice App",
options,
Box::new(|_cc| Ok(Box::new(ui::VoiceApp::new(active_speaker_rx)))),
)
.map_err(|e| anyhow::anyhow!("eframe error: {e:?}"))?;
Ok(()) Ok(())
} }

View File

@@ -8,8 +8,8 @@ use core_protocol::constants;
use core_protocol::tcp_events::TcpEvent; use core_protocol::tcp_events::TcpEvent;
use futures::{SinkExt, StreamExt}; use futures::{SinkExt, StreamExt};
use tokio::net::TcpStream; use tokio::net::TcpStream;
use tokio_serde::formats::Bincode;
use tokio_serde::SymmetricallyFramed; use tokio_serde::SymmetricallyFramed;
use tokio_serde::formats::Bincode;
use tokio_util::codec::{Framed, LengthDelimitedCodec}; use tokio_util::codec::{Framed, LengthDelimitedCodec};
use tracing::{info, warn}; use tracing::{info, warn};
@@ -22,7 +22,7 @@ type FramedStream = SymmetricallyFramed<
/// Connects to the server and performs the initial authentication handshake. /// Connects to the server and performs the initial authentication handshake.
/// ///
/// This establishes a length-delimited TCP connection to prevent fragmentation, /// This establishes a length-delimited TCP connection to prevent fragmentation,
/// sends an `AuthRequest` with the given username, and awaits the `AuthResponse`. /// sends an `AuthRequest` with the given username, and awaits the `AuthResponse`.
/// ///
/// # Arguments /// # Arguments
@@ -34,31 +34,37 @@ type FramedStream = SymmetricallyFramed<
pub async fn connect_and_auth(username: &str) -> Result<()> { pub async fn connect_and_auth(username: &str) -> Result<()> {
let addr = format!("127.0.0.1:{}", constants::TCP_PORT); let addr = format!("127.0.0.1:{}", constants::TCP_PORT);
info!("Connecting to server at {}...", addr); info!("Connecting to server at {}...", addr);
let stream = TcpStream::connect(&addr).await.context("Failed to connect to server")?; let stream = TcpStream::connect(&addr)
.await
.context("Failed to connect to server")?;
info!("Connected!"); info!("Connected!");
// Construct the codec pipeline exactly mirroring the server's configuration // Construct the codec pipeline exactly mirroring the server's configuration
// to ensure reliable packet framing. // to ensure reliable packet framing.
let length_delimited = Framed::new(stream, LengthDelimitedCodec::new()); let length_delimited = Framed::new(stream, LengthDelimitedCodec::new());
let mut framed: FramedStream = SymmetricallyFramed::new( let mut framed: FramedStream =
length_delimited, SymmetricallyFramed::new(length_delimited, Bincode::<TcpEvent, TcpEvent>::default());
Bincode::<TcpEvent, TcpEvent>::default(),
);
let auth_req = TcpEvent::AuthRequest { let auth_req = TcpEvent::AuthRequest {
username: username.to_string(), username: username.to_string(),
}; };
framed.send(auth_req).await.context("Failed to send AuthRequest")?; framed
.send(auth_req)
.await
.context("Failed to send AuthRequest")?;
info!("Sent AuthRequest for user: {}", username); info!("Sent AuthRequest for user: {}", username);
if let Some(response) = framed.next().await { if let Some(response) = framed.next().await {
let response = response.context("Failed to deserialize response")?; let response = response.context("Failed to deserialize response")?;
match response { match response {
TcpEvent::AuthResponse { session_token } => { TcpEvent::AuthResponse { session_token } => {
info!("Successfully authenticated! Session token: {}", session_token); info!(
"Successfully authenticated! Session token: {}",
session_token
);
} }
_ => { _ => {
warn!("Received unexpected event instead of AuthResponse"); warn!("Received unexpected event instead of AuthResponse");

64
client_node/src/ui/app.rs Normal file
View File

@@ -0,0 +1,64 @@
//! The core application state for the eframe UI.
//!
//! This module defines the `VoiceApp` struct which implements `eframe::App`.
//! It listens to background events via `tokio::sync::watch` and draws the UI at 60 FPS.
//!
//! We implement `ui()` (not the deprecated `update()`) because eframe 0.34
//! changed the required trait method to receive an `&mut egui::Ui` directly
//! instead of a raw `egui::Context`.
use eframe::egui;
use tokio::sync::watch;
/// Central UI state for the eframe application.
pub struct VoiceApp {
    /// Receiver for the active speaker state, updated by the DSP thread.
    pub active_speaker_rx: watch::Receiver<bool>,
    /// Whether the audio dumper is enabled for debugging.
    pub audio_dumper_enabled: bool,
}
impl VoiceApp {
    /// Constructs the UI state around the DSP thread's speaker-state channel.
    ///
    /// The audio dumper starts disabled; it is toggled from the
    /// Developer Settings panel.
    #[must_use]
    pub fn new(active_speaker_rx: watch::Receiver<bool>) -> Self {
        Self {
            audio_dumper_enabled: false,
            active_speaker_rx,
        }
    }
}
impl eframe::App for VoiceApp {
    /// Draws the two-column main view: channel tree on the left,
    /// voice-chat status and developer tools on the right.
    fn ui(&mut self, ui: &mut egui::Ui, _frame: &mut eframe::Frame) {
        let speaking = *self.active_speaker_rx.borrow();
        // Two columns emulate a side-panel layout within a single Ui.
        ui.columns(2, |columns| {
            // Left column: channel tree view.
            columns[0].heading("Channels");
            for channel in ["General", "Gaming", "AFK"] {
                columns[0].label(channel);
            }
            // Right column: voice chat state + dev tools.
            columns[1].heading("Voice Chat");
            columns[1].horizontal(|row| {
                row.label("You: ");
                let (text, color) = if speaking {
                    ("Speaking", egui::Color32::GREEN)
                } else {
                    ("Silent", egui::Color32::GRAY)
                };
                row.label(egui::RichText::new(text).color(color));
            });
            columns[1].separator();
            columns[1].heading("Developer Settings");
            columns[1].checkbox(&mut self.audio_dumper_enabled, "Enable Audio Dumper (.wav)");
        });
        // Force continuous repaint so watch-channel updates show immediately.
        ui.ctx().request_repaint();
    }
}

11
client_node/src/ui/mod.rs Normal file
View File

@@ -0,0 +1,11 @@
//! UI Module entry point.
//!
//! This module handles the egui visual interface and maintains the `eframe` Application state.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::pedantic)]
#![deny(clippy::unwrap_used, clippy::expect_used)]
/// The `eframe::App` implementation and its UI state struct.
pub mod app;
// Re-exported so callers can use `ui::VoiceApp` directly.
pub use app::VoiceApp;

View File

@@ -1,6 +1,6 @@
//! Fixed system-wide specifications. //! Fixed system-wide specifications.
//! //!
//! Contains constants required for hardware configuration, network binding, //! Contains constants required for hardware configuration, network binding,
//! and math routines such as audio framing lengths. //! and math routines such as audio framing lengths.
/// The uniform audio sampling rate (48 kHz). /// The uniform audio sampling rate (48 kHz).

View File

@@ -1,7 +1,7 @@
//! Core Protocol definitions for the Voice App. //! Core Protocol definitions for the Voice App.
//! //!
//! This module defines the foundational types and constants shared between the //! This module defines the foundational types and constants shared between the
//! client and server nodes. It ensures that both ends of the connection speak the //! client and server nodes. It ensures that both ends of the connection speak the
//! exact same structural language for serialization/deserialization. //! exact same structural language for serialization/deserialization.
#![forbid(unsafe_code)] #![forbid(unsafe_code)]

View File

@@ -1,6 +1,6 @@
//! TCP Control Lane events. //! TCP Control Lane events.
//! //!
//! Defines the reliable commands sent over the TCP connection for state //! Defines the reliable commands sent over the TCP connection for state
//! synchronization, such as authentication and text chat. //! synchronization, such as authentication and text chat.
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};

View File

@@ -6,8 +6,8 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// The header attached to every UDP voice frame. /// The header attached to every UDP voice frame.
/// ///
/// We separate this from the payload to allow the server to rapidly route packets /// We separate this from the payload to allow the server to rapidly route packets
/// using just the `session_token` without fully deserializing the heavy audio data. /// using just the `session_token` without fully deserializing the heavy audio data.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VoicePacketHeader { pub struct VoicePacketHeader {

View File

@@ -1,7 +1,7 @@
//! Concurrent server state management. //! Concurrent server state management.
//! //!
//! This module defines the global application state shared across all active //! This module defines the global application state shared across all active
//! Tokio tasks. It heavily relies on lock-free and fine-grained locking primitives //! Tokio tasks. It heavily relies on lock-free and fine-grained locking primitives
//! like `DashMap` and `AtomicU32` to ensure high performance without stuttering. //! like `DashMap` and `AtomicU32` to ensure high performance without stuttering.
use dashmap::DashMap; use dashmap::DashMap;
@@ -33,8 +33,8 @@ impl AppState {
} }
/// Generates a strictly unique, monotonically increasing session token. /// Generates a strictly unique, monotonically increasing session token.
/// ///
/// We use `Ordering::Relaxed` because we only need atomicity on the counter /// We use `Ordering::Relaxed` because we only need atomicity on the counter
/// itself, not strict memory ordering with other operations. /// itself, not strict memory ordering with other operations.
#[must_use] #[must_use]
pub fn generate_token(&self) -> u32 { pub fn generate_token(&self) -> u32 {

View File

@@ -1,7 +1,7 @@
//! Reliable TCP routing and connection management. //! Reliable TCP routing and connection management.
//! //!
//! This module implements the reliable control lane for client communication. //! This module implements the reliable control lane for client communication.
//! It handles the initial authentication handshake and routes incoming TCP events //! It handles the initial authentication handshake and routes incoming TCP events
//! from the length-delimited codec to the broader application state. //! from the length-delimited codec to the broader application state.
use anyhow::{Context, Result}; use anyhow::{Context, Result};
@@ -9,8 +9,8 @@ use core_protocol::tcp_events::TcpEvent;
use futures::{SinkExt, StreamExt}; use futures::{SinkExt, StreamExt};
use std::sync::Arc; use std::sync::Arc;
use tokio::net::TcpStream; use tokio::net::TcpStream;
use tokio_serde::formats::Bincode;
use tokio_serde::SymmetricallyFramed; use tokio_serde::SymmetricallyFramed;
use tokio_serde::formats::Bincode;
use tokio_util::codec::{Framed, LengthDelimitedCodec}; use tokio_util::codec::{Framed, LengthDelimitedCodec};
use tracing::{error, info, instrument, warn}; use tracing::{error, info, instrument, warn};
@@ -26,7 +26,7 @@ type FramedStream = SymmetricallyFramed<
/// Handles the lifecycle of a newly connected client's TCP stream. /// Handles the lifecycle of a newly connected client's TCP stream.
/// ///
/// This spans an instrumented task for the connection, setting up the necessary /// This spans an instrumented task for the connection, setting up the necessary
/// framers and codecs before entering the event loop. /// framers and codecs before entering the event loop.
#[instrument(skip(stream, state))] #[instrument(skip(stream, state))]
pub async fn handle_connection(stream: TcpStream, state: Arc<AppState>) { pub async fn handle_connection(stream: TcpStream, state: Arc<AppState>) {
@@ -39,13 +39,11 @@ pub async fn handle_connection(stream: TcpStream, state: Arc<AppState>) {
}; };
info!("New connection from {}", peer_addr); info!("New connection from {}", peer_addr);
// We pad the TCP stream with a length-delimited codec to guarantee frame boundaries, // We pad the TCP stream with a length-delimited codec to guarantee frame boundaries,
// avoiding fragmentation issues common in raw TCP sockets. // avoiding fragmentation issues common in raw TCP sockets.
let length_delimited = Framed::new(stream, LengthDelimitedCodec::new()); let length_delimited = Framed::new(stream, LengthDelimitedCodec::new());
let mut framed: FramedStream = SymmetricallyFramed::new( let mut framed: FramedStream =
length_delimited, SymmetricallyFramed::new(length_delimited, Bincode::<TcpEvent, TcpEvent>::default());
Bincode::<TcpEvent, TcpEvent>::default(),
);
if let Err(e) = process_connection(&mut framed, state).await { if let Err(e) = process_connection(&mut framed, state).await {
warn!("Connection closed with error: {:?}", e); warn!("Connection closed with error: {:?}", e);
@@ -65,10 +63,10 @@ async fn process_connection(framed: &mut FramedStream, state: Arc<AppState>) ->
match event { match event {
TcpEvent::AuthRequest { username } => { TcpEvent::AuthRequest { username } => {
// REDACTED standard: we might log the username, but this is a reminder // REDACTED standard: we might log the username, but this is a reminder
// for future sensitive items to use [REDACTED]. // for future sensitive items to use [REDACTED].
info!("AuthRequest received for user: {}", username); info!("AuthRequest received for user: {}", username);
let session_token = state.generate_token(); let session_token = state.generate_token();
state.active_users.insert( state.active_users.insert(