// camera-trng/src/main.rs

use axum::{
body::Body,
extract::Query,
http::{header, StatusCode},
response::{Html, IntoResponse, Response, Json},
routing::get,
Router,
};
use nokhwa::{
pixel_format::RgbFormat,
utils::{CameraIndex, RequestedFormat, RequestedFormatType},
Camera,
};
use sha2::{Digest, Sha256};
use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
use serde_json::json;
/// Hard cap on the number of bytes a single /random request may return.
const MAX_BYTES_PER_REQUEST: usize = 1024;
/// Maximum number of /random requests serviced concurrently.
const MAX_CONCURRENT: usize = 4;
/// Listen port used when $PORT is unset or fails to parse.
const DEFAULT_PORT: u16 = 8787;
/// Count of in-flight /random requests, enforcing MAX_CONCURRENT.
static ACTIVE_REQUESTS: AtomicUsize = AtomicUsize::new(0);
/// Monotonically increasing id mixed into each request's entropy hash
/// so concurrent requests cannot produce identical output.
static REQUEST_COUNTER: AtomicU64 = AtomicU64::new(0);
/// Report whether FAKE_CAMERA mode is enabled via the environment.
///
/// Returns true when the `FAKE_CAMERA` variable is set to `"1"` or any
/// case variant of `"true"`; unset or any other value means false.
fn is_fake_camera() -> bool {
    match std::env::var("FAKE_CAMERA") {
        Ok(value) => value == "1" || value.to_lowercase() == "true",
        Err(_) => false,
    }
}
/// Query parameters accepted by GET /random.
#[derive(serde::Deserialize)]
struct RandomQuery {
    /// Number of random bytes requested; defaults to 32 and is capped
    /// server-side at MAX_BYTES_PER_REQUEST.
    #[serde(default = "default_bytes")]
    bytes: usize,
    /// When true, respond with a hex-encoded string instead of raw octets.
    #[serde(default)]
    hex: bool,
}

/// serde default for `RandomQuery::bytes`.
fn default_bytes() -> usize { 32 }
/// Entry point: probes the entropy source, then serves the HTTP API.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Port from $PORT; fall back to the default on absence or parse failure.
    let port = std::env::var("PORT").ok().and_then(|p| p.parse().ok()).unwrap_or(DEFAULT_PORT);
    if is_fake_camera() {
        println!("FAKE_CAMERA mode enabled - using /dev/urandom for entropy");
    } else {
        println!("Testing camera access...");
        // A camera failure is deliberately non-fatal: the server still starts
        // so / and /health stay reachable; /random will error per-request.
        if let Err(e) = test_camera() {
            eprintln!("Camera error: {}. Server will still start.", e);
        } else {
            println!("Camera OK");
        }
    }
    let app = Router::new()
        .route("/", get(index))
        .route("/random", get(get_random))
        .route("/health", get(health))
        .route("/.well-known/mcp.json", get(mcp_wellknown));
    let addr = format!("0.0.0.0:{}", port);
    println!("Camera TRNG on http://{}", addr);
    let listener = tokio::net::TcpListener::bind(&addr).await?;
    axum::serve(listener, app).await?;
    Ok(())
}
/// GET / — serve the embedded single-page UI.
async fn index() -> Html<&'static str> { Html(INDEX_HTML) }
/// GET /health — trivial liveness probe.
async fn health() -> &'static str { "ok" }
/// GET /.well-known/mcp.json — static MCP discovery document describing the
/// /random tool (its URL template, parameter limits, and lack of auth) so
/// agent frameworks can find and call this server.
async fn mcp_wellknown() -> Json<serde_json::Value> {
    Json(json!({
        "mcp": {
            "spec_version": "2026-01-21",
            "status": "active",
            "servers": [],
            "tools": [{
                "name": "camera-trng",
                "description": "True random number generator using camera sensor thermal/shot noise",
                "url_template": "{origin}/random?bytes={bytes}&hex={hex}",
                "capabilities": ["random-generation", "entropy-source"],
                "auth": { "type": "none" },
                "parameters": {
                    "bytes": { "type": "integer", "default": 32, "max": 1024, "description": "Number of random bytes" },
                    "hex": { "type": "boolean", "default": false, "description": "Return hex-encoded string" }
                }
            }]
        }
    }))
}
/// Probe camera 0 at startup, returning a stringified error on failure.
/// The camera handle is dropped immediately; this only checks availability.
fn test_camera() -> Result<(), String> {
    Camera::new(
        CameraIndex::Index(0),
        RequestedFormat::new::<RgbFormat>(RequestedFormatType::None),
    )
    .map(|_| ())
    .map_err(|e| e.to_string())
}
async fn get_random(Query(params): Query<RandomQuery>) -> Response {
let current = ACTIVE_REQUESTS.fetch_add(1, Ordering::SeqCst);
if current >= MAX_CONCURRENT {
ACTIVE_REQUESTS.fetch_sub(1, Ordering::SeqCst);
return (StatusCode::TOO_MANY_REQUESTS, "Too many requests").into_response();
}
let bytes = params.bytes.min(MAX_BYTES_PER_REQUEST);
if bytes == 0 {
ACTIVE_REQUESTS.fetch_sub(1, Ordering::SeqCst);
return (StatusCode::BAD_REQUEST, "bytes must be > 0").into_response();
}
let request_id = REQUEST_COUNTER.fetch_add(1, Ordering::SeqCst);
let use_fake = is_fake_camera();
let result = tokio::task::spawn_blocking(move || {
if use_fake {
extract_entropy_fake(bytes, request_id)
} else {
extract_entropy_camera(bytes, request_id)
}
}).await;
ACTIVE_REQUESTS.fetch_sub(1, Ordering::SeqCst);
match result {
Ok(Ok(data)) => {
if params.hex {
Response::builder().header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(hex::encode(&data))).unwrap()
} else {
Response::builder().header(header::CONTENT_TYPE, "application/octet-stream")
.body(Body::from(data)).unwrap()
}
}
Ok(Err(e)) => (StatusCode::INTERNAL_SERVER_ERROR, e).into_response(),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}
}
/// Produce `num_bytes` of pseudo-entropy from /dev/urandom, mimicking the
/// camera path: each simulated frame's low bits are hashed together with
/// the request id, frame index, and a nanosecond timestamp, and every
/// SHA-256 digest contributes 32 output bytes.
fn extract_entropy_fake(num_bytes: usize, request_id: u64) -> Result<Vec<u8>, String> {
    use std::io::Read;

    let mut urandom = std::fs::File::open("/dev/urandom").map_err(|e| e.to_string())?;
    // Same size as a simulated 640x480 RGB camera frame.
    let mut frame_buf = vec![0u8; 640 * 480 * 3];
    // SHA-256 emits 32 bytes per frame; the +1 covers any remainder.
    let frames_needed = (num_bytes / 32) + 1;
    let mut out = Vec::with_capacity(num_bytes);
    let mut hasher = Sha256::new();

    for frame_idx in 0..frames_needed {
        urandom.read_exact(&mut frame_buf).map_err(|e| e.to_string())?;
        // Mask each sample down to its two least significant bits in place;
        // read_exact fully refills the buffer on the next iteration.
        for sample in frame_buf.iter_mut() {
            *sample &= 0x03;
        }
        hasher.update(&frame_buf);
        // Domain-separate by request, frame, and wall-clock nanoseconds.
        hasher.update(&request_id.to_le_bytes());
        hasher.update(&(frame_idx as u64).to_le_bytes());
        hasher.update(&nanos_now().to_le_bytes());
        out.extend_from_slice(&hasher.finalize_reset());
        if out.len() >= num_bytes {
            break;
        }
    }

    out.truncate(num_bytes);
    Ok(out)
}
/// Harvest `num_bytes` of entropy from camera 0's sensor noise.
///
/// Grabs frames and hashes the two low bits of every frame byte together
/// with the request id, frame index, and a nanosecond timestamp; each
/// SHA-256 digest contributes 32 output bytes. Returns a stringified error
/// if the camera cannot be opened, streamed, or read.
fn extract_entropy_camera(num_bytes: usize, request_id: u64) -> Result<Vec<u8>, String> {
    let index = CameraIndex::Index(0);
    // RequestedFormatType::None: accept whatever format the device offers.
    let format = RequestedFormat::new::<RgbFormat>(RequestedFormatType::None);
    let mut camera = Camera::new(index, format).map_err(|e| e.to_string())?;
    camera.open_stream().map_err(|e| e.to_string())?;
    let mut entropy = Vec::with_capacity(num_bytes);
    let mut hasher = Sha256::new();
    // SHA-256 emits 32 bytes per frame; the +1 covers any remainder.
    let frames_needed = (num_bytes / 32) + 1;
    for frame_idx in 0..frames_needed {
        // NOTE(review): an early `?` return here skips stop_stream below;
        // presumably Camera's Drop tears the stream down — confirm in nokhwa.
        let frame = camera.frame().map_err(|e| e.to_string())?;
        let raw = frame.buffer();
        // Keep only the two least significant bits of each byte.
        let lsbs: Vec<u8> = raw.iter().map(|b| b & 0x03).collect();
        hasher.update(&lsbs);
        // Domain separation: request id, frame index, and current time.
        hasher.update(&request_id.to_le_bytes());
        hasher.update(&(frame_idx as u64).to_le_bytes());
        hasher.update(&nanos_now().to_le_bytes());
        let hash = hasher.finalize_reset();
        entropy.extend_from_slice(&hash);
        if entropy.len() >= num_bytes { break; }
    }
    // Best-effort stop; the error is ignored since we already have the bytes.
    camera.stop_stream().ok();
    entropy.truncate(num_bytes);
    Ok(entropy)
}
/// Nanoseconds since the Unix epoch, mixed into each frame's entropy hash
/// as a freshness input.
///
/// # Panics
///
/// Panics if the system clock is set before the Unix epoch (the only way
/// `duration_since` can fail here). A bare `unwrap()` hid that invariant;
/// `expect` now states it.
fn nanos_now() -> u128 {
    std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("system clock is set before the Unix epoch")
        .as_nanos()
}
/// The web UI served at /, embedded at compile time from src/index.html.
const INDEX_HTML: &str = include_str!("index.html");