Compare commits

..

3 Commits

6 changed files with 216 additions and 154 deletions

rtiow/Cargo.lock (generated)
View File

@@ -2591,6 +2591,7 @@ dependencies = [
  "structopt 0.2.18",
  "strum",
  "strum_macros",
+ "tev_client",
  "thiserror",
  "vec3",
 ]
@@ -3083,6 +3084,12 @@ dependencies = [
  "redox_termios",
 ]
 
+[[package]]
+name = "tev_client"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c845c2d56d4f732d09a32c9ea2cd3f01923be7a5f98d9f7f0a347205c3141036"
+
 [[package]]
 name = "textwrap"
 version = "0.11.0"

View File

@@ -30,6 +30,7 @@ stl = {path = "../../../stl"}
 strum = { version = "0.24.1", features = ["derive"] }
 strum_macros = "0.24.3"
 thiserror = "1.0.38"
+tev_client = "0.5.2"
 #stl = {git = "https://git-private.z.xinu.tv/wathiede/stl"}
 
 [dev-dependencies]

View File

@@ -2,6 +2,7 @@ use std::{
     collections::HashMap,
     fs::File,
     io::BufWriter,
+    net::TcpStream,
     path::Path,
     sync::{Arc, Mutex},
     time,
@@ -9,9 +10,9 @@ use std::{
 };
 
 use chrono::Local;
 use image;
-use lazy_static::lazy_static;
 use log::info;
 use serde_derive::Serialize;
+use tev_client::{PacketCreateImage, PacketUpdateImage, TevClient};
 
 use crate::{renderer::Scene, vec3::Vec3};
@@ -24,10 +25,6 @@ pub const ADAPTIVE_DEPTH: &str = "adaptive_depth";
 // Grey scale showing rays cast per pixel.
 pub const RAYS_PER_PIXEL: &str = "rays_per_pixel";
 
-lazy_static! {
-    static ref DEBUGGER: Arc<Mutex<Debugger>> = Arc::new(Mutex::new(Debugger::new()));
-}
-
 #[derive(Serialize)]
 struct ImageMetadata {
     name: String,
@@ -74,65 +71,86 @@ impl Image {
     }
 }
 
-struct Debugger {
-    images: HashMap<String, (ImageType, Image)>,
+pub struct OutputManager {
+    images: Arc<Mutex<HashMap<String, (ImageType, Image)>>>,
+    tev_client: Option<Arc<Mutex<TevClient>>>,
 }
 
-impl Debugger {
-    fn new() -> Debugger {
-        Debugger {
-            images: HashMap::new(),
-        }
-    }
+impl OutputManager {
+    pub fn new(tev_addr: &Option<String>) -> std::io::Result<OutputManager> {
+        let tev_client = if let Some(addr) = tev_addr {
+            Some(Arc::new(Mutex::new(TevClient::wrap(TcpStream::connect(
+                addr,
+            )?))))
+        } else {
+            None
+        };
+        Ok(OutputManager {
+            images: Arc::new(Mutex::new(HashMap::new())),
+            tev_client,
+        })
     }
 
-pub fn register_image(name: String, dimensions: (usize, usize), it: ImageType) {
-    let mut debugger = DEBUGGER.lock().unwrap();
-    debugger
-        .images
-        .insert(name, (it, Image::new(dimensions.0, dimensions.1)));
+    pub fn register_image(&self, name: String, dimensions: (usize, usize), it: ImageType) {
+        let mut images = self.images.lock().unwrap();
+        images.insert(name.clone(), (it, Image::new(dimensions.0, dimensions.1)));
+        self.tev_client.clone().map(|c| {
+            c.lock().unwrap().send(PacketCreateImage {
+                image_name: &name,
+                grab_focus: false,
+                width: dimensions.0 as u32,
+                height: dimensions.1 as u32,
+                channel_names: &["R", "G", "B"],
+            })
+        });
     }
 
-pub fn set_pixel(name: &str, x: usize, y: usize, pixel: Vec3) {
-    let mut debugger = DEBUGGER.lock().unwrap();
-    let (_it, img) = debugger
-        .images
+    pub fn set_pixel(&self, name: &str, x: usize, y: usize, pixel: Vec3) {
+        let mut images = self.images.lock().unwrap();
+        let (_it, img) = images
            .get_mut(name)
            .unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
        let y_inv = img.h - y - 1;
        img.put_pixel(x, y_inv, pixel);
+        self.tev_client.clone().map(|c| {
+            c.lock().unwrap().send(PacketUpdateImage {
+                image_name: &name,
+                grab_focus: false,
+                channel_names: &["R", "G", "B"],
+                channel_offsets: &[0, 1, 2],
+                channel_strides: &[0, 0, 0],
+                x: x as u32,
+                y: y_inv as u32,
+                width: 1,
+                height: 1,
+                data: &[pixel.x, pixel.y, pixel.z],
+            })
+        });
     }
 
-pub fn set_pixel_grey(name: &str, x: usize, y: usize, grey: f32) {
-    let mut debugger = DEBUGGER.lock().unwrap();
-    let (_it, img) = debugger
-        .images
+    pub fn set_pixel_grey(&self, name: &str, x: usize, y: usize, grey: f32) {
+        let mut images = self.images.lock().unwrap();
+        let (_it, img) = images
            .get_mut(name)
            .unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
        let y_inv = img.h - y - 1;
        img.put_pixel(x, y_inv, [grey, grey, grey].into());
     }
 
-trait ImageSaver {
-    fn save<Q>(&self, path: Q) -> std::io::Result<()>
-    where
-        Q: AsRef<Path> + Sized;
-}
-
     pub fn write_images<P: AsRef<Path>>(
+        &self,
         scene: &Scene,
         render_time: time::Duration,
         output_dir: P,
     ) -> std::io::Result<()> {
        let output_dir: &Path = output_dir.as_ref();
-    let debugger = DEBUGGER.lock().unwrap();
        let now = Local::now();
+        let images = self.images.lock().unwrap();
        // Write out images in consistent order.
-    let mut names = debugger.images.keys().collect::<Vec<_>>();
+        let mut names = images.keys().collect::<Vec<_>>();
        names.sort();
        let mut image_metadata = Vec::new();
        for name in &names {
-        let (it, img) = debugger.images.get(*name).unwrap();
+            let (it, img) = images.get(*name).unwrap();
            let image = format!("{}.png", name);
            let binary = format!("{}.json", name);
            let ratio = img.w as f32 / img.h as f32;
@@ -203,7 +221,10 @@ pub fn write_images<P: AsRef<Path>>(
                )?;
            }
            ImageType::Grey01 | ImageType::GreyNormalized => {
-                serde_json::ser::to_writer(f, &img.pix.iter().map(|v| v.x).collect::<Vec<f32>>())?;
+                serde_json::ser::to_writer(
+                    f,
+                    &img.pix.iter().map(|v| v.x).collect::<Vec<f32>>(),
+                )?;
            }
        };
    }
@@ -220,3 +241,10 @@ pub fn write_images<P: AsRef<Path>>(
        )?;
        Ok(())
    }
+}
+
+trait ImageSaver {
+    fn save<Q>(&self, path: Q) -> std::io::Result<()>
+    where
+        Q: AsRef<Path> + Sized;
+}
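
For reference, the tev wiring added above follows the tev_client crate's TCP workflow as used by the new OutputManager: wrap a connected TcpStream in a TevClient, announce an image with PacketCreateImage, then stream pixel data with PacketUpdateImage. Below is a minimal standalone sketch of that flow, assuming a tev instance is listening locally; the address "127.0.0.1:14158", the image name, and the 2x2 size are placeholders, not values from this change.

use std::net::TcpStream;

use tev_client::{PacketCreateImage, PacketUpdateImage, TevClient};

fn main() -> std::io::Result<()> {
    // Placeholder address; in the tracer this comes from --tev_addr.
    let mut client = TevClient::wrap(TcpStream::connect("127.0.0.1:14158")?);

    // Announce a 2x2 RGB image so tev allocates a framebuffer for it,
    // mirroring OutputManager::register_image above.
    client.send(PacketCreateImage {
        image_name: "sketch",
        grab_focus: false,
        width: 2,
        height: 2,
        channel_names: &["R", "G", "B"],
    })?;

    // Push a single red pixel at (0, 0), mirroring OutputManager::set_pixel above.
    // The offsets/strides match the values used there; for a 1x1 update only the
    // offsets are actually consulted.
    client.send(PacketUpdateImage {
        image_name: "sketch",
        grab_focus: false,
        channel_names: &["R", "G", "B"],
        channel_offsets: &[0, 1, 2],
        channel_strides: &[0, 0, 0],
        x: 0,
        y: 0,
        width: 1,
        height: 1,
        data: &[1.0, 0.0, 0.0],
    })?;
    Ok(())
}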

View File

@@ -25,6 +25,7 @@ use crate::{
     human,
     material::{Lambertian, Material},
     output,
+    output::OutputManager,
     ray::Ray,
     scenes,
     sphere::Sphere,
@@ -109,6 +110,9 @@ pub struct Opt {
     /// Use acceleration data structure, may be BVH or kd-tree depending on scene.
     #[structopt(long = "use_accel")]
     pub use_accel: bool,
+    /// Host:port of running tev instance.
+    #[structopt(long = "tev_addr")]
+    pub tev_addr: Option<String>,
 
     /// Output directory
     #[structopt(
@@ -246,6 +250,7 @@ fn trace_pixel_adaptive(
     x_range: Range<f32>,
     y_range: Range<f32>,
     scene: &Scene,
+    output: &OutputManager,
 ) -> (Vec3, usize) {
     let w = scene.width as f32;
     let h = scene.height as f32;
@@ -260,7 +265,7 @@
        &scene.env_map,
    );
    if depth == 0 {
-        output::set_pixel(output::ADAPTIVE_DEPTH, x, y, [1., 0., 0.].into());
+        output.set_pixel(output::ADAPTIVE_DEPTH, x, y, [1., 0., 0.].into());
        return (center, rays);
    }
    // t = top
@@ -302,6 +307,7 @@
            x_range.start..x_mid,
            y_range.start..y_mid,
            scene,
+            output,
        );
        let tr = trace_pixel_adaptive(
            depth - 1,
@@ -311,6 +317,7 @@
            x_mid..x_range.end,
            y_range.start..y_mid,
            scene,
+            output,
        );
        let bl = trace_pixel_adaptive(
            depth - 1,
@@ -320,6 +327,7 @@
            x_range.start..x_mid,
            y_mid..y_range.end,
            scene,
+            output,
        );
        let br = trace_pixel_adaptive(
            depth - 1,
@@ -329,13 +337,14 @@
            x_mid..x_range.end,
            y_mid..y_range.end,
            scene,
+            output,
        );
        let pixel = (tl.0 + tr.0 + bl.0 + br.0) / 4.;
        let rays = tl.1 + tr.1 + bl.1 + br.1;
        (pixel, rays)
    } else {
        if depth == MAX_ADAPTIVE_DEPTH {
-            output::set_pixel(output::ADAPTIVE_DEPTH, x, y, [0., 1., 0.].into());
+            output.set_pixel(output::ADAPTIVE_DEPTH, x, y, [0., 1., 0.].into());
        }
        (corners, rays)
    }
@@ -418,7 +427,7 @@ enum Response {
     },
 }
 
-fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
+fn render_pixel(scene: &Scene, x: usize, y: usize, output: &OutputManager) -> (Vec3, usize) {
     let (pixel, rays) = if let Some(threshold) = scene.adaptive_subsampling {
         trace_pixel_adaptive(
             MAX_ADAPTIVE_DEPTH,
@@ -428,6 +437,7 @@ fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
            0.0..1.0,
            0.0..1.0,
            scene,
+            output,
        )
    } else {
        let (pixel, rays) = (0..scene.subsamples)
@@ -436,7 +446,7 @@ fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
                ([0., 0., 0.].into(), 0),
                |(p1, r1): (Vec3, usize), (p2, r2): (Vec3, usize)| ((p1 + p2), (r1 + r2)),
            );
-        output::set_pixel_grey(output::RAYS_PER_PIXEL, x, y, rays as f32);
+        output.set_pixel_grey(output::RAYS_PER_PIXEL, x, y, rays as f32);
        (pixel / scene.subsamples as f32, rays)
    };
    // Gamma correct, use gamma 2 correction, which is 1/gamma where gamma=2 which is 1/2 or
@@ -452,6 +462,7 @@ fn render_worker(
     scene: &Scene,
     input_chan: Arc<Mutex<Receiver<Request>>>,
     output_chan: &SyncSender<Response>,
+    output: &OutputManager,
 ) {
     loop {
         let job = { input_chan.lock().unwrap().recv() };
@@ -466,7 +477,7 @@ fn render_worker(
                let batch = false;
                if batch {
                    let (pixels, rays): (Vec<Vec3>, Vec<usize>) = (0..width)
-                        .map(|x| render_pixel(scene, x, y))
+                        .map(|x| render_pixel(scene, x, y, output))
                        .collect::<Vec<(_, _)>>()
                        .into_iter()
                        .unzip();
@@ -483,7 +494,7 @@ fn render_worker(
                        .expect("failed to send pixel response");
                } else {
                    (0..width).for_each(|x| {
-                        let (pixel, rays) = render_pixel(scene, x, y);
+                        let (pixel, rays) = render_pixel(scene, x, y, output);
                        output_chan
                            .send(Response::Pixel {
                                x,
@@ -497,7 +508,7 @@ fn render_worker(
            }
            Request::Pixel { x, y } => {
                trace!("tid {} x {} y {}", tid, x, y);
-                let (pixel, rays) = render_pixel(scene, x, y);
+                let (pixel, rays) = render_pixel(scene, x, y, output);
                output_chan
                    .send(Response::Pixel {
                        x,
@@ -512,7 +523,17 @@
     }
 }
 
-pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
+/*
+lazy_static! {
+    static ref DEBUGGER: Arc<Mutex<OutputManager>> = Arc::new(Mutex::new(OutputManager::new()));
+}
+*/
+
+pub fn render(
+    scene: Scene,
+    output_dir: &Path,
+    tev_addr: &Option<String>,
+) -> std::result::Result<(), std::io::Error> {
     // Default to half the cores to disable hyperthreading.
     let num_threads = scene.num_threads.unwrap_or_else(|| num_cpus::get() / 2);
     let (pixel_req_tx, pixel_req_rx) = sync_channel(2 * num_threads);
@@ -528,20 +549,23 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
    } else {
        core_ids
    };
+    let output = output::OutputManager::new(tev_addr)?;
+    let output = Arc::new(output);
+
    info!("Creating {} render threads", core_ids.len());
-    output::register_image(
+    output.register_image(
        output::MAIN_IMAGE.to_string(),
        (scene.width, scene.height),
        output::ImageType::RGB01,
    );
    if scene.adaptive_subsampling.is_some() {
-        output::register_image(
+        output.register_image(
            output::ADAPTIVE_DEPTH.to_string(),
            (scene.width, scene.height),
            output::ImageType::RGB01,
        );
    }
-    output::register_image(
+    output.register_image(
        output::RAYS_PER_PIXEL.to_string(),
        (scene.width, scene.height),
        output::ImageType::GreyNormalized,
@@ -555,9 +579,10 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
            let s = sync::Arc::clone(&scene);
            let pixel_req_rx = pixel_req_rx.clone();
            let pixel_resp_tx = pixel_resp_tx.clone();
+            let output = sync::Arc::clone(&output);
            thread::spawn(move || {
                core_affinity::set_for_current(id);
-                render_worker(i, &s, pixel_req_rx, &pixel_resp_tx);
+                render_worker(i, &s, pixel_req_rx, &pixel_resp_tx, &output);
            })
        })
        .collect::<Vec<_>>();
@@ -596,12 +621,12 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
        match resp {
            Response::Pixel { x, y, pixel, rs } => {
                current_stat += rs;
-                output::set_pixel(output::MAIN_IMAGE, x, y, pixel);
+                output.set_pixel(output::MAIN_IMAGE, x, y, pixel);
            }
            Response::Line { y, pixels, rs } => {
                current_stat += rs;
                for (x, pixel) in pixels.iter().enumerate() {
-                    output::set_pixel(output::MAIN_IMAGE, x, y, *pixel);
+                    output.set_pixel(output::MAIN_IMAGE, x, y, *pixel);
                }
            }
        }
@@ -639,5 +664,5 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
        )
    );
 
-    output::write_images(&scene, time_diff, output_dir)
+    output.write_images(&scene, time_diff, output_dir)
 }
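
The render() changes above share a single OutputManager across the worker pool: it is built once from tev_addr, wrapped in an Arc, and each spawned thread receives its own clone of the handle (the `let output = sync::Arc::clone(&output);` line before thread::spawn). Below is a condensed sketch of that sharing pattern, with a stub OutputManager standing in for the real type.

use std::sync::Arc;
use std::thread;

// Stub standing in for output::OutputManager; the real struct holds the image
// map and the optional TevClient shown in the output.rs changes above.
struct OutputManager;

impl OutputManager {
    fn set_pixel_grey(&self, _name: &str, _x: usize, _y: usize, _grey: f32) {}
}

fn main() {
    let output = Arc::new(OutputManager);
    let workers: Vec<_> = (0..4usize)
        .map(|tid| {
            // Each worker thread owns its own Arc handle to the shared manager.
            let output = Arc::clone(&output);
            thread::spawn(move || {
                output.set_pixel_grey("rays_per_pixel", tid, 0, tid as f32);
            })
        })
        .collect();
    for w in workers {
        w.join().unwrap();
    }
}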

View File

@@ -3,6 +3,7 @@ name = "tracer"
 version = "0.1.0"
 authors = ["Bill Thiede <git@xinu.tv>"]
 edition = "2021"
+default-run = "tracer"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 

View File

@@ -80,7 +80,7 @@ fn main() -> Result<()> {
            .start(pprof_path.to_str().unwrap().as_bytes())
            .unwrap();
    }
-    let res = render(scene, &opt.output);
+    let res = render(scene, &opt.output, &opt.tev_addr);
    if let Some(pprof_path) = &opt.pprof {
        info!("Saving pprof to {}", pprof_path.to_string_lossy());
        PROFILER.lock().unwrap().stop().unwrap();