Compare commits


5 Commits

9 changed files with 786 additions and 629 deletions

rtiow/Cargo.lock (generated, 963 changed lines)

File diff suppressed because it is too large.


@@ -1,4 +1,5 @@
[workspace]
resolver = "2"
members = [
"noise_explorer",


@@ -30,6 +30,7 @@ stl = {path = "../../../stl"}
strum = { version = "0.24.1", features = ["derive"] }
strum_macros = "0.24.3"
thiserror = "1.0.38"
tev_client = "0.5.2"
#stl = {git = "https://git-private.z.xinu.tv/wathiede/stl"}
[dev-dependencies]


@@ -2,6 +2,7 @@ use std::{
collections::HashMap,
fs::File,
io::BufWriter,
net::TcpStream,
path::Path,
sync::{Arc, Mutex},
time,
@@ -9,9 +10,9 @@ use std::{
use chrono::Local;
use image;
use lazy_static::lazy_static;
use log::info;
use serde_derive::Serialize;
use tev_client::{PacketCreateImage, PacketUpdateImage, TevClient};
use crate::{renderer::Scene, vec3::Vec3};
@@ -24,10 +25,6 @@ pub const ADAPTIVE_DEPTH: &str = "adaptive_depth";
// Grey scale showing rays cast per pixel.
pub const RAYS_PER_PIXEL: &str = "rays_per_pixel";
lazy_static! {
static ref DEBUGGER: Arc<Mutex<Debugger>> = Arc::new(Mutex::new(Debugger::new()));
}
#[derive(Serialize)]
struct ImageMetadata {
name: String,
@@ -74,43 +71,176 @@ impl Image {
}
}
struct Debugger {
images: HashMap<String, (ImageType, Image)>,
pub struct OutputManager {
images: Arc<Mutex<HashMap<String, (ImageType, Image)>>>,
tev_client: Option<Arc<Mutex<TevClient>>>,
}
impl Debugger {
fn new() -> Debugger {
Debugger {
images: HashMap::new(),
}
impl OutputManager {
pub fn new(tev_addr: &Option<String>) -> std::io::Result<OutputManager> {
let tev_client = if let Some(addr) = tev_addr {
Some(Arc::new(Mutex::new(TevClient::wrap(TcpStream::connect(
addr,
)?))))
} else {
None
};
Ok(OutputManager {
images: Arc::new(Mutex::new(HashMap::new())),
tev_client,
})
}
}
pub fn register_image(name: String, dimensions: (usize, usize), it: ImageType) {
let mut debugger = DEBUGGER.lock().unwrap();
debugger
.images
.insert(name, (it, Image::new(dimensions.0, dimensions.1)));
}
pub fn register_image(&self, name: String, dimensions: (usize, usize), it: ImageType) {
let mut images = self.images.lock().unwrap();
images.insert(name.clone(), (it, Image::new(dimensions.0, dimensions.1)));
self.tev_client.clone().map(|c| {
c.lock().unwrap().send(PacketCreateImage {
image_name: &name,
grab_focus: false,
width: dimensions.0 as u32,
height: dimensions.1 as u32,
channel_names: &["R", "G", "B"],
})
});
}
pub fn set_pixel(name: &str, x: usize, y: usize, pixel: Vec3) {
let mut debugger = DEBUGGER.lock().unwrap();
let (_it, img) = debugger
.images
.get_mut(name)
.unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
let y_inv = img.h - y - 1;
img.put_pixel(x, y_inv, pixel);
}
pub fn set_pixel(&self, name: &str, x: usize, y: usize, pixel: Vec3) {
let mut images = self.images.lock().unwrap();
let (_it, img) = images
.get_mut(name)
.unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
let y_inv = img.h - y - 1;
img.put_pixel(x, y_inv, pixel);
self.tev_client.clone().map(|c| {
c.lock().unwrap().send(PacketUpdateImage {
image_name: &name,
grab_focus: false,
channel_names: &["R", "G", "B"],
channel_offsets: &[0, 1, 2],
channel_strides: &[0, 0, 0],
x: x as u32,
y: y_inv as u32,
width: 1,
height: 1,
data: &[pixel.x, pixel.y, pixel.z],
})
});
}
pub fn set_pixel_grey(name: &str, x: usize, y: usize, grey: f32) {
let mut debugger = DEBUGGER.lock().unwrap();
let (_it, img) = debugger
.images
.get_mut(name)
.unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
let y_inv = img.h - y - 1;
img.put_pixel(x, y_inv, [grey, grey, grey].into());
pub fn set_pixel_grey(&self, name: &str, x: usize, y: usize, grey: f32) {
let mut images = self.images.lock().unwrap();
let (_it, img) = images
.get_mut(name)
.unwrap_or_else(|| panic!("couldn't find image named '{}'", name));
let y_inv = img.h - y - 1;
img.put_pixel(x, y_inv, [grey, grey, grey].into());
}
pub fn write_images<P: AsRef<Path>>(
&self,
scene: &Scene,
render_time: time::Duration,
output_dir: P,
) -> std::io::Result<()> {
let output_dir: &Path = output_dir.as_ref();
let now = Local::now();
let images = self.images.lock().unwrap();
// Write out images in consistent order.
let mut names = images.keys().collect::<Vec<_>>();
names.sort();
let mut image_metadata = Vec::new();
for name in &names {
let (it, img) = images.get(*name).unwrap();
let image = format!("{}.png", name);
let binary = format!("{}.json", name);
let ratio = img.w as f32 / img.h as f32;
let size = (img.w, img.h);
let image_path = output_dir.join(&image);
let binary_path = output_dir.join(&binary);
image_metadata.push(ImageMetadata {
name: name.to_string(),
image,
binary,
ratio,
size,
format: *it,
});
info!("Saving {}", image_path.to_string_lossy());
match it {
ImageType::RGB01 => {
let mut out_img = image::RgbImage::new(img.w as u32, img.h as u32);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Rgb([
(pixel[0] * 255.).min(255.) as u8,
(pixel[1] * 255.).min(255.) as u8,
(pixel[2] * 255.).min(255.) as u8,
])
});
out_img.save(image_path)?;
}
ImageType::Grey01 => {
let mut out_img = image::GrayImage::new(img.w as u32, img.h as u32);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Luma([(pixel[0] * 255.).min(255.) as u8])
});
out_img.save(image_path)?;
}
ImageType::GreyNormalized => {
let mut out_img = image::GrayImage::new(img.w as u32, img.h as u32);
let max_val = img.pix.iter().map(|v| v.x).fold(0., f32::max);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Luma([(pixel[0] / max_val * 255.).min(255.) as u8])
});
out_img.save(image_path)?;
}
};
info!("Saving {}", binary_path.to_string_lossy());
let f = File::create(output_dir.join(binary_path))?;
let f = BufWriter::new(f);
match it {
ImageType::RGB01 => {
serde_json::ser::to_writer(
f,
&img.pix
.iter()
.map(|v| [v.x, v.y, v.z])
.collect::<Vec<[f32; 3]>>(),
)?;
}
ImageType::Grey01 | ImageType::GreyNormalized => {
serde_json::ser::to_writer(
f,
&img.pix.iter().map(|v| v.x).collect::<Vec<f32>>(),
)?;
}
};
}
let f = File::create(output_dir.join("data.json"))?;
let f = BufWriter::new(f);
serde_json::ser::to_writer(
f,
&Data {
timestamp: now.timestamp(),
render_time_seconds: render_time.as_secs_f32(),
scene,
image_metadata,
},
)?;
Ok(())
}
}
trait ImageSaver {
@@ -118,105 +248,3 @@ trait ImageSaver {
where
Q: AsRef<Path> + Sized;
}
pub fn write_images<P: AsRef<Path>>(
scene: &Scene,
render_time: time::Duration,
output_dir: P,
) -> std::io::Result<()> {
let output_dir: &Path = output_dir.as_ref();
let debugger = DEBUGGER.lock().unwrap();
let now = Local::now();
// Write out images in consistent order.
let mut names = debugger.images.keys().collect::<Vec<_>>();
names.sort();
let mut image_metadata = Vec::new();
for name in &names {
let (it, img) = debugger.images.get(*name).unwrap();
let image = format!("{}.png", name);
let binary = format!("{}.json", name);
let ratio = img.w as f32 / img.h as f32;
let size = (img.w, img.h);
let image_path = output_dir.join(&image);
let binary_path = output_dir.join(&binary);
image_metadata.push(ImageMetadata {
name: name.to_string(),
image,
binary,
ratio,
size,
format: *it,
});
info!("Saving {}", image_path.to_string_lossy());
match it {
ImageType::RGB01 => {
let mut out_img = image::RgbImage::new(img.w as u32, img.h as u32);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Rgb([
(pixel[0] * 255.).min(255.) as u8,
(pixel[1] * 255.).min(255.) as u8,
(pixel[2] * 255.).min(255.) as u8,
])
});
out_img.save(image_path)?;
}
ImageType::Grey01 => {
let mut out_img = image::GrayImage::new(img.w as u32, img.h as u32);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Luma([(pixel[0] * 255.).min(255.) as u8])
});
out_img.save(image_path)?;
}
ImageType::GreyNormalized => {
let mut out_img = image::GrayImage::new(img.w as u32, img.h as u32);
let max_val = img.pix.iter().map(|v| v.x).fold(0., f32::max);
out_img
.enumerate_pixels_mut()
.enumerate()
.for_each(|(i, (_x, _y, p))| {
let pixel = img.pix[i];
*p = image::Luma([(pixel[0] / max_val * 255.).min(255.) as u8])
});
out_img.save(image_path)?;
}
};
info!("Saving {}", binary_path.to_string_lossy());
let f = File::create(output_dir.join(binary_path))?;
let f = BufWriter::new(f);
match it {
ImageType::RGB01 => {
serde_json::ser::to_writer(
f,
&img.pix
.iter()
.map(|v| [v.x, v.y, v.z])
.collect::<Vec<[f32; 3]>>(),
)?;
}
ImageType::Grey01 | ImageType::GreyNormalized => {
serde_json::ser::to_writer(f, &img.pix.iter().map(|v| v.x).collect::<Vec<f32>>())?;
}
};
}
let f = File::create(output_dir.join("data.json"))?;
let f = BufWriter::new(f);
serde_json::ser::to_writer(
f,
&Data {
timestamp: now.timestamp(),
render_time_seconds: render_time.as_secs_f32(),
scene,
image_metadata,
},
)?;
Ok(())
}
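
Note: the hunks above replace the module's global lazy_static Debugger with an instance-owned OutputManager. The image registry is unchanged, but when a tev address is supplied each registered image is mirrored to a running tev viewer over TCP. For reference, the tev_client calls it relies on (TevClient::wrap, PacketCreateImage, PacketUpdateImage) follow the pattern in this standalone sketch; the sketch is not part of the change, and the address, the image name, and the use of ? on send() are assumptions here.

    use std::net::TcpStream;

    use tev_client::{PacketCreateImage, PacketUpdateImage, TevClient};

    fn main() -> std::io::Result<()> {
        // Connect to a running tev instance; the address is an assumption.
        let mut client = TevClient::wrap(TcpStream::connect("127.0.0.1:14158")?);

        // Announce a small RGB image, as OutputManager::register_image does.
        client.send(PacketCreateImage {
            image_name: "demo",
            grab_focus: false,
            width: 2,
            height: 1,
            channel_names: &["R", "G", "B"],
        })?;

        // Stream one red pixel at (0, 0), as OutputManager::set_pixel does.
        client.send(PacketUpdateImage {
            image_name: "demo",
            grab_focus: false,
            channel_names: &["R", "G", "B"],
            channel_offsets: &[0, 1, 2],
            channel_strides: &[0, 0, 0],
            x: 0,
            y: 0,
            width: 1,
            height: 1,
            data: &[1.0, 0.0, 0.0],
        })?;
        Ok(())
    }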


@@ -4,11 +4,12 @@ use crate::{
cuboid::Cuboid,
hitable::Hit,
hitable_list::HitableList,
material::{Lambertian, Material, Metal},
material::{Dielectric, DiffuseLight, Isotropic, Lambertian, Material, Metal},
renderer::Scene,
sphere::Sphere,
texture::{EnvMap, Texture},
};
use chrono::IsoWeek;
use serde::Deserialize;
use std::{collections::HashMap, fs::File, io::BufReader, path::PathBuf, sync::Arc};
use stl::STL;
@@ -46,6 +47,10 @@ impl TryFrom<Config> for Scene {
for mc in c.materials {
let v: Arc<dyn Material> = match mc.material {
Materials::Metal { albedo, fuzzy } => Arc::new(Metal::new(albedo, fuzzy)),
Materials::Dielectric { ref_idx } => Arc::new(Dielectric::new(ref_idx)),
Materials::DiffuseLight { texture } => Arc::new(DiffuseLight::new(texture)),
Materials::Isotropic { texture } => Arc::new(Isotropic::new(texture)),
Materials::Lambertian { texture } => Arc::new(Lambertian::new(texture)),
};
if materials.insert(mc.name.clone(), v).is_some() {
return Err(ConfigError::DuplicateMaterial(mc.name));
@@ -173,6 +178,15 @@ struct MaterialConfig {
enum Materials {
#[serde(rename = "metal")]
Metal { albedo: [f32; 3], fuzzy: f32 },
#[serde(rename = "dielectric")]
Dielectric { ref_idx: f32 },
// TODO(wathiede): these all take Textures, for now, only support RGB
#[serde(rename = "diffuse_light")]
DiffuseLight { texture: [f32; 3] },
#[serde(rename = "isotropic")]
Isotropic { texture: [f32; 3] },
#[serde(rename = "lambertian")]
Lambertian { texture: [f32; 3] },
}
#[derive(Debug, Deserialize)]
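
Note: the new Materials variants are picked by the type key of each [[materials]] table in the scene TOML shown further down, with the serde rename attributes mapping, for example, type = "dielectric" plus ref_idx to Materials::Dielectric. Below is a minimal standalone sketch of that round trip; the #[serde(tag = "type")] attribute sits outside the visible hunk, so it is an assumption, as is the use of the toml crate.

    // Standalone sketch, not the project's code.
    use serde::Deserialize;

    #[derive(Debug, Deserialize, PartialEq)]
    #[serde(tag = "type")] // assumed: the real tag attribute is outside the hunk
    enum Materials {
        #[serde(rename = "metal")]
        Metal { albedo: [f32; 3], fuzzy: f32 },
        #[serde(rename = "dielectric")]
        Dielectric { ref_idx: f32 },
        #[serde(rename = "diffuse_light")]
        DiffuseLight { texture: [f32; 3] },
        #[serde(rename = "isotropic")]
        Isotropic { texture: [f32; 3] },
        #[serde(rename = "lambertian")]
        Lambertian { texture: [f32; 3] },
    }

    fn main() {
        // Matches the "glass" material in the updated scene file below.
        let src = "type = \"dielectric\"\nref_idx = 1.5";
        let m: Materials = toml::from_str(src).unwrap();
        assert_eq!(m, Materials::Dielectric { ref_idx: 1.5 });
    }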


@@ -25,6 +25,7 @@ use crate::{
human,
material::{Lambertian, Material},
output,
output::OutputManager,
ray::Ray,
scenes,
sphere::Sphere,
@@ -109,9 +110,17 @@ pub struct Opt {
/// Use acceleration data structure, may be BVH or kd-tree depending on scene.
#[structopt(long = "use_accel")]
pub use_accel: bool,
/// Host:port of running tev instance.
#[structopt(long = "tev_addr")]
pub tev_addr: Option<String>,
/// Output directory
#[structopt(parse(from_os_str), default_value = "/tmp/tracer")]
#[structopt(
short = "o",
long = "output",
parse(from_os_str),
default_value = "/tmp/tracer"
)]
pub output: PathBuf,
}
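
Note: the two options above give the binary an optional tev endpoint and explicit -o / --output flags for the output directory. A stripped-down sketch of how these flags parse with structopt; the struct here is a stand-in, not the project's Opt, and the example address is an assumption.

    use std::path::PathBuf;

    use structopt::StructOpt;

    #[derive(Debug, StructOpt)]
    struct Opt {
        /// Host:port of running tev instance.
        #[structopt(long = "tev_addr")]
        tev_addr: Option<String>,
        /// Output directory.
        #[structopt(short = "o", long = "output", parse(from_os_str), default_value = "/tmp/tracer")]
        output: PathBuf,
    }

    fn main() {
        // e.g. `tracer --tev_addr 127.0.0.1:14158 -o /tmp/run1`
        let opt = Opt::from_args();
        println!("tev_addr={:?} output={:?}", opt.tev_addr, opt.output);
    }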
@@ -241,6 +250,7 @@ fn trace_pixel_adaptive(
x_range: Range<f32>,
y_range: Range<f32>,
scene: &Scene,
output: &OutputManager,
) -> (Vec3, usize) {
let w = scene.width as f32;
let h = scene.height as f32;
@@ -255,7 +265,7 @@ fn trace_pixel_adaptive(
&scene.env_map,
);
if depth == 0 {
output::set_pixel(output::ADAPTIVE_DEPTH, x, y, [1., 0., 0.].into());
output.set_pixel(output::ADAPTIVE_DEPTH, x, y, [1., 0., 0.].into());
return (center, rays);
}
// t = top
@@ -297,6 +307,7 @@ fn trace_pixel_adaptive(
x_range.start..x_mid,
y_range.start..y_mid,
scene,
output,
);
let tr = trace_pixel_adaptive(
depth - 1,
@@ -306,6 +317,7 @@ fn trace_pixel_adaptive(
x_mid..x_range.end,
y_range.start..y_mid,
scene,
output,
);
let bl = trace_pixel_adaptive(
depth - 1,
@@ -315,6 +327,7 @@ fn trace_pixel_adaptive(
x_range.start..x_mid,
y_mid..y_range.end,
scene,
output,
);
let br = trace_pixel_adaptive(
depth - 1,
@@ -324,13 +337,14 @@ fn trace_pixel_adaptive(
x_mid..x_range.end,
y_mid..y_range.end,
scene,
output,
);
let pixel = (tl.0 + tr.0 + bl.0 + br.0) / 4.;
let rays = tl.1 + tr.1 + bl.1 + br.1;
(pixel, rays)
} else {
if depth == MAX_ADAPTIVE_DEPTH {
output::set_pixel(output::ADAPTIVE_DEPTH, x, y, [0., 1., 0.].into());
output.set_pixel(output::ADAPTIVE_DEPTH, x, y, [0., 1., 0.].into());
}
(corners, rays)
}
@@ -413,7 +427,7 @@ enum Response {
},
}
fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
fn render_pixel(scene: &Scene, x: usize, y: usize, output: &OutputManager) -> (Vec3, usize) {
let (pixel, rays) = if let Some(threshold) = scene.adaptive_subsampling {
trace_pixel_adaptive(
MAX_ADAPTIVE_DEPTH,
@@ -423,6 +437,7 @@ fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
0.0..1.0,
0.0..1.0,
scene,
output,
)
} else {
let (pixel, rays) = (0..scene.subsamples)
@@ -431,7 +446,7 @@ fn render_pixel(scene: &Scene, x: usize, y: usize) -> (Vec3, usize) {
([0., 0., 0.].into(), 0),
|(p1, r1): (Vec3, usize), (p2, r2): (Vec3, usize)| ((p1 + p2), (r1 + r2)),
);
output::set_pixel_grey(output::RAYS_PER_PIXEL, x, y, rays as f32);
output.set_pixel_grey(output::RAYS_PER_PIXEL, x, y, rays as f32);
(pixel / scene.subsamples as f32, rays)
};
// Gamma correct, use gamma 2 correction, which is 1/gamma where gamma=2 which is 1/2 or
@@ -447,6 +462,7 @@ fn render_worker(
scene: &Scene,
input_chan: Arc<Mutex<Receiver<Request>>>,
output_chan: &SyncSender<Response>,
output: &OutputManager,
) {
loop {
let job = { input_chan.lock().unwrap().recv() };
@@ -461,7 +477,7 @@ fn render_worker(
let batch = false;
if batch {
let (pixels, rays): (Vec<Vec3>, Vec<usize>) = (0..width)
.map(|x| render_pixel(scene, x, y))
.map(|x| render_pixel(scene, x, y, output))
.collect::<Vec<(_, _)>>()
.into_iter()
.unzip();
@@ -478,7 +494,7 @@ fn render_worker(
.expect("failed to send pixel response");
} else {
(0..width).for_each(|x| {
let (pixel, rays) = render_pixel(scene, x, y);
let (pixel, rays) = render_pixel(scene, x, y, output);
output_chan
.send(Response::Pixel {
x,
@@ -492,7 +508,7 @@ fn render_worker(
}
Request::Pixel { x, y } => {
trace!("tid {} x {} y {}", tid, x, y);
let (pixel, rays) = render_pixel(scene, x, y);
let (pixel, rays) = render_pixel(scene, x, y, output);
output_chan
.send(Response::Pixel {
x,
@@ -507,7 +523,17 @@ fn render_worker(
}
}
pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::io::Error> {
/*
lazy_static! {
static ref DEBUGGER: Arc<Mutex<OutputManager>> = Arc::new(Mutex::new(OutputManager::new()));
}
*/
pub fn render(
scene: Scene,
output_dir: &Path,
tev_addr: &Option<String>,
) -> std::result::Result<(), std::io::Error> {
// Default to half the cores to disable hyperthreading.
let num_threads = scene.num_threads.unwrap_or_else(|| num_cpus::get() / 2);
let (pixel_req_tx, pixel_req_rx) = sync_channel(2 * num_threads);
@@ -523,20 +549,23 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::i
} else {
core_ids
};
let output = output::OutputManager::new(tev_addr)?;
let output = Arc::new(output);
info!("Creating {} render threads", core_ids.len());
output::register_image(
output.register_image(
output::MAIN_IMAGE.to_string(),
(scene.width, scene.height),
output::ImageType::RGB01,
);
if scene.adaptive_subsampling.is_some() {
output::register_image(
output.register_image(
output::ADAPTIVE_DEPTH.to_string(),
(scene.width, scene.height),
output::ImageType::RGB01,
);
}
output::register_image(
output.register_image(
output::RAYS_PER_PIXEL.to_string(),
(scene.width, scene.height),
output::ImageType::GreyNormalized,
@@ -550,9 +579,10 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::i
let s = sync::Arc::clone(&scene);
let pixel_req_rx = pixel_req_rx.clone();
let pixel_resp_tx = pixel_resp_tx.clone();
let output = sync::Arc::clone(&output);
thread::spawn(move || {
core_affinity::set_for_current(id);
render_worker(i, &s, pixel_req_rx, &pixel_resp_tx);
render_worker(i, &s, pixel_req_rx, &pixel_resp_tx, &output);
})
})
.collect::<Vec<_>>();
@@ -591,12 +621,12 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::i
match resp {
Response::Pixel { x, y, pixel, rs } => {
current_stat += rs;
output::set_pixel(output::MAIN_IMAGE, x, y, pixel);
output.set_pixel(output::MAIN_IMAGE, x, y, pixel);
}
Response::Line { y, pixels, rs } => {
current_stat += rs;
for (x, pixel) in pixels.iter().enumerate() {
output::set_pixel(output::MAIN_IMAGE, x, y, *pixel);
output.set_pixel(output::MAIN_IMAGE, x, y, *pixel);
}
}
}
@@ -634,5 +664,5 @@ pub fn render(scene: Scene, output_dir: &Path) -> std::result::Result<(), std::i
)
);
output::write_images(&scene, time_diff, output_dir)
output.write_images(&scene, time_diff, output_dir)
}
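
Note: most of the edits in this file thread a shared Arc<OutputManager> through render, render_worker, and render_pixel in place of the removed module-level global, so the per-pixel debug images (ADAPTIVE_DEPTH, RAYS_PER_PIXEL) keep working. For readers new to trace_pixel_adaptive, the sketch below shows the underlying idea (sample the corners of the pixel footprint, accept their average when they agree, otherwise split into four quadrants and recurse) using a plain scalar function instead of ray casts; the names and the spread test are simplifications, not the project's implementation.

    use std::ops::Range;

    // Stand-in for tracing a ray through the image plane at (x, y).
    fn sample(x: f32, y: f32) -> f32 {
        (x * 10.0).sin() * (y * 10.0).cos()
    }

    // Returns (pixel value, number of samples taken) for one pixel footprint.
    fn adaptive(depth: usize, threshold: f32, xr: Range<f32>, yr: Range<f32>) -> (f32, usize) {
        let corners = [
            sample(xr.start, yr.start),
            sample(xr.end, yr.start),
            sample(xr.start, yr.end),
            sample(xr.end, yr.end),
        ];
        let avg = corners.iter().sum::<f32>() / 4.0;
        let spread = corners.iter().fold(0.0f32, |m, c| m.max((c - avg).abs()));
        if depth == 0 || spread <= threshold {
            // Corners agree (or the depth limit was hit): accept the average.
            return (avg, 4);
        }
        // Corners disagree: subdivide into four quadrants and recurse.
        let xm = (xr.start + xr.end) / 2.0;
        let ym = (yr.start + yr.end) / 2.0;
        let quads = [
            adaptive(depth - 1, threshold, xr.start..xm, yr.start..ym),
            adaptive(depth - 1, threshold, xm..xr.end, yr.start..ym),
            adaptive(depth - 1, threshold, xr.start..xm, ym..yr.end),
            adaptive(depth - 1, threshold, xm..xr.end, ym..yr.end),
        ];
        let value = quads.iter().map(|q| q.0).sum::<f32>() / 4.0;
        let samples: usize = quads.iter().map(|q| q.1).sum();
        (value, samples)
    }

    fn main() {
        let (value, samples) = adaptive(4, 0.05, 0.0..1.0, 0.0..1.0);
        println!("value {value:.3} from {samples} samples");
    }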


@@ -3,6 +3,7 @@ name = "tracer"
version = "0.1.0"
authors = ["Bill Thiede <git@xinu.tv>"]
edition = "2021"
default-run = "tracer"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


@@ -1,49 +1,74 @@
[scene]
width = 768
height = 768
#subsamples = 1000
height = 512
subsamples = 100
[camera]
lookfrom = [0.0, 30.0, -100.0]
lookat = [0.0, 0.0, 0.0]
lookfrom = [0.0, 50.0, 100.0]
lookat = [0.0, 10.0, 0.0]
fov = 45
aperture = 0.0
focus_dist = 10.0
time_min = 0.0
time_max = 1.0
[[materials]]
name = "light1"
type = "isotropic"
texture = [20, 10, 10]
[[materials]]
name = "yellow"
type = "isotropic"
texture = [1, 1, 0]
[[materials]]
name = "magenta"
type = "lambertian"
texture = [1, 0, 1]
[[materials]]
name = "green"
type = "metal"
albedo = [0, 1, 0]
fuzzy = 1.5
type = "diffuse_light"
texture = [0, 1, 0]
[[materials]]
name = "blue"
name = "metal"
type = "metal"
albedo = [0, 0, 1]
fuzzy = 1.5
albedo = [1, 1, 1]
fuzzy = 0
[[materials]]
name = "red"
type = "metal"
albedo = [1, 0, 0]
fuzzy = 1.5
name = "glass"
type = "dielectric"
ref_idx = 1.5
[[hitables]]
type = "cuboid"
min = [-10.0, -10.0, -10.0]
max = [10.0, 10.0, 10.0]
material_name = "green"
type = "sphere"
center = [-30.0, 0.0, 0.0]
radius = 10
material_name = "yellow"
[[hitables]]
type = "sphere"
center = [30.0, 0.0, 0.0]
radius = 10
material_name = "blue"
material_name = "green"
[[hitables]]
type = "sphere"
center = [0.0, -10.0, 0.0]
radius = 10
material_name = "metal"
[[hitables]]
type = "sphere"
center = [0.0, 0.0, -30.0]
radius = 10
material_name = "magenta"
[[hitables]]
type = "stl"
path = "/net/nasx.h.xinu.tv/x/3dprint/stl/stanford_dragon.stl"
scale = 200
material_name = "red"
material_name = "glass"
#[[hitables]]
#type = "sphere"
#center = [0.0, 50.0, -100.0]
#radius = 10
#material_name = "light1"
[envmap]
path = "/home/wathiede/src/xinu.tv/raytracers/rtiow/renderer/images/52681723945_e1d94d3df9_6k.jpg"


@@ -80,7 +80,7 @@ fn main() -> Result<()> {
.start(pprof_path.to_str().unwrap().as_bytes())
.unwrap();
}
let res = render(scene, &opt.output);
let res = render(scene, &opt.output, &opt.tev_addr);
if let Some(pprof_path) = &opt.pprof {
info!("Saving pprof to {}", pprof_path.to_string_lossy());
PROFILER.lock().unwrap().stop().unwrap();