Continuing with the cleanup

This commit is contained in:
Wito Wiala 2026-02-15 14:48:50 +00:00
parent 30e8cf85f8
commit 45e866ebd1
57 changed files with 390 additions and 377 deletions

View file

@ -55,7 +55,8 @@ cuda_builder = { git = "https://github.com/Rust-GPU/Rust-CUDA", branch = "main",
cc = "1.2.53"
[workspace]
members = ["shared", "crates/ptex-filter"]
members = ["shared"]
exclude = ["crates/ptex-filter"]
[lints.clippy]
excessive_precision = "allow"

View file

@ -1,10 +1,10 @@
# PBRust
# PBRusT
## Description
A Rust implementation of the physically based renderer described in the tremendous book *Physically Based Rendering: From Theory to Implementation* by Matt Pharr, Wenzel Jakob, and Greg Humphreys. This project aims to explore modern Rust features, and create a performant and stable rendering engine.
This implementation is currently under development and serves as a learning exercise for both advanced rendering techniques and cutting-edge Rust programming.
This implementation is currently under development and serves as a learning exercise for both advanced rendering techniques and Rust programming.
## Getting Started
@ -16,7 +16,7 @@ rustup toolchain install nightly
rustup default nightly
```
To get a local copy up and running, follow these simple steps.
To get a local copy up and running:
1. **Clone the repository:**
```sh

View file

@ -1,3 +1,5 @@
#![allow(unused)]
[package]
name = "ptex-filter"
version = "0.1.0"

View file

@ -1,3 +1,6 @@
#![allow(unused)]
#![allow(dead_code)]
mod ffi;
pub use ffi::{ptex_filter_create, PtexFilterOptions, PtexFilterType};

View file

@ -264,6 +264,18 @@ pub struct RGB {
pub b: Float,
}
impl From<[Float; 3]> for RGB {
    /// Builds an `RGB` color from a three-element array of channel values
    /// (red, green, blue, in that order).
    fn from(channels: [Float; 3]) -> Self {
        let [r, g, b] = channels;
        RGB::new(r, g, b)
    }
}
impl From<&[Float; 3]> for RGB {
    /// Builds an `RGB` color from a borrowed three-element array of
    /// channel values (red, green, blue, in that order).
    fn from(channels: &[Float; 3]) -> Self {
        // `Float` is `Copy` (the original indexed through the reference),
        // so destructuring by value through the reference is equivalent.
        let &[r, g, b] = channels;
        RGB::new(r, g, b)
    }
}
impl From<(Float, Float, Float)> for RGB {
fn from(triplet: (Float, Float, Float)) -> Self {
RGB::new(triplet.0, triplet.1, triplet.2)
@ -1027,6 +1039,16 @@ pub struct Coeffs {
pub c2: Float,
}
impl From<&[Float; 3]> for Coeffs {
    /// Interprets a borrowed three-element array as the coefficients
    /// `c0`, `c1`, `c2`, in order.
    fn from(values: &[Float; 3]) -> Coeffs {
        // Destructure once instead of indexing three times; field
        // shorthand keeps the mapping `values[i] -> c{i}` obvious.
        let &[c0, c1, c2] = values;
        Coeffs { c0, c1, c2 }
    }
}
impl Add for Coeffs {
type Output = Self;
#[inline(always)]

View file

@ -14,7 +14,7 @@ use enum_dispatch::enum_dispatch;
use std::sync::Arc;
#[enum_dispatch]
pub trait PrimitiveTrait {
pub trait PrimitiveTrait: Send + Sync {
fn bounds(&self) -> Bounds3f;
fn intersect(&self, r: &Ray, t_max: Option<Float>) -> Option<ShapeIntersection>;
fn intersect_p(&self, r: &Ray, t_max: Option<Float>) -> bool;
@ -72,7 +72,7 @@ impl PrimitiveTrait for GeometricPrimitive {
si.set_intersection_properties(
self.material,
self.area_light,
self.medium_interface.clone(),
self.medium_interface,
r.medium,
);
@ -100,11 +100,11 @@ impl PrimitiveTrait for SimplePrimitive {
todo!()
}
fn intersect(&self, r: &Ray, t_max: Option<Float>) -> Option<ShapeIntersection> {
fn intersect(&self, _r: &Ray, _t_max: Option<Float>) -> Option<ShapeIntersection> {
todo!()
}
fn intersect_p(&self, r: &Ray, t_max: Option<Float>) -> bool {
fn intersect_p(&self, _r: &Ray, _t_max: Option<Float>) -> bool {
todo!()
}
}
@ -187,7 +187,7 @@ pub struct LinearBVHNode {
#[derive(Debug, Clone, Copy)]
pub struct BVHAggregatePrimitive {
max_prims_in_node: u32,
primitives: *const Ptr<Primitive>,
primitives: Ptr<[Primitive]>,
nodes: Ptr<LinearBVHNode>,
}
@ -200,18 +200,20 @@ impl PrimitiveTrait for BVHAggregatePrimitive {
}
}
fn intersect(&self, r: &Ray, t_max: Option<Float>) -> Option<ShapeIntersection> {
fn intersect(&self, _r: &Ray, _t_max: Option<Float>) -> Option<ShapeIntersection> {
if !self.nodes.is_null() {
return None;
}
self.intersect(r, t_max)
todo!()
// self.intersect(r, t_max)
}
fn intersect_p(&self, r: &Ray, t_max: Option<Float>) -> bool {
fn intersect_p(&self, _r: &Ray, _t_max: Option<Float>) -> bool {
if !self.nodes.is_null() {
return false;
}
self.intersect_p(r, t_max)
todo!()
// self.intersect_p(r, t_max)
}
}

View file

@ -16,3 +16,4 @@ pub mod textures;
pub mod utils;
pub use core::pbrt::*;
pub use utils::ptr::Ptr;

View file

@ -12,7 +12,7 @@ use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SplitMethod {
AH,
SAH,
Hlbvh,
Middle,
EqualCounts,
@ -390,8 +390,8 @@ impl BVHAggregate {
}
let mask = 1 << bit_index;
let first_code = morton_prims[0].morton_code;
let last_match_index = find_interval(n_primitives, |index| {
let current_code = morton_prims[index].morton_code;
let last_match_index = find_interval(n_primitives.try_into().unwrap(), |index| {
let current_code = morton_prims[index as usize].morton_code;
(current_code & mask) == (first_code & mask)
});
let split_offset = (last_match_index + 1) as usize;

View file

@ -1,4 +1,5 @@
use shared::Float;
use shared::Ptr;
use shared::core::bssrdf::BSSRDFTable;
pub struct BSSRDFTableData {
@ -27,13 +28,13 @@ impl BSSRDFTableData {
pub fn view(&self, rho_ptr: *const Float, radius_ptr: *const Float) -> BSSRDFTable {
BSSRDFTable {
rho_samples: rho_ptr,
rho_samples: rho_ptr.into(),
n_rho: self.rho_samples.len() as u32,
radius_samples: radius_ptr,
radius_samples: radius_ptr.into(),
n_radius: self.radius_samples.len() as u32,
profile: self.profile,
profile_cdf: self.profile_cdf,
rho_eff: self.rho_eff,
profile: Ptr::from(self.profile.as_ptr()),
profile_cdf: Ptr::from(self.profile_cdf.as_ptr()),
rho_eff: Ptr::from(self.rho_eff.as_ptr()),
}
}
}

View file

@ -1,10 +1,12 @@
use crate::core::image::Image;
use crate::core::image::ImageMetadata;
use crate::core::image::{Image, ImageIO};
use crate::utils::read_float_file;
use crate::utils::{Arena, FileLoc, ParameterDictionary};
use anyhow::{Result, anyhow};
use shared::Ptr;
use shared::cameras::*;
use shared::core::camera::{Camera, CameraBase, CameraTrait, CameraTransform};
use shared::core::color::ColorEncoding::SRGB;
use shared::core::color::SRGB;
use shared::core::film::Film;
use shared::core::geometry::{Bounds2f, Point2f, Point2i, Vector2f, Vector3f};
use shared::core::image::PixelFormat;
@ -55,11 +57,11 @@ impl CameraBaseParameters {
pub trait CameraBaseFactory {
fn create(p: CameraBaseParameters) -> CameraBase {
CameraBase {
camera_transform: p.camera_transform.as_ref().clone(),
camera_transform: p.camera_transform.clone(),
shutter_open: p.shutter_open,
shutter_close: p.shutter_close,
film: p.film.clone(),
medium: p.medium.clone(),
film: Ptr::from(p.film.clone().as_ref()),
medium: Ptr::from(p.medium.clone().as_ref()),
min_pos_differential_x: Vector3f::default(),
min_pos_differential_y: Vector3f::default(),
min_dir_differential_x: Vector3f::default(),
@ -95,8 +97,10 @@ pub trait CameraFactory {
medium: Medium,
film: Arc<Film>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self, String>;
arena: &Arena,
) -> Result<Self>
where
Self: Sized;
}
impl CameraFactory for Camera {
@ -107,14 +111,17 @@ impl CameraFactory for Camera {
medium: Medium,
film: Arc<Film>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self, String> {
arena: &Arena,
) -> Result<Self>
where
Self: Sized,
{
match name {
"perspective" => {
let full_res = film.full_resolution();
let camera_params =
CameraBaseParameters::new(camera_transform, film, medium.into(), params, loc);
let base = CameraBase::new(camera_params);
let base = CameraBase::create(camera_params);
let lens_radius = params.get_one_float("lensradius", 0.);
let focal_distance = params.get_one_float("focaldistance", 1e6);
let frame = params.get_one_float(
@ -142,7 +149,7 @@ impl CameraFactory for Camera {
Point2f::new(sw[1], sw[3]),
);
} else {
return Err(format!(
return Err(anyhow!(
"{}: screenwindow param must have four values",
loc
));
@ -161,7 +168,7 @@ impl CameraFactory for Camera {
let full_res = film.full_resolution();
let camera_params =
CameraBaseParameters::new(camera_transform, film, medium.into(), params, loc);
let base = CameraBase::new(camera_params);
let base = CameraBase::create(camera_params);
let lens_radius = params.get_one_float("lensradius", 0.);
let focal_distance = params.get_one_float("focaldistance", 1e6);
let frame = params.get_one_float(
@ -189,7 +196,7 @@ impl CameraFactory for Camera {
Point2f::new(sw[1], sw[3]),
);
} else {
return Err(format!(
return Err(anyhow!(
"{}: screenwindow param must have four values",
loc
));
@ -203,18 +210,18 @@ impl CameraFactory for Camera {
"realistic" => {
let camera_params =
CameraBaseParameters::new(camera_transform, film, medium.into(), params, loc);
let base = CameraBase::new(camera_params);
let base = CameraBase::create(camera_params);
let aperture_diameter = params.get_one_float("aperturediameter", 1.);
let focal_distance = params.get_one_float("focaldistance", 10.);
let lens_file = params.get_one_string("lensfile", "");
if lens_file.is_empty() {
return Err(format!("{}: No lens file supplied", loc));
return Err(anyhow!("{}: No lens file supplied", loc));
}
let lens_params = read_float_file(lens_file.as_str()).map_err(|e| e.to_string())?;
let lens_params = read_float_file(lens_file.as_str()).map_err(|e| anyhow!(e))?;
if lens_params.len() % 4 != 0 {
return Err(format!(
return Err(anyhow!(
"{}: excess values in lens specification file; must be multiple-of-four values, read {}",
loc,
lens_params.len()
@ -227,7 +234,7 @@ impl CameraFactory for Camera {
PixelFormat::F32,
Point2i::new(builtin_res, builtin_res),
&["Y"],
SRGB,
SRGB.into(),
);
let res = image.resolution();
@ -278,7 +285,7 @@ impl CameraFactory for Camera {
PixelFormat::F32,
Point2i::new(builtin_res, builtin_res),
&["Y"],
SRGB,
SRGB.into(),
);
let res = img.resolution();
for y in 0..res.y() {
@ -300,7 +307,7 @@ impl CameraFactory for Camera {
PixelFormat::F32,
Point2i::new(builtin_res, builtin_res),
&["Y"],
SRGB,
SRGB.into(),
);
let low = (0.25 * builtin_res as Float) as i32;
let high = (0.75 * builtin_res as Float) as i32;
@ -349,15 +356,13 @@ impl CameraFactory for Camera {
PixelFormat::F32,
im.image.resolution(),
&["Y"],
SRGB,
SRGB.into(),
);
let res = mono.resolution();
for y in 0..res.y() {
for x in 0..res.x() {
let avg = im
.image
.get_channels_default(Point2i::new(x, y))
.average();
let avg =
im.image.get_channels(Point2i::new(x, y)).average();
mono.set_channel(Point2i::new(x, y), 0, avg);
}
}
@ -372,7 +377,7 @@ impl CameraFactory for Camera {
let camera = RealisticCamera::new(
base,
lens_params,
&lens_params,
focal_distance,
aperture_diameter,
aperture_image,
@ -385,7 +390,7 @@ impl CameraFactory for Camera {
let full_res = film.full_resolution();
let camera_params =
CameraBaseParameters::new(camera_transform, film, medium.into(), params, loc);
let base = CameraBase::new(camera_params);
let base = CameraBase::create(camera_params);
let lens_radius = params.get_one_float("lensradius", 0.);
let focal_distance = params.get_one_float("focaldistance", 1e30);
let frame = params.get_one_float(
@ -413,7 +418,7 @@ impl CameraFactory for Camera {
Point2f::new(sw[1], sw[3]),
);
} else {
return Err(format!(
return Err(anyhow!(
"{}: screenwindow param must have four values",
loc
));
@ -426,9 +431,10 @@ impl CameraFactory for Camera {
"equalarea" => Mapping::EqualArea,
"equirectangular" => Mapping::EquiRectangular,
_ => {
return Err(format!(
return Err(anyhow!(
"{}: unknown mapping for spherical camera at {}",
m, loc
m,
loc
));
}
};
@ -438,7 +444,7 @@ impl CameraFactory for Camera {
arena.alloc(camera);
Ok(Camera::Spherical(camera))
}
_ => Err(format!("Camera type '{}' unknown at {}", name, loc)),
_ => Err(anyhow!("Camera type '{}' unknown at {}", name, loc)),
}
}
}

View file

@ -1,7 +1,7 @@
use crate::utils::read_float_file;
use anyhow::Result;
use shared::Float;
use shared::core::color::{RES, RGBToSpectrumTable};
use shared::core::color::{Coeffs, RES, RGBToSpectrumTable};
use shared::{Float, Ptr};
use std::ops::Deref;
use std::path::Path;
@ -21,13 +21,14 @@ impl Deref for RGBToSpectrumTableData {
impl RGBToSpectrumTableData {
pub fn new(z_nodes: Vec<Float>, coeffs: Vec<Float>) -> Self {
assert_eq!(z_nodes.len(), RES);
assert_eq!(coeffs.len(), RES * RES * RES * 3 * 3); // bucket*z*y*x*3(coeffs)
assert_eq!(z_nodes.len(), RES as usize);
assert_eq!(coeffs.len(), (RES * RES * RES) as usize * 3 * 3); // bucket*z*y*x*3(coeffs)
let coeffs_struct = Coeffs::from(&[coeffs[0], coeffs[1], coeffs[2]]);
let view = RGBToSpectrumTable {
z_nodes: z_nodes.as_ptr(),
coeffs: coeffs.as_ptr().into(),
n_nodes: z_nodes.len(),
z_nodes: Ptr::from(z_nodes.as_ptr()),
coeffs: Ptr::from(&coeffs_struct),
n_nodes: z_nodes.len() as u32,
};
Self {
@ -41,8 +42,8 @@ impl RGBToSpectrumTableData {
let z_path = base_dir.join(format!("{}_znodes.dat", name));
let c_path = base_dir.join(format!("{}_coeffs.dat", name));
let z_nodes = read_float_file(&z_path)?;
let coeffs = read_float_file(&c_path)?;
let z_nodes = read_float_file(&z_path.to_str().unwrap())?;
let coeffs = read_float_file(&c_path.to_str().unwrap())?;
Ok(Self::new(z_nodes, coeffs))
}

View file

@ -1,25 +1,20 @@
use crate::Arena;
use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageIO, ImageMetadata};
use crate::films::*;
use crate::spectra::{SRGB, data::get_named_spectrum};
use crate::spectra::data::get_named_spectrum;
use anyhow::{Result, anyhow};
use rayon::iter::ParallelIterator;
use rayon::prelude::IntoParallelIterator;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::color::{RGB, XYZ, white_balance};
use shared::core::film::{
DevicePixelSensor, Film, FilmBase, GBufferFilm, RGBFilm, SpectralFilm, SpectralPixel,
};
use shared::core::color::{RGB, SRGB, XYZ, white_balance};
use shared::core::film::{DevicePixelSensor, Film, FilmBase, GBufferFilm, RGBFilm, SpectralFilm};
use shared::core::filter::{Filter, FilterTrait};
use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i};
use shared::core::image::PixelFormat;
use shared::core::spectrum::Spectrum;
use shared::spectra::cie::CIE_Y;
use shared::spectra::{
DenselySampledSpectrum, PiecewiseLinearSpectrum, RGBColorSpace, cie::SWATCHES_RAW,
};
use shared::spectra::{PiecewiseLinearSpectrum, RGBColorSpace, cie::SWATCHES_RAW};
use shared::utils::math::{SquareMatrix, linear_least_squares};
use shared::{Float, Ptr};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, LazyLock};
@ -33,21 +28,24 @@ const SWATCH_REFLECTANCES: LazyLock<[Spectrum; N_SWATCH_REFLECTANCES]> = LazyLoc
std::array::from_fn(|i| {
let raw_data = SWATCHES_RAW[i];
let pls = PiecewiseLinearSpectrum::from_interleaved(raw_data, false);
Spectrum::PiecewiseLinear(pls)
Spectrum::Piecewise(pls)
})
});
pub trait PixelSensorTrait {
pub fn get_swatches() -> Arc<[Spectrum; N_SWATCH_REFLECTANCES]> {
Arc::new(SWATCH_REFLECTANCES)
pub trait PixelSensorTrait: Sized {
fn get_swatches() -> Arc<[Spectrum; N_SWATCH_REFLECTANCES]> {
Arc::new(*SWATCH_REFLECTANCES)
}
pub fn create(
fn create(
params: &ParameterDictionary,
output_colorspace: Arc<RGBColorSpace>,
exposure_time: Float,
loc: &FileLoc,
) -> Result<Self> {
) -> Result<Self>
where
Self: Sized,
{
let iso = params.get_one_float("iso", 100.);
let mut white_balance_temp = params.get_one_float("whitebalance", 0.);
let sensor_name = params.get_one_string("sensor", "cie1931");
@ -63,15 +61,15 @@ pub trait PixelSensorTrait {
};
let sensor_illum: Option<Arc<Spectrum>> = if white_balance_temp != 0. {
Some(Arc::new(Spectrum::DenselySampled(d_illum)))
Some(Spectrum::Dense(d_illum.device()).into())
} else {
None
};
if sensor_name == "cie1931" {
return Ok(DevicePixelSensor::new_with_white_balance(
Some(output_colorspace),
sensor_illum,
return Ok(Self::new_with_white_balance(
output_colorspace.as_ref(),
sensor_illum.as_deref(),
imaging_ratio,
));
} else {
@ -79,54 +77,74 @@ pub trait PixelSensorTrait {
let g_opt = get_named_spectrum(&format!("{}_g", sensor_name));
let b_opt = get_named_spectrum(&format!("{}_b", sensor_name));
if r_opt.is_none() || g_opt.is_none() || b_opt.is_none() {
return anyhow!(
return Err(anyhow!(
"{}: unknown sensor type '{}' (missing RGB spectral data)",
loc,
sensor_name
);
));
}
let r = Arc::new(r_opt.unwrap());
let g = Arc::new(g_opt.unwrap());
let b = Arc::new(b_opt.unwrap());
let r = r_opt.unwrap();
let g = g_opt.unwrap();
let b = b_opt.unwrap();
return DevicePixelSensor::new(
r,
g,
b,
return Ok(Self::new(
&r,
&g,
&b,
output_colorspace.clone(),
Some(sensor_illum),
Some(
sensor_illum
.as_deref()
.expect("Sensor must have illuminant"),
),
imaging_ratio,
)
.map_err(|e| e.to_string());
));
}
}
fn new(
r: Spectrum,
g: Spectrum,
b: Spectrum,
output_colorspace: RGBColorSpace,
r: &Spectrum,
g: &Spectrum,
b: &Spectrum,
output_colorspace: Arc<RGBColorSpace>,
sensor_illum: Option<&Spectrum>,
imaging_ratio: Float,
) -> DevicePixelSensor {
) -> Self;
fn new_with_white_balance(
output_colorspace: &RGBColorSpace,
sensor_illum: Option<&Spectrum>,
imaging_ratio: Float,
) -> Self;
}
impl PixelSensorTrait for DevicePixelSensor {
fn new(
r: &Spectrum,
g: &Spectrum,
b: &Spectrum,
output_colorspace: Arc<RGBColorSpace>,
sensor_illum: Option<&Spectrum>,
imaging_ratio: Float,
) -> Self {
// As seen in usages of this constructor, sensor_illum can be null
// Going with the colorspace's own illuminant, but this might not be the right choice
// TODO: Test this
let illum: &Spectrum = match sensor_illum {
Some(arc_illum) => &**arc_illum,
None => &output_colorspace.illuminant,
Some(arc_illum) => arc_illum,
None => &Spectrum::Dense(output_colorspace.as_ref().illuminant),
};
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(&r).device();
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(&g).device();
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(&b).device();
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(r).device();
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(g).device();
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(b).device();
let mut rgb_camera = [[0.; 3]; N_SWATCH_REFLECTANCES];
let swatches = Self::get_swatches();
for i in 0..N_SWATCH_REFLECTANCES {
let rgb = Self::project_reflectance::<RGB>(
let rgb = DevicePixelSensor::project_reflectance::<RGB>(
&swatches[i],
illum,
&Spectrum::Dense(r_bar),
@ -141,25 +159,25 @@ pub trait PixelSensorTrait {
let mut xyz_output = [[0.; 3]; N_SWATCH_REFLECTANCES];
let spectra = get_spectra_context();
let sensor_white_g = illum.inner_product(&Spectrum::Dense(g_bar.clone()));
let sensor_white_y = illum.inner_product(spectra.y);
let sensor_white_y = illum.inner_product(&Spectrum::Dense(spectra.y));
for i in 0..N_SWATCH_REFLECTANCES {
let s = swatches[i].clone();
let xyz = Self::project_reflectance::<XYZ>(
let xyz = DevicePixelSensor::project_reflectance::<XYZ>(
&s,
&Spectrum::Dense(output_colorspace.illuminant),
spectra.x,
spectra.y,
spectra.z,
illum,
&Spectrum::Dense(spectra.x),
&Spectrum::Dense(spectra.y),
&Spectrum::Dense(spectra.z),
) * (sensor_white_y / sensor_white_g);
for c in 0..3 {
xyz_output[i][c] = xyz[c];
for c in 0..3 as u32 {
xyz_output[i][c as usize] = xyz[c].try_into().unwrap();
}
}
let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)
.expect("Could not convert sensor illuminance to XYZ space");
DevicePixelSensor {
Self {
xyz_from_sensor_rgb,
r_bar,
g_bar,
@ -174,9 +192,9 @@ pub trait PixelSensorTrait {
imaging_ratio: Float,
) -> Self {
let spectra = get_spectra_context();
let r_bar = CIE_X_DATA.clone();
let g_bar = CIE_Y_DATA.clone();
let b_bar = CIE_Z_DATA.clone();
let r_bar = CIE_X_DATA.device();
let g_bar = CIE_Y_DATA.device();
let b_bar = CIE_Z_DATA.device();
let xyz_from_sensor_rgb: SquareMatrix<Float, 3>;
if let Some(illum) = sensor_illum {
@ -187,7 +205,7 @@ pub trait PixelSensorTrait {
xyz_from_sensor_rgb = SquareMatrix::<Float, 3>::default();
}
Self {
DevicePixelSensor {
xyz_from_sensor_rgb,
r_bar,
g_bar,
@ -197,8 +215,6 @@ pub trait PixelSensorTrait {
}
}
impl PixelSensorTrait for DevicePixelSensor {}
pub trait CreateFilmBase {
fn create(
params: &ParameterDictionary,
@ -256,14 +272,14 @@ impl CreateFilmBase for FilmBase {
pixel_bounds = pixel_bounds.expand(expansion);
let diagonal_mm = params.get_one_float("diagonal", 35.0);
let filename = params.get_one_string("filename", "pbrt.exr");
// let filename = params.get_one_string("filename", "pbrt.exr");
Self {
full_resolution,
pixel_bounds,
filter,
diagonal: diagonal_mm * 0.001,
sensor,
sensor: Ptr::from(sensor.unwrap()),
}
}
}
@ -271,12 +287,9 @@ impl CreateFilmBase for FilmBase {
pub trait FilmTrait: Sync {
fn base(&self) -> &FilmBase;
fn get_pixel_rgb(&self, p: Point2i, splat_scale: Option<Float>) -> RGB;
// fn get_filename(&self) -> &str;
fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float) {
fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float, filename: &str) {
let image = self.get_image(metadata, splat_scale);
image
.write(self.get_filename(), metadata)
.expect("Something")
image.write(filename, metadata).expect("Something")
}
fn get_image(&self, _metadata: &ImageMetadata, splat_scale: Float) -> Image {
@ -327,14 +340,14 @@ pub trait FilmTrait: Sync {
})
.collect();
let mut image = Image::new(format, resolution, channel_names, SRGB);
let mut image = Image::new(format, resolution, channel_names, SRGB.into());
let rgb_desc = ImageChannelDesc::new(&[0, 1, 2]);
for (iy, row_data) in processed_rows.into_iter().enumerate() {
for (ix, rgb_chunk) in row_data.chunks_exact(3).enumerate() {
let p_offset = Point2i::new(ix as i32, iy as i32);
let values = ImageChannelValues::from(rgb_chunk);
image.set_channels(p_offset, &rgb_desc, &values);
image.set_channel(p_offset, &rgb_desc, &values);
}
}
@ -370,14 +383,6 @@ impl FilmTrait for Film {
Film::Spectral(f) => f.get_pixel_rgb(p, splat_scale),
}
}
// fn get_filename(&self) -> &str {
// match self {
// Film::RGB(f) => &f.base().filename,
// Film::GBuffer(f) => &f.base().filename,
// Film::Spectral(f) => &f.base().filename,
// }
// }
}
pub trait FilmFactory {
@ -388,8 +393,10 @@ pub trait FilmFactory {
filter: Filter,
_camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self>;
arena: &Arena,
) -> Result<Self>
where
Self: Sized;
}
impl FilmFactory for Film {
@ -400,36 +407,19 @@ impl FilmFactory for Film {
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self> {
arena: &Arena,
) -> Result<Self>
where
Self: Sized,
{
match name {
"gbuffer" => GBufferFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
"rgb" => RGBFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
"spectral" => SpectralFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
"gbuffer" => {
GBufferFilm::create(params, exposure_time, filter, camera_transform, loc, arena)
}
"rgb" => RGBFilm::create(params, exposure_time, filter, camera_transform, loc, arena),
"spectral" => {
SpectralFilm::create(params, exposure_time, filter, camera_transform, loc, arena)
}
_ => Err(anyhow!("Film type '{}' unknown at {}", name, loc)),
}
}

View file

@ -1,4 +1,5 @@
use crate::filters::*;
use crate::utils::containers::Array2D;
use crate::utils::sampling::PiecewiseConstant2D;
use crate::utils::{FileLoc, ParameterDictionary};
use shared::Float;
@ -68,10 +69,10 @@ impl CreateFilterSampler for FilterSampler {
Point2f::new(radius.x(), radius.y()),
);
let nx = (32.0 * radius.x()) as usize;
let ny = (32.0 * radius.y()) as usize;
let nx = (32.0 * radius.x()) as i32;
let ny = (32.0 * radius.y()) as i32;
let mut f = Array2D::new_with_dims(nx, ny);
let mut f = Array2D::new_dims(nx, ny);
for y in 0..f.y_size() {
for x in 0..f.x_size() {
let p = domain.lerp(Point2f::new(
@ -84,7 +85,7 @@ impl CreateFilterSampler for FilterSampler {
let distrib = PiecewiseConstant2D::new_with_bounds(&f, domain);
Self {
domain,
f,
f: *f.device(),
distrib: distrib.device,
}
}

View file

@ -7,7 +7,7 @@ use image_rs::{DynamicImage, ImageReader};
use shared::Float;
use shared::core::color::{ColorEncoding, LINEAR, SRGB};
use shared::core::geometry::Point2i;
use shared::core::image::{PixelFormat};
use shared::core::image::PixelFormat;
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::path::Path;
@ -187,7 +187,7 @@ impl ImageIO for Image {
// constructors/creation
fn to_u8_buffer(&self) -> Vec<u8> {
match &self.pixels {
PixelStorage::U8(data) => data,
PixelStorage::U8(data) => data.to_vec(),
PixelStorage::F16(data) => data
.iter()
.map(|v| (v.to_f32().clamp(0.0, 1.0) * 255.0 + 0.5) as u8)

View file

@ -69,9 +69,9 @@ impl DerefMut for ImageChannelValues {
#[derive(Debug, Clone)]
pub enum PixelStorage {
U8(Box<[u8]>),
F16(Box<[f16]>),
F32(Box<[f32]>),
U8(Vec<u8>),
F16(Vec<f16>),
F32(Vec<f32>),
}
impl PixelStorage {
@ -131,6 +131,10 @@ impl Image {
) -> Self {
let n_channels = channel_names.len() as i32;
let expected = (resolution.x() * resolution.y()) as usize * n_channels as usize;
let channel_names = channel_names
.iter()
.map(|s| s.as_ref().to_string())
.collect();
assert_eq!(storage.len(), expected, "Pixel data size mismatch");
let device = DeviceImage {
@ -156,26 +160,7 @@ impl Image {
channel_names: &[impl AsRef<str>],
encoding: ColorEncoding,
) -> Self {
Self::from_storage(
PixelStorage::U8(data.into_boxed_slice()),
resolution,
channel_names,
encoding,
)
}
pub fn from_u8(
data: Vec<u8>,
resolution: Point2i,
channel_names: &[impl AsRef<str>],
encoding: ColorEncoding,
) -> Self {
Self::from_storage(
PixelStorage::U8(data.into_boxed_slice()),
resolution,
channel_names,
encoding,
)
Self::from_storage(PixelStorage::U8(data), resolution, channel_names, encoding)
}
pub fn from_f16(
@ -183,12 +168,7 @@ impl Image {
resolution: Point2i,
channel_names: &[impl AsRef<str>],
) -> Self {
Self::from_storage(
PixelStorage::F16(data.into_boxed_slice()),
resolution,
channel_names,
LINEAR,
)
Self::from_storage(PixelStorage::F16(data), resolution, channel_names, LINEAR)
}
pub fn from_f32(
@ -196,12 +176,7 @@ impl Image {
resolution: Point2i,
channel_names: &[impl AsRef<str>],
) -> Self {
Self::from_storage(
PixelStorage::F32(data.into_boxed_slice()),
resolution,
channel_names,
LINEAR,
)
Self::from_storage(PixelStorage::F32(data), resolution, channel_names, LINEAR)
}
pub fn new(
@ -215,7 +190,7 @@ impl Image {
let storage = match format {
PixelFormat::U8 => PixelStorage::U8(vec![0; pixel_count].into()),
PixelFormat::F16 => PixelStorage::F16(vec![0; pixel_count].into()),
PixelFormat::F16 => PixelStorage::F16(vec![f16::ZERO; pixel_count].into()),
PixelFormat::F32 => PixelStorage::F32(vec![0.0; pixel_count].into()),
};
@ -411,8 +386,8 @@ impl Image {
) -> Result<ImageChannelDesc> {
let mut offset = Vec::with_capacity(requested_channels.len());
for &req in requested_channels.iter() {
match self.channel_names.iter().position(|n| n == req) {
for req in requested_channels.iter() {
match self.channel_names.iter().position(|n| n == req.as_ref()) {
Some(idx) => {
offset.push(idx);
}
@ -455,7 +430,7 @@ impl Image {
dst[i * dst_nc + out_idx] = src[i * src_nc + in_c];
}
}
PixelStorage::U8(dst.into_boxed_slice())
PixelStorage::U8(dst)
}
PixelStorage::F16(src) => {
let mut dst = vec![f16::ZERO; pixel_count * dst_nc];
@ -464,7 +439,7 @@ impl Image {
dst[i * dst_nc + out_idx] = src[i * src_nc + in_c];
}
}
PixelStorage::F16(dst.into_boxed_slice())
PixelStorage::F16(dst)
}
PixelStorage::F32(src) => {
let mut dst = vec![0.0f32; pixel_count * dst_nc];
@ -473,7 +448,7 @@ impl Image {
dst[i * dst_nc + out_idx] = src[i * src_nc + in_c];
}
}
PixelStorage::F32(dst.into_boxed_slice())
PixelStorage::F32(dst)
}
};

View file

@ -6,7 +6,6 @@ use shared::Float;
use shared::core::color::ColorEncoding;
use shared::core::geometry::{Bounds2i, Point2i};
use shared::core::image::{PixelFormat, WrapMode, WrapMode2D};
use shared::utils::Ptr;
use shared::utils::math::windowed_sinc;
use std::sync::{Arc, Mutex};
@ -272,7 +271,7 @@ fn copy_rect_in_kernel<T: PixelStorageTrait>(
}
fn downsample_kernel<T: PixelStorageTrait>(
dst: &mut Ptr<T>,
dst: &mut [T],
dst_res: Point2i,
prev: &Image,
wrap: WrapMode2D,

View file

@ -21,7 +21,6 @@ pub fn lookup_spectrum(s: &Spectrum) -> Arc<DenselySampledSpectrumBuffer> {
pub trait CreateLight {
fn create(
arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
parameters: &ParameterDictionary,
@ -29,6 +28,7 @@ pub trait CreateLight {
shape: &Shape,
alpha_text: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
arena: &Arena,
) -> Result<Light>;
}
@ -44,7 +44,9 @@ pub trait LightFactory {
alpha_tex: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
camera_transform: CameraTransform,
) -> Result<Self>;
) -> Result<Self>
where
Self: Sized;
}
impl LightFactory for Light {
@ -59,10 +61,12 @@ impl LightFactory for Light {
alpha_tex: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
camera_transform: CameraTransform,
) -> Result<Self> {
) -> Result<Self>
where
Self: Sized,
{
match name {
"diffuse" => DiffuseAreaLight::create(
arena,
render_from_light,
medium,
parameters,
@ -70,9 +74,9 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"point" => PointLight::create(
arena,
render_from_light,
medium,
parameters,
@ -80,9 +84,9 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"spot" => SpotLight::create(
arena,
render_from_light,
medium,
parameters,
@ -90,9 +94,9 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"goniometric" => GoniometricLight::create(
arena,
render_from_light,
medium,
parameters,
@ -100,9 +104,9 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"projection" => ProjectionLight::create(
arena,
render_from_light,
medium,
parameters,
@ -110,9 +114,9 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"distant" => DistantLight::create(
arena,
render_from_light,
medium,
parameters,
@ -120,15 +124,16 @@ impl LightFactory for Light {
shape,
alpha_tex,
colorspace,
arena,
),
"infinite" => crate::lights::infinite::create(
arena,
render_from_light,
medium.into(),
camera_transform,
parameters,
colorspace,
loc,
arena,
),
_ => Err(anyhow!("{}: unknown light type: \"{}\"", loc, name)),
}

View file

@ -14,7 +14,7 @@ pub trait CreateMaterial: Sized {
normal_map: Option<Arc<Image>>,
named_materials: &HashMap<String, Material>,
loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Material>;
}
@ -25,8 +25,10 @@ pub trait MaterialFactory {
normal_map: Option<Arc<Image>>,
named_materials: &HashMap<String, Material>,
loc: FileLoc,
arena: &mut Arena,
) -> Result<Self>;
arena: &Arena,
) -> Result<Self>
where
Self: Sized;
}
impl MaterialFactory for Material {
@ -36,8 +38,8 @@ impl MaterialFactory for Material {
normal_map: Option<Arc<Image>>,
named_materials: &HashMap<String, Material>,
loc: FileLoc,
arena: &mut Arena,
) -> Result<Material> {
arena: &Arena,
) -> Result<Self> where Self: Sized {
match name {
"diffuse" => {
DiffuseMaterial::create(parameters, normal_map, named_materials, &loc, arena)
@ -77,7 +79,7 @@ impl MaterialFactory for Material {
}
"mix" => MixMaterial::create(parameters, normal_map, named_materials, &loc, arena),
_ => Err(anyhow!("Material type '{}' unknown at {}", $name, $loc)),
_ => Err(anyhow!("Material type '{}' unknown at {}", name, &loc)),
}
}
}

View file

@ -2,9 +2,9 @@ use shared::core::{
light::Light,
material::Material,
medium::MediumInterface,
texture::GPUFloatTexture
primitive::{GeometricPrimitive, SimplePrimitive},
shape::Shape,
texture::GPUFloatTexture,
};
use shared::utils::Ptr;

View file

@ -14,8 +14,10 @@ pub trait SamplerFactory {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self>;
arena: &Arena,
) -> Result<Self>
where
Self: Sized;
}
impl SamplerFactory for Sampler {
@ -24,8 +26,11 @@ impl SamplerFactory for Sampler {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Self> {
arena: &Arena,
) -> Result<Self>
where
Self: Sized,
{
match name {
"zsobol" => ZSobolSampler::create(params, full_res, loc, arena),
"paddedsobol" => PaddedSobolSampler::create(params, full_res, loc, arena),

View file

@ -13,7 +13,7 @@ use shared::core::geometry::Vector3f;
use shared::core::options::RenderingCoordinateSystem;
use shared::spectra::RGBColorSpace;
use shared::utils::transform;
use shared::utils::transform::{self, AnimatedTransform, Transform};
use shared::utils::transform::{AnimatedTransform, Transform};
use std::collections::{HashMap, HashSet};
use std::ops::{Index, IndexMut};
use std::sync::Arc;
@ -404,10 +404,7 @@ impl ParserTarget for BasicSceneBuilder {
parameters,
};
self.scene
.named_materials
.lock()
.push((curr_name.to_string(), entity));
self.scene.add_named_material(&curr_name, entity);
}
fn medium_interface(&mut self, inside_name: &str, outside_name: &str, _loc: FileLoc) {
@ -429,7 +426,7 @@ impl ParserTarget for BasicSceneBuilder {
})
}
fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena) {
fn world_begin(&mut self, loc: FileLoc, arena: &Arena) {
self.verify_options("WorldBegin", &loc);
self.current_block = BlockState::WorldBlock;
for i in 0..MAX_TRANSFORMS {
@ -572,7 +569,7 @@ impl ParserTarget for BasicSceneBuilder {
};
let entity = TextureSceneEntity {
base,
render_from_object: self.graphics_state.render_from_object.clone(),
render_from_object: self.graphics_state.ctm[0].clone(),
};
if type_name == "float" {

View file

@ -109,7 +109,7 @@ impl BasicScene {
sampler: SceneEntity,
integ: SceneEntity,
accel: SceneEntity,
arena: &mut Arena,
arena: &Arena,
) {
*self.integrator.lock() = Some(integ);
*self.accelerator.lock() = Some(accel);

View file

@ -1,9 +1,9 @@
use super::{SceneEntity, TextureSceneEntity};
use crate::core::image::Image;
use crate::core::medium::Medium;
use crate::core::texture::{FloatTexture, SpectrumTexture};
use crate::utils::parallel::AsyncJob;
use shared::core::light::Light;
use shared::core::medium::Medium;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

View file

@ -1,5 +1,5 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensor};
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use anyhow::{Result, anyhow};
use shared::core::film::{DevicePixelSensor, FilmBase, GBufferFilm};
@ -49,18 +49,17 @@ impl GBufferFilmHost {
impl CreateFilm for GBufferFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
_arena: &Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let sensor = DevicePixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(&sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");

View file

@ -15,12 +15,11 @@ pub use spectral::*;
pub trait CreateFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Film>;
}

View file

@ -28,7 +28,7 @@ impl RGBFilmHost {
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
let sensor = &*sensor_ptr;
let filter_integral = base.filter.integral();
let sensor_matrix = sensor.xyz_from_sensor_rgb;
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor_matrix;
@ -44,6 +44,7 @@ impl RGBFilmHost {
let pixels: Array2D<RGBPixel> = Array2D::new(base.pixel_bounds);
let device_pixels = pixels.device.clone();
let storage = RGBFilmStorage { pixels };
let device = RGBFilm {
@ -52,7 +53,7 @@ impl RGBFilmHost {
write_fp16,
filter_integral,
output_rgbf_from_sensor_rgb,
pixels: std::sync::Arc::new(pixels_array),
pixels: device_pixels,
};
Self { device, storage }
@ -61,13 +62,12 @@ impl RGBFilmHost {
impl CreateFilm for RGBFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
_camera_transform: Option<CameraTransform>,
loc: &FileLoc,
_ arena: &Arena,
_arena: &Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);

View file

@ -2,19 +2,17 @@ use super::*;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result;
use anyhow::{Result, anyhow};
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::{
CreateFilm, DevicePixelSensor, FilmBase, PixelSensork, SpectralFilm, SpectralPixel,
};
use shared::core::film::{DevicePixelSensor, FilmBase, SpectralFilm, SpectralPixel};
use shared::core::filter::FilterTrait;
use shared::spectra::{LAMBDA_MAX, LAMBDA_MIN, RGBColorSpace};
use shared::utils::AtomicFloat;
use shared::utils::Ptr;
use shared::utils::containers::DeviceArray2D;
use shared::utils::math::SquareMatrix;
use std::path::Path;
use std::sync::Arc;
struct SpectralFilmStorage {
pixels: DeviceArray2D<SpectralPixel>,
@ -51,10 +49,6 @@ impl SpectralFilmHost {
let mut pixels = Array2D::<SpectralPixel>::new(base.pixel_bounds);
let p_sums_base = bucket_sums.as_ptr() as *mut f64;
let p_weights_base = weight_sums.as_ptr() as *mut f64;
let p_splats_base = bucket_splats.as_ptr() as *mut AtomicFloat;
for i in 0..n_pixels {
let pixel = pixels.get_linear_mut(i);
pixel.bucket_offset = i * n_buckets;
@ -79,7 +73,7 @@ impl SpectralFilmHost {
output_rgbf_from_sensor_rgb: SquareMatrix::identity(),
pixels: DeviceArray2D {
values: storage.pixels.as_ptr(),
values: pixels.values.as_mut_ptr(),
extent: base.pixel_bounds,
stride: base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x(),
},
@ -95,13 +89,12 @@ impl SpectralFilmHost {
impl CreateFilm for SpectralFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
_camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
_arena: &Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);

View file

@ -8,4 +8,4 @@ pub use boxf::*;
pub use gaussian::*;
pub use lanczos::*;
pub use mitchell::*;
pub use triangle::*;
// pub use triangle::*;

View file

@ -3,8 +3,8 @@ use bytemuck::cast_slice;
use once_cell::sync::Lazy;
use shared::Float;
static SRGB_SCALE_BYTES: &[u8] = include_bytes!("../../data/srgb_scale.dat");
static SRGB_COEFFS_BYTES: &[u8] = include_bytes!("../../data/srgb_coeffs.dat");
static SRGB_SCALE_BYTES: &[u8] = include_bytes!("../data/srgb_scale.dat");
static SRGB_COEFFS_BYTES: &[u8] = include_bytes!("../data/srgb_coeffs.dat");
pub static SRGB_SCALE: Lazy<&[Float]> = Lazy::new(|| cast_slice(SRGB_SCALE_BYTES));
@ -17,8 +17,8 @@ pub static SRGB_COEFFS: Lazy<&[Float]> =
}
});
static DCI_P3_SCALE_BYTES: &[u8] = include_bytes!("../../data/dcip3_scale.dat");
static DCI_P3_COEFFS_BYTES: &[u8] = include_bytes!("../../data/dcip3_coeffs.dat");
static DCI_P3_SCALE_BYTES: &[u8] = include_bytes!("../data/dcip3_scale.dat");
static DCI_P3_COEFFS_BYTES: &[u8] = include_bytes!("../data/dcip3_coeffs.dat");
pub static DCI_P3_SCALE: Lazy<&[Float]> = Lazy::new(|| cast_slice(DCI_P3_SCALE_BYTES));
pub static DCI_P3_COEFFS: Lazy<&[Float]> =
Lazy::new(|| match bytemuck::try_cast_slice(DCI_P3_COEFFS_BYTES) {
@ -29,8 +29,8 @@ pub static DCI_P3_COEFFS: Lazy<&[Float]> =
}
});
static ACES_SCALE_BYTES: &[u8] = include_bytes!("../../data/aces_scale.dat");
static ACES_COEFFS_BYTES: &[u8] = include_bytes!("../../data/aces_coeffs.dat");
static ACES_SCALE_BYTES: &[u8] = include_bytes!("../data/aces_scale.dat");
static ACES_COEFFS_BYTES: &[u8] = include_bytes!("../data/aces_coeffs.dat");
pub static ACES_SCALE: Lazy<&[Float]> = Lazy::new(|| cast_slice(ACES_SCALE_BYTES));
@ -43,8 +43,8 @@ pub static ACES_COEFFS: Lazy<&[Float]> =
}
});
static REC2020_SCALE_BYTES: &[u8] = include_bytes!("../../data/rec2020_scale.dat");
static REC2020_COEFFS_BYTES: &[u8] = include_bytes!("../../data/rec2020_coeffs.dat");
static REC2020_SCALE_BYTES: &[u8] = include_bytes!("../data/rec2020_scale.dat");
static REC2020_COEFFS_BYTES: &[u8] = include_bytes!("../data/rec2020_coeffs.dat");
pub static REC2020_SCALE: Lazy<&[Float]> = Lazy::new(|| cast_slice(REC2020_SCALE_BYTES));
pub static REC2020_COEFFS: Lazy<&[Float]> =

View file

@ -11,7 +11,6 @@ use shared::core::film::VisibleSurface;
use shared::core::geometry::{Point2i, Ray};
use shared::core::sampler::Sampler;
use shared::spectra::{SampledSpectrum, SampledWavelengths};
use std::sync::Arc;
pub trait IntegratorTrait {
fn render(&self);

View file

@ -206,7 +206,7 @@ impl RayIntegratorTrait for PathIntegrator {
lambda: &SampledWavelengths,
sampler: &mut Sampler,
want_visible: bool,
_arena: &mut Arena,
_arena: &Arena,
) -> (SampledSpectrum, Option<VisibleSurface>) {
let mut state = PathState::new();
let mut visible = None;
@ -292,7 +292,6 @@ impl RayIntegratorTrait for PathIntegrator {
state.prev_ctx = LightSampleContext::from(&*isect);
ray = isect.spawn_ray_with_differentials(&ray, bs.wi, bs.flags, bs.eta);
// Russian roulette
if state.russian_roulette(sampler, 1) {
break;
}
@ -306,7 +305,7 @@ impl RayIntegratorTrait for PathIntegrator {
p_pixel: Point2i,
sample_ind: usize,
sampler: &mut Sampler,
arena: &mut Arena,
arena: &Arena,
) {
crate::integrators::pipeline::evaluate_pixel_sample(
self,

View file

@ -9,7 +9,6 @@ use indicatif::{ProgressBar, ProgressStyle};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use shared::Float;
use shared::core::camera::{Camera, CameraTrait};
use shared::core::film::Film;
use shared::core::geometry::{Bounds2i, Point2i, VectorLike};
use shared::core::options::get_options;
use shared::core::sampler::get_camera_sample;
@ -198,9 +197,14 @@ pub fn render<T>(
if wave_start == spp || options.write_partial_images {
camera.init_metadata(&mut metadata);
camera
.get_film()
.write_image(&metadata, 1.0 / wave_start as Float);
if let Some(out_path) = &options.mse_reference_output {
camera.get_film().write_image(
&metadata,
1.0 / wave_start as Float,
out_path.as_str(),
);
}
}
if let Some(ref_img) = &reference_image {

View file

@ -7,7 +7,6 @@ use crate::core::texture::FloatTexture;
use crate::utils::{Arena, FileLoc, ParameterDictionary, Upload, resolve_filename};
use anyhow::{Result, anyhow};
use shared::core::geometry::Point2i;
use shared::core::image::DeviceImage;
use shared::core::light::{Light, LightBase, LightType};
use shared::core::medium::{Medium, MediumInterface};
use shared::core::shape::{Shape, ShapeTrait};
@ -27,7 +26,7 @@ pub trait CreateDiffuseLight {
scale: Float,
shape: Ptr<Shape>,
alpha: Ptr<GPUFloatTexture>,
image: Ptr<DeviceImage>,
image: Ptr<Image>,
colorspace: Ptr<RGBColorSpace>,
two_sided: bool,
) -> Self;
@ -104,7 +103,6 @@ impl CreateDiffuseLight for DiffuseAreaLight {
impl CreateLight for DiffuseAreaLight {
fn create(
arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
params: &ParameterDictionary,
@ -112,6 +110,7 @@ impl CreateLight for DiffuseAreaLight {
shape: &Shape,
alpha: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
arena: &Arena,
) -> Result<Light> {
let mut l = params.get_one_spectrum("l", None, SpectrumType::Illuminant);
let illum_spec = Spectrum::Dense(colorspace.unwrap().illuminant);
@ -121,26 +120,22 @@ impl CreateLight for DiffuseAreaLight {
let filename = resolve_filename(&params.get_one_string("filename", ""));
let (image, image_color_space) = if !filename.is_empty() {
if l.is_some() {
return Err(anyhow!(loc, "both \"L\" and \"filename\" specified"));
return Err(anyhow!("{}: both \"L\" and \"filename\" specified", loc));
}
let im = Image::read(Path::new(&filename), None)?;
if im.image.has_any_infinite_pixels() {
return Err(anyhow!(
loc,
"{}: image has infinite pixel values",
filename
));
return Err(anyhow!("{}: image has infinite pixel values", loc));
}
if im.image.has_any_nan_pixels() {
return Err(anyhow!(loc, "{}: image has NaN pixel values", filename));
return Err(anyhow!("{}: image has NaN pixel values", loc));
}
let channel_desc = im
.image
.get_channel_desc(&["R", "G", "B"])
.map_err(|_| anyhow!(loc, "{}: image must have R, G, B channels", filename))?;
.map_err(|_| anyhow!("{}: image must have R, G, B channels", loc))?;
let image = im.image.select_channels(&channel_desc);
let cs = im.metadata.get_colorspace();
@ -198,7 +193,7 @@ impl CreateLight for DiffuseAreaLight {
scale,
shape.upload(arena),
alpha.upload(arena),
image.upload(arena),
Ptr::from(&image.unwrap()),
image_color_space.upload(arena),
true,
);

View file

@ -38,7 +38,6 @@ impl CreateDistantLight for DistantLight {
impl CreateLight for DistantLight {
fn create(
_arena: &mut Arena,
render_from_light: Transform,
_medium: Medium,
parameters: &ParameterDictionary,
@ -46,6 +45,7 @@ impl CreateLight for DistantLight {
_shape: &Shape,
_alpha_text: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
_arena: &Arena,
) -> Result<Light> {
let l = parameters
.get_one_spectrum(

View file

@ -56,14 +56,14 @@ impl CreateGoniometricLight for GoniometricLight {
impl CreateLight for GoniometricLight {
fn create(
arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
params: &ParameterDictionary,
loc: &FileLoc,
shape: &Shape,
alpha_text: &FloatTexture,
_shape: &Shape,
_alpha_text: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
_arena: &Arena,
) -> Result<Light> {
let i = params
.get_one_spectrum(
@ -78,14 +78,13 @@ impl CreateLight for GoniometricLight {
Ptr::null()
} else {
let im = Image::read(Path::new(&filename), None)
.map_err(|e| anyhow!(loc, "could not load image '{}': {}", filename, e))?;
.map_err(|e| anyhow!("could not load image '{}': {}", filename, e))?;
let loaded = im.image;
let res = loaded.resolution();
if loaded.has_any_infinite_pixels() {
return Err(anyhow!(
loc,
"image '{}' has infinite pixels, not suitable for light",
filename
));
@ -93,10 +92,9 @@ impl CreateLight for GoniometricLight {
if res.x() != res.y() {
return Err(anyhow!(
loc,
"image resolution ({}, {}) is non-square; unlikely to be an equal-area map",
res.x,
res.y
res.x(),
res.y()
));
}
@ -132,8 +130,8 @@ fn convert_to_luminance_image(image: &Image, filename: &str, loc: &FileLoc) -> R
match (rgb_desc, y_desc) {
(Ok(_), Ok(_)) => Err(anyhow!(
"{}: Image '{}' has both RGB and Y channels; ambiguous",
loc,
"image '{}' has both RGB and Y channels; ambiguous",
filename
)),
@ -159,8 +157,8 @@ fn convert_to_luminance_image(image: &Image, filename: &str, loc: &FileLoc) -> R
}
(Err(_), Err(_)) => Err(anyhow!(
"{}: Image '{}' has neither RGB nor Y channels",
loc,
"image '{}' has neither RGB nor Y channels",
filename
)),
}

View file

@ -123,13 +123,13 @@ impl CreateUniformInfiniteLight for UniformInfiniteLight {
}
pub fn create(
arena: &mut Arena,
render_from_light: Transform,
_medium: MediumInterface,
camera_transform: CameraTransform,
parameters: &ParameterDictionary,
colorspace: Option<&RGBColorSpace>,
loc: &FileLoc,
arena: &Arena,
) -> Result<Light> {
let l = parameters.get_spectrum_array("L", SpectrumType::Illuminant);
let mut scale = parameters.get_one_float("scale", 1.0);
@ -142,7 +142,10 @@ pub fn create(
let has_portal = !portal.is_empty();
if has_spectrum && has_file {
return Err(anyhow!(loc, "cannot specify both \"L\" and \"filename\""));
return Err(anyhow!(
"{}: cannot specify both \"L\" and \"filename\"",
loc
));
}
// Uniform infinite light (no image)
@ -175,7 +178,6 @@ pub fn create(
if has_portal {
create_portal_light(
arena,
render_from_light,
scale,
image,
@ -183,18 +185,19 @@ pub fn create(
&portal,
camera_transform,
loc,
arena,
)
} else {
create_image_light(arena, render_from_light, scale, image, image_cs)
create_image_light(render_from_light, scale, image, image_cs, arena)
}
}
fn create_image_light(
arena: &mut Arena,
render_from_light: Transform,
scale: Float,
image: Image,
image_cs: RGBColorSpace,
arena: &Arena,
) -> Result<Light> {
let res = image.resolution();
assert_eq!(
@ -218,7 +221,7 @@ fn create_image_light(
})
.collect();
let distrib = PiecewiseConstant2D::from_slice(&data_u, n_u, n_v, Bounds2f::unit());
let distrib = PiecewiseConstant2D::from_slice(&data, n_u, n_v, Bounds2f::unit());
// Build compensated distribution
let average = data.iter().sum::<Float>() / data.len() as Float;
@ -245,7 +248,6 @@ fn create_image_light(
}
fn create_portal_light(
arena: &mut Arena,
render_from_light: Transform,
scale: Float,
image: Image,
@ -253,17 +255,18 @@ fn create_portal_light(
portal_points: &[Point3f],
camera_transform: CameraTransform,
loc: &FileLoc,
arena: &Arena,
) -> Result<Light> {
let res = image.resolution();
if res.x() != res.y() {
return Err(anyhow!(loc, "Portal light image must be square"));
return Err(anyhow!("{}: Portal light image must be square", loc));
}
// Validate portal
if portal_points.len() != 4 {
return Err(anyhow!(
"{}: Portal requires exactly 4 vertices, got {}",
loc,
"Portal requires exactly 4 vertices, got {}",
portal_points.len()
));
}
@ -311,7 +314,7 @@ fn validate_and_build_portal_frame(portal: &[Point3f; 4], loc: &FileLoc) -> Resu
let p03 = (portal[3] - portal[0]).normalize();
if (p01.dot(p32) - 1.0).abs() > 0.001 || (p12.dot(p03) - 1.0).abs() > 0.001 {
return Err(anyhow!(loc, "Portal edges not parallel"));
return Err(anyhow!("{}: Portal edges not parallel", loc));
}
if p01.dot(p12).abs() > 0.001
@ -319,7 +322,7 @@ fn validate_and_build_portal_frame(portal: &[Point3f; 4], loc: &FileLoc) -> Resu
|| p32.dot(p03).abs() > 0.001
|| p03.dot(p01).abs() > 0.001
{
return Err(anyhow!(loc, "Portal edges not perpendicular"));
return Err(anyhow!("{}: Portal edges not perpendicular", loc));
}
Ok(Frame::from_xy(p03, p01))
@ -377,16 +380,16 @@ fn load_image(
}
let im = Image::read(Path::new(filename), None)
.map_err(|e| anyhow!(loc, "failed to load '{}': {}", filename, e))?;
.map_err(|e| anyhow!("failed to load '{}': {}", filename, e))?;
if im.image.has_any_infinite_pixels() || im.image.has_any_nan_pixels() {
return Err(anyhow!(loc, "image '{}' has invalid pixels", filename));
return Err(anyhow!("{}: image '{}' has invalid pixels", loc, filename));
}
let desc = im
.image
.get_channel_desc(&["R", "G", "B"])
.map_err(|_| anyhow!(loc, "image '{}' must have R, G, B channels", filename))?;
.map_err(|_| anyhow!("image '{}' must have R, G, B channels", filename))?;
let cs = im.metadata.colorspace.unwrap_or_else(|| colorspace.clone());
Ok((im.image.select_channels(&desc), cs))

View file

@ -44,7 +44,6 @@ impl CreatePointLight for PointLight {
impl CreateLight for PointLight {
fn create(
_arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
parameters: &ParameterDictionary,
@ -52,6 +51,7 @@ impl CreateLight for PointLight {
_shape: &Shape,
_alpha: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
_arena: &Arena,
) -> Result<Light> {
let l = parameters
.get_one_spectrum(

View file

@ -9,7 +9,6 @@ use shared::Float;
use shared::core::geometry::{
Bounds2f, Point2f, Point2i, Point3f, Vector3f, VectorLike, cos_theta,
};
use shared::core::image::DeviceImage;
use shared::core::light::{Light, LightBase, LightType};
use shared::core::medium::{Medium, MediumInterface};
use shared::core::shape::Shape;
@ -17,7 +16,6 @@ use shared::core::spectrum::Spectrum;
use shared::lights::ProjectionLight;
use shared::spectra::RGBColorSpace;
use shared::utils::math::{radians, square};
use shared::utils::sampling::DeviceiecewiseConstant2D;
use shared::utils::{Ptr, Transform};
use std::path::Path;
@ -26,7 +24,7 @@ pub trait CreateProjectionLight {
render_from_light: Transform,
medium_interface: MediumInterface,
scale: Float,
image: Ptr<DeviceImage>,
image: Ptr<Image>,
image_color_space: Ptr<RGBColorSpace>,
fov: Float,
) -> Self;
@ -96,14 +94,14 @@ impl CreateProjectionLight for ProjectionLight {
impl CreateLight for ProjectionLight {
fn create(
arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
parameters: &ParameterDictionary,
loc: &FileLoc,
_shape: &Shape,
_alpha_text: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
_colorspace: Option<&RGBColorSpace>,
arena: &Arena,
) -> Result<Light> {
let mut scale = parameters.get_one_float("scale", 1.);
let power = parameters.get_one_float("power", -1.);
@ -111,24 +109,27 @@ impl CreateLight for ProjectionLight {
let filename = resolve_filename(&parameters.get_one_string("filename", ""));
if filename.is_empty() {
return Err(anyhow!(loc, "must provide filename for projection light"));
return Err(anyhow!(
"{}: must provide filename for projection light",
loc
));
}
let im = Image::read(Path::new(&filename), None)
.map_err(|e| anyhow!(loc, "could not load image '{}': {}", filename, e))?;
.map_err(|e| anyhow!("{}: could not load image '{}': {}", loc, filename, e))?;
if im.image.has_any_infinite_pixels() {
return Err(anyhow!(
"{}: image '{}' has infinite pixels, not suitable for light",
loc,
"image '{}' has infinite pixels, not suitable for light",
filename
));
}
if im.image.has_any_nan_pixels() {
return Err(anyhow!(
"{}: image '{}' has NaN pixels, not suitable for light",
loc,
"image '{}' has NaN pixels, not suitable for light",
filename
));
}
@ -136,17 +137,18 @@ impl CreateLight for ProjectionLight {
let channel_desc = im
.image
.get_channel_desc(&["R", "G", "B"])
.map_err(|_| anyhow!(loc, "image '{}' must have R, G, B channels", filename))?;
.map_err(|_| anyhow!("{}: image '{}' must have R, G, B channels", loc, filename))?;
let image = im.image.select_channels(&channel_desc);
let colorspace = im
.metadata
.colorspace
.ok_or_else(|| anyhow!(loc, "image '{}' missing colorspace metadata", filename))?;
.ok_or_else(|| anyhow!("{}: image '{}' missing colorspace metadata", loc, filename))?;
scale /= spectrum_to_photometric(Spectrum::Dense(colorspace.illuminant));
if power > 0. {
let k_e = compute_emissive_power(&image, &colorspace, fov);
scale /= k_e;
}
let flip = Transform::scale(1., -1., 1.);

View file

@ -56,7 +56,6 @@ impl CreateSpotLight for SpotLight {
impl CreateLight for SpotLight {
fn create(
arena: &mut Arena,
render_from_light: Transform,
medium: Medium,
parameters: &ParameterDictionary,
@ -64,6 +63,7 @@ impl CreateLight for SpotLight {
_shape: &Shape,
_alpha_tex: &FloatTexture,
colorspace: Option<&RGBColorSpace>,
arena: &Arena,
) -> Result<Light> {
let i = parameters
.get_one_spectrum(

View file

@ -19,7 +19,7 @@ impl CreateMaterial for CoatedDiffuseMaterial {
normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Material> {
let reflectance = parameters
.get_spectrum_texture("reflectance", None, SpectrumType::Albedo)
@ -78,7 +78,7 @@ impl CreateMaterial for CoatedConductorMaterial {
normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Material> {
let interface_u_roughness = parameters
.get_float_texture_or_null("interface.uroughness")

View file

@ -1,11 +1,12 @@
use crate::core::image::Image;
use crate::core::material::CreateMaterial;
use crate::core::texture::SpectrumTexture;
use crate::spectra::get_colorspace_device;
use crate::utils::{Arena, FileLoc, TextureParameterDictionary, Upload};
use shared::bxdfs::HairBxDF;
use shared::core::material::Material;
use shared::core::spectrum::Spectrum;
use shared::core::texture::{SpectrumTexture, SpectrumType};
use shared::core::texture::SpectrumType;
use shared::materials::complex::*;
// use shared::spectra::SampledWavelengths;
use shared::textures::SpectrumConstantTexture;
@ -17,10 +18,10 @@ use std::sync::Arc;
impl CreateMaterial for HairMaterial {
fn create(
parameters: &TextureParameterDictionary,
normal_map: Option<Arc<Image>>,
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
loc: &FileLoc,
arena: &mut Arena,
_loc: &FileLoc,
arena: &Arena,
) -> Result<Material> {
let sigma_a = parameters.get_spectrum_texture_or_null("sigma_a", SpectrumType::Unbounded);
let reflectance = parameters
@ -66,7 +67,7 @@ impl CreateMaterial for SubsurfaceMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}
@ -78,7 +79,7 @@ impl CreateMaterial for MeasuredMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}

View file

@ -13,7 +13,7 @@ impl CreateMaterial for ConductorMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &std::collections::HashMap<String, shared::core::material::Material>,
_loc: &crate::utils::FileLoc,
_arena: &mut crate::Arena,
_arena: &crate::Arena,
) -> Result<Material> {
todo!()
}

View file

@ -14,7 +14,7 @@ impl CreateMaterial for DielectricMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}
@ -26,7 +26,7 @@ impl CreateMaterial for ThinDielectricMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}

View file

@ -14,7 +14,7 @@ impl CreateMaterial for DiffuseMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}
@ -26,7 +26,7 @@ impl CreateMaterial for DiffuseTransmissionMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}

View file

@ -13,7 +13,7 @@ impl CreateMaterial for MixMaterial {
_normal_map: Option<Arc<Image>>,
_named_materials: &HashMap<String, Material>,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Material> {
todo!()
}

View file

@ -68,7 +68,7 @@ impl CreateSampler for HaltonSampler {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let nsamp = options

View file

@ -1,4 +1,5 @@
use super::*;
use anyhow::Result;
use shared::core::options::get_options;
use shared::core::sampler::IndependentSampler;
@ -7,7 +8,7 @@ impl CreateSampler for IndependentSampler {
params: &ParameterDictionary,
_full_res: Point2i,
_loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let nsamp = options

View file

@ -13,6 +13,6 @@ pub trait CreateSampler {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Sampler>;
}

View file

@ -1,4 +1,5 @@
use super::*;
use anyhow::{Result, anyhow};
use shared::core::geometry::Point2i;
use shared::core::options::get_options;
use shared::core::sampler::{PaddedSobolSampler, RandomizeStrategy, SobolSampler, ZSobolSampler};
@ -8,7 +9,7 @@ impl CreateSampler for SobolSampler {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let nsamp = options
@ -38,7 +39,7 @@ impl CreateSampler for PaddedSobolSampler {
params: &ParameterDictionary,
_full_res: Point2i,
loc: &FileLoc,
arena: &mut Arena,
_arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let nsamp = options
@ -71,7 +72,7 @@ impl CreateSampler for ZSobolSampler {
params: &ParameterDictionary,
full_res: Point2i,
loc: &FileLoc,
arena: &mut Arena,
_arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let nsamp = options

View file

@ -1,4 +1,5 @@
use super::*;
use anyhow::Result;
use shared::core::options::get_options;
use shared::core::sampler::StratifiedSampler;
@ -7,7 +8,7 @@ impl CreateSampler for StratifiedSampler {
params: &ParameterDictionary,
_full_res: Point2i,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Sampler> {
let options = get_options();
let jitter = params.get_one_bool("jitter", true);

View file

@ -8,8 +8,3 @@ pub mod triangle;
pub use curves::*;
pub use mesh::*;
use std::sync::{Arc, Mutex};
pub static ALL_TRIANGLE_MESHES: Mutex<Vec<Arc<TriangleMesh>>> = Mutex::new(Vec::new());
pub static ALL_TRIANGLE_MESHES: Mutex<Vec<Arc<BilinearPatchMesh>>> = Mutex::new(Vec::new());

View file

@ -7,6 +7,7 @@ use shared::core::spectrum::Spectrum;
use shared::spectra::RGBColorSpace;
use shared::utils::math::SquareMatrix;
use shared::utils::ptr::Ptr;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct RGBColorSpaceData {
@ -27,7 +28,7 @@ impl RGBColorSpaceData {
g: Point2f,
b: Point2f,
illuminant: Arc<DenselySampledSpectrumBuffer>,
rgb_to_spectrum_table: Arc<RGBToSpectrumTable>,
rgb_to_spectrum_table: Ptr<RGBToSpectrumTable>,
) -> Self {
let stdspec = get_spectra_context();
let w_xyz: XYZ = Spectrum::Dense(illuminant.device()).to_xyz(&stdspec);

View file

@ -83,7 +83,6 @@ pub static REC2020: LazyLock<Arc<RGBColorSpaceData>> = LazyLock::new(|| {
let b = Point2f::new(0.131, 0.046);
let table_ptr = Ptr::from(&REC2020_TABLE.clone());
Arc::new(RGBColorSpaceData::new(r, g, b, illum, table_ptr))
});
@ -94,7 +93,6 @@ pub static ACES: LazyLock<Arc<RGBColorSpaceData>> = LazyLock::new(|| {
let b = Point2f::new(0.0001, -0.0770);
let table_ptr = Ptr::from(&ACES_TABLE.clone());
Arc::new(RGBColorSpaceData::new(r, g, b, illum, table_ptr))
});

View file

@ -4,8 +4,9 @@ use crate::core::texture::{
SpectrumTextureTrait,
};
use anyhow::Result;
use shared::core::texture::SpectrumType;
use shared::core::texture::{SpectrumType, TextureEvalContext};
use shared::{
spectra::{SampledSpectrum, SampledWavelengths},
textures::{FloatBilerpTexture, SpectrumBilerpTexture},
utils::Transform,
};
@ -20,14 +21,14 @@ impl CreateFloatTexture for FloatBilerpTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}
}
impl FloatTextureTrait for FloatBilerpTexture {
fn evaluate(&self, _ctx: &shared::core::texture::TextureEvalContext) -> shared::Float {
fn evaluate(&self, _ctx: &TextureEvalContext) -> shared::Float {
todo!()
}
}
@ -46,9 +47,9 @@ impl CreateSpectrumTexture for SpectrumBilerpTexture {
impl SpectrumTextureTrait for SpectrumBilerpTexture {
fn evaluate(
&self,
_ctx: &shared::core::texture::TextureEvalContext,
_lambda: &shared::spectra::SampledWavelengths,
) -> shared::spectra::SampledSpectrum {
_ctx: &TextureEvalContext,
_lambda: &SampledWavelengths,
) -> SampledSpectrum {
todo!()
}
}

View file

@ -66,6 +66,10 @@ impl<T> Array2D<T> {
};
Self { device, values }
}
pub fn device(&self) -> &DeviceArray2D<T> {
&self.device
}
}
impl<T: Default + Clone> Array2D<T> {

View file

@ -704,12 +704,12 @@ fn read_spectrum_from_file(filename: &str) -> Result<Spectrum, String> {
pub struct TextureParameterDictionary {
dict: Arc<ParameterDictionary>,
textures: Option<&NamedTextures>,
textures: Option<NamedTextures>,
}
impl TextureParameterDictionary {
pub fn new(dict: Arc<ParameterDictionary>, textures: Option<&NamedTextures>) -> Self {
Self { dict, textures }
Self { dict, textures: textures.cloned() }
}
pub fn get_one_float(&self, name: &str, def: Float) -> Float {

View file

@ -62,7 +62,7 @@ pub trait ParserTarget {
tex_name: &str,
params: &ParsedParameterVector,
loc: FileLoc,
arena: &Arena,
arena: Arc<Arena>,
);
fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
@ -499,7 +499,7 @@ impl ParserTarget for FormattingParserTarget {
tex_name: &str,
_params: &ParsedParameterVector,
_loc: FileLoc,
_arena: &mut Arena,
_arena: Arc<Arena>,
) {
println!(
"{}Texture \"{}\" \"{}\" \"{}\"",
@ -1012,8 +1012,14 @@ impl<'a> SceneParser<'a> {
let type_name = self.expect_quoted_string()?;
let tex_name = self.expect_quoted_string()?;
let params = self.parse_parameters()?;
self.target
.texture(&name, &type_name, &tex_name, &params, token.loc, &arena);
self.target.texture(
&name,
&type_name,
&tex_name,
&params,
token.loc,
arena.clone(),
);
}
_ => {
return Err(ParserError::Generic(