Fixing film creation, separating arena allocation from texture creation, and cleaning up bad function signatures

pingu 2026-01-26 19:44:53 +00:00
parent 640e17110a
commit a32cd60e9f
35 changed files with 726 additions and 560 deletions
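
The shape of the change is easiest to see in the film-creation path: the single large match in FilmFactory::create is split into per-type constructors that receive the parsing arena. A minimal sketch of that interface, pieced together from the hunks below (the CreateFilm trait name and this argument list appear in the new src/films/gbuffer.rs; exact bounds and error types elsewhere may differ):

    pub trait CreateFilm {
        // Build one concrete film variant and wrap it in the Film enum.
        fn create(
            name: &str,
            params: &ParameterDictionary,
            exposure_time: Float,
            filter: Filter,
            camera_transform: Option<CameraTransform>,
            loc: &FileLoc,
            arena: &mut Arena,
        ) -> Result<Film>;
    }

FilmFactory::create then only dispatches on the film name ("rgb", "gbuffer", "spectral") and forwards the arena.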

View file

@ -39,45 +39,6 @@ pub struct RGBPixel {
rgb_splat: [AtomicFloat; 3], rgb_splat: [AtomicFloat; 3],
} }
// #[cfg(not(target_os = "cuda"))]
// impl RGBFilm {
// pub fn new(
// base: FilmBase,
// colorspace: &RGBColorSpace,
// max_component_value: Float,
// write_fp16: bool,
// ) -> Self {
// let sensor_ptr = base.sensor;
// if sensor_ptr.is_null() {
// panic!("Film must have a sensor");
// }
// let sensor = unsafe { &*sensor_ptr };
// let filter_integral = base.filter.integral();
// let sensor_matrix = sensor.xyz_from_sensor_rgb;
// let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor_matrix;
//
// let width = base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x();
// let height = base.pixel_bounds.p_max.y() - base.pixel_bounds.p_min.y();
// let count = (width * height) as usize;
//
// let mut pixel_vec = Vec::with_capacity(count);
// for _ in 0..count {
// pixel_vec.push(RGBPixel::default());
// }
//
// let pixels_array = Array2D::(base.pixel_bounds);
//
// Self {
// base,
// max_component_value,
// write_fp16,
// filter_integral,
// output_rgbf_from_sensor_rgb,
// pixels: std::sync::Arc::new(pixels_array),
// }
// }
// }
//
impl RGBFilm { impl RGBFilm {
pub fn base(&self) -> &FilmBase { pub fn base(&self) -> &FilmBase {
&self.base &self.base
@ -87,7 +48,7 @@ impl RGBFilm {
&mut self.base &mut self.base
} }
pub fn get_sensor(&self) -> &PixelSensor { pub fn get_sensor(&self) -> &DevicePixelSensor {
#[cfg(not(target_os = "cuda"))] #[cfg(not(target_os = "cuda"))]
{ {
if self.base.sensor.is_null() { if self.base.sensor.is_null() {
@ -96,7 +57,7 @@ impl RGBFilm {
); );
} }
} }
&*self.base.sensor &self.base.sensor
} }
pub fn add_sample( pub fn add_sample(
@ -193,7 +154,7 @@ impl RGBFilm {
#[repr(C)] #[repr(C)]
#[derive(Debug, Default)] #[derive(Debug, Default)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))] #[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
struct GBufferPixel { pub struct GBufferPixel {
pub rgb_sum: [AtomicFloat; 3], pub rgb_sum: [AtomicFloat; 3],
pub weight_sum: AtomicFloat, pub weight_sum: AtomicFloat,
pub g_bugger_weight_sum: AtomicFloat, pub g_bugger_weight_sum: AtomicFloat,
@ -213,14 +174,14 @@ struct GBufferPixel {
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))] #[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
pub struct GBufferFilm { pub struct GBufferFilm {
pub base: FilmBase, pub base: FilmBase,
output_from_render: AnimatedTransform, pub output_from_render: AnimatedTransform,
apply_inverse: bool, pub apply_inverse: bool,
pixels: DeviceArray2D<GBufferPixel>, pub pixels: DeviceArray2D<GBufferPixel>,
colorspace: RGBColorSpace, pub colorspace: RGBColorSpace,
max_component_value: Float, pub max_component_value: Float,
write_fp16: bool, pub write_fp16: bool,
filter_integral: Float, pub filter_integral: Float,
output_rgbf_from_sensor_rgb: SquareMatrix<Float, 3>, pub output_rgbf_from_sensor_rgb: SquareMatrix<Float, 3>,
} }
impl GBufferFilm { impl GBufferFilm {
@ -232,7 +193,7 @@ impl GBufferFilm {
&mut self.base &mut self.base
} }
pub fn get_sensor(&self) -> &PixelSensor { pub fn get_sensor(&self) -> &DevicePixelSensor {
#[cfg(not(target_os = "cuda"))] #[cfg(not(target_os = "cuda"))]
{ {
if self.base.sensor.is_null() { if self.base.sensor.is_null() {
@ -241,7 +202,7 @@ impl GBufferFilm {
); );
} }
} }
&*self.base.sensor &self.base.sensor
} }
pub fn add_sample( pub fn add_sample(
@ -387,7 +348,7 @@ impl SpectralFilm {
#[repr(C)] #[repr(C)]
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub struct PixelSensor { pub struct DevicePixelSensor {
pub xyz_from_sensor_rgb: SquareMatrix<Float, 3>, pub xyz_from_sensor_rgb: SquareMatrix<Float, 3>,
pub r_bar: DenselySampledSpectrum, pub r_bar: DenselySampledSpectrum,
pub g_bar: DenselySampledSpectrum, pub g_bar: DenselySampledSpectrum,
@ -395,7 +356,7 @@ pub struct PixelSensor {
pub imaging_ratio: Float, pub imaging_ratio: Float,
} }
impl PixelSensor { impl DevicePixelSensor {
pub fn project_reflectance<T>( pub fn project_reflectance<T>(
refl: &Spectrum, refl: &Spectrum,
illum: &Spectrum, illum: &Spectrum,
@ -429,9 +390,7 @@ impl PixelSensor {
T::from([result[0], result[1], result[2]]) T::from([result[0], result[1], result[2]])
} }
}
impl PixelSensor {
pub fn to_sensor_rgb(&self, l: SampledSpectrum, lambda: &SampledWavelengths) -> RGB { pub fn to_sensor_rgb(&self, l: SampledSpectrum, lambda: &SampledWavelengths) -> RGB {
let l_norm = SampledSpectrum::safe_div(&l, &lambda.pdf()); let l_norm = SampledSpectrum::safe_div(&l, &lambda.pdf());
self.imaging_ratio self.imaging_ratio
@ -477,7 +436,7 @@ pub struct FilmBase {
pub pixel_bounds: Bounds2i, pub pixel_bounds: Bounds2i,
pub filter: Filter, pub filter: Filter,
pub diagonal: Float, pub diagonal: Float,
pub sensor: Ptr<PixelSensor>, pub sensor: Ptr<DevicePixelSensor>,
} }
#[repr(C)] #[repr(C)]

View file

@ -250,6 +250,12 @@ where
} }
} }
impl Bounds2f {
pub fn unit() -> Self {
Self::from_points(Point2f::new(0.0, 0.0), Point2f::new(1.0, 1.0))
}
}
impl Bounds3f { impl Bounds3f {
#[inline(always)] #[inline(always)]
pub fn intersect_p( pub fn intersect_p(

View file

@ -72,7 +72,7 @@ impl PixelFormat {
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub enum Pixels { pub enum Pixels {
U8(Ptr<u8>), U8(Ptr<u8>),
F16(Ptr<u16>), F16(Ptr<f16>),
F32(Ptr<f32>), F32(Ptr<f32>),
} }
@ -175,12 +175,12 @@ impl ImageAccess for DeviceImage {
unsafe { unsafe {
match self.pixels { match self.pixels {
Pixels::U8(ptr) => { Pixels::U8(ptr) => {
let raw_u8 = *ptr.add(offset as usize); let raw_val = *ptr.add(offset as usize);
self.base().encoding.to_linear_scalar(raw_u8) self.base().encoding.to_linear_scalar(raw_val)
} }
Pixels::F16(ptr) => { Pixels::F16(ptr) => {
let half_bits: u16 = *ptr.add(offset as usize); let raw_val = *ptr.add(offset as usize);
f16_to_f32(half_bits) raw_val.to_f32()
} }
Pixels::F32(ptr) => *ptr.add(offset as usize), Pixels::F32(ptr) => *ptr.add(offset as usize),
} }
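
Note that the F16 variant now carries half::f16 values directly (Ptr<f16> instead of Ptr<u16>), so a read is just f16::to_f32 rather than a hand-rolled bit conversion. A standalone illustration of the conversion with the half crate, which this module already depends on:

    use half::f16;

    fn main() {
        let h = f16::from_f32(0.25); // store at half precision
        let back = h.to_f32();       // decode to f32, no manual bit handling
        assert!((back - 0.25).abs() < 1e-3);
    }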

View file

@ -1,27 +1,27 @@
use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageMetadata}; use crate::Arena;
use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageIO, ImageMetadata};
use crate::films::*;
use crate::spectra::{SRGB, data::get_named_spectrum}; use crate::spectra::{SRGB, data::get_named_spectrum};
use anyhow::{Result, anyhow};
use rayon::prelude::IntoParallelIterator;
use shared::Float; use shared::Float;
use shared::core::camera::CameraTransform; use shared::core::camera::CameraTransform;
use shared::core::color::{RGB, XYZ, white_balance}; use shared::core::color::{RGB, XYZ, white_balance};
use shared::core::film::SpectralPixel; use shared::core::film::{
use shared::core::filter::Filter; DevicePixelSensor, Film, FilmBase, GBufferFilm, RGBFilm, SpectralFilm, SpectralPixel,
};
use shared::core::filter::{Filter, FilterTrait};
use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i}; use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i};
use shared::core::image::PixelFormat; use shared::core::image::PixelFormat;
use shared::core::spectrum::{Spectrum, StandardSpectra}; use shared::core::spectrum::Spectrum;
use shared::film::{Film, FilmBase, GBufferFilm, PixelSensor, PixelSensor, RGBFilm, SpectralFilm};
use shared::spectra::cie::SWATCHES_RAW;
use shared::spectra::{ use shared::spectra::{
DenselySampledSpectrum, LAMBDA_MAX, LAMBDA_MIN, PiecewiseLinearSpectrum, RGBColorSpace, DenselySampledSpectrum, PiecewiseLinearSpectrum, RGBColorSpace, cie::SWATCHES_RAW,
}; };
use shared::utils::containers::DeviceArray2D;
use shared::utils::math::{SquareMatrix, linear_least_squares}; use shared::utils::math::{SquareMatrix, linear_least_squares};
use shared::utils::{AnimatedTransform, AtomicFloat}; use std::sync::atomic::{AtomicUsize, Ordering};
use std::cmp::Ordering;
use std::path::Path;
use std::sync::atomic::AtomicUsize;
use std::sync::{Arc, LazyLock}; use std::sync::{Arc, LazyLock};
use crate::spectra::DenselySampledSpectrumBuffer; use crate::spectra::{DenselySampledSpectrumBuffer, get_spectra_context};
use crate::utils::{FileLoc, ParameterDictionary}; use crate::utils::{FileLoc, ParameterDictionary};
const N_SWATCH_REFLECTANCES: usize = 24; const N_SWATCH_REFLECTANCES: usize = 24;
@ -33,7 +33,7 @@ const SWATCH_REFLECTANCES: LazyLock<[Spectrum; N_SWATCH_REFLECTANCES]> = LazyLoc
}) })
}); });
pub trait PixelSensorHost { pub trait PixelSensorTrait {
pub fn get_swatches() -> Arc<[Spectrum; N_SWATCH_REFLECTANCES]> { pub fn get_swatches() -> Arc<[Spectrum; N_SWATCH_REFLECTANCES]> {
Arc::new(SWATCH_REFLECTANCES) Arc::new(SWATCH_REFLECTANCES)
} }
@ -65,8 +65,8 @@ pub trait PixelSensorHost {
}; };
if sensor_name == "cie1931" { if sensor_name == "cie1931" {
return Ok(PixelSensor::new_with_white_balance( return Ok(DevicePixelSensor::new_with_white_balance(
output_colorspace, Some(output_colorspace),
sensor_illum, sensor_illum,
imaging_ratio, imaging_ratio,
)); ));
@ -86,12 +86,12 @@ pub trait PixelSensorHost {
let g = Arc::new(g_opt.unwrap()); let g = Arc::new(g_opt.unwrap());
let b = Arc::new(b_opt.unwrap()); let b = Arc::new(b_opt.unwrap());
return PixelSensor::new( return DevicePixelSensor::new(
r, r,
g, g,
b, b,
output_colorspace.clone(), output_colorspace.clone(),
sensor_illum, Some(sensor_illum),
imaging_ratio, imaging_ratio,
) )
.map_err(|e| e.to_string()); .map_err(|e| e.to_string());
@ -103,11 +103,9 @@ pub trait PixelSensorHost {
g: Spectrum, g: Spectrum,
b: Spectrum, b: Spectrum,
output_colorspace: RGBColorSpace, output_colorspace: RGBColorSpace,
sensor_illum: &Spectrum, sensor_illum: Option<&Spectrum>,
imaging_ratio: Float, imaging_ratio: Float,
spectra: *const StandardSpectra, ) -> DevicePixelSensor {
swatches: &[Spectrum; 24],
) -> Self {
// As seen in usages of this constructor, sensor_illum can be null // As seen in usages of this constructor, sensor_illum can be null
// Going with the colorspace's own illuminant, but this might not be the right choice // Going with the colorspace's own illuminant, but this might not be the right choice
// TODO: Test this // TODO: Test this
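
With sensor_illum now an Option, the fallback the comment describes presumably reduces to something like the line below (illustrative only; the illuminant field name on RGBColorSpace is an assumption, not taken from this diff):

    // hypothetical: fall back to the colorspace's own illuminant when none is supplied
    let illum: &Spectrum = sensor_illum.unwrap_or(&output_colorspace.illuminant);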
@ -119,11 +117,11 @@ pub trait PixelSensorHost {
let r_bar = DenselySampledSpectrum::from_spectrum(&r); let r_bar = DenselySampledSpectrum::from_spectrum(&r);
let g_bar = DenselySampledSpectrum::from_spectrum(&g); let g_bar = DenselySampledSpectrum::from_spectrum(&g);
let b_bar = DenselySampledSpectrum::from_spectrum(&b); let b_bar = DenselySampledSpectrum::from_spectrum(&b);
let mut rgb_camera = [[0.; 3]; Self::N_SWATCH_REFLECTANCES]; let mut rgb_camera = [[0.; 3]; N_SWATCH_REFLECTANCES];
let swatches = Self::get_swatches(); let swatches = Self::get_swatches();
for i in 0..Self::N_SWATCH_REFLECTANCES { for i in 0..N_SWATCH_REFLECTANCES {
let rgb = Self::project_reflectance::<RGB>( let rgb = Self::project_reflectance::<RGB>(
&swatches[i], &swatches[i],
illum, illum,
@ -136,10 +134,11 @@ pub trait PixelSensorHost {
} }
} }
let mut xyz_output = [[0.; 3]; Self::N_SWATCH_REFLECTANCES]; let mut xyz_output = [[0.; 3]; N_SWATCH_REFLECTANCES];
let spectra = get_spectra_context();
let sensor_white_g = illum.inner_product(&Spectrum::Dense(g_bar.clone())); let sensor_white_g = illum.inner_product(&Spectrum::Dense(g_bar.clone()));
let sensor_white_y = illum.inner_product(spectra.y); let sensor_white_y = illum.inner_product(spectra.y);
for i in 0..Self::N_SWATCH_REFLECTANCES { for i in 0..N_SWATCH_REFLECTANCES {
let s = swatches[i].clone(); let s = swatches[i].clone();
let xyz = Self::project_reflectance::<XYZ>( let xyz = Self::project_reflectance::<XYZ>(
&s, &s,
@ -153,23 +152,24 @@ pub trait PixelSensorHost {
} }
} }
let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)?; let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)
.expect("Could not convert sensor illuminance to XYZ space");
Ok(Self { DevicePixelSensor {
xyz_from_sensor_rgb, xyz_from_sensor_rgb,
r_bar, r_bar,
g_bar, g_bar,
b_bar, b_bar,
imaging_ratio, imaging_ratio,
}) }
} }
fn new_with_white_balance( fn new_with_white_balance(
output_colorspace: &RGBColorSpace, output_colorspace: &RGBColorSpace,
sensor_illum: &Spectrum, sensor_illum: Option<&Spectrum>,
imaging_ratio: Float, imaging_ratio: Float,
spectra: *const StandardSpectra,
) -> Self { ) -> Self {
let spectra = get_spectra_context();
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.x); let r_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.x);
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.y); let g_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.y);
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.z); let b_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.z);
@ -193,150 +193,22 @@ pub trait PixelSensorHost {
} }
} }
impl PixelSensorHost for PixelSensor {} impl PixelSensorTrait for DevicePixelSensor {}
struct SpectralFilmStorage { pub trait CreateFilmBase {
pixels: DeviceArray2D<SpectralPixel>,
bucket_sums: Vec<f64>,
weight_sums: Vec<f64>,
bucket_splats: Vec<AtomicFloat>,
}
pub struct SpectralFilmHost {
pub view: SpectralFilm,
_storage: Box<SpectralFilmStorage>,
}
impl SpectralFilmHost {
pub fn new(
base: &FilmBase,
lambda_min: Float,
lambda_max: Float,
n_buckets: usize,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
let n_pixels = base.pixel_bounds.area() as usize;
let total_buckets = n_pixels * n_buckets;
let bucket_sums = vec![0.0; total_buckets];
let weight_sums = vec![0.0; total_buckets];
let mut bucket_splats = Vec::with_capacity(total_buckets);
for _ in 0..total_buckets {
bucket_splats.push(AtomicFloat::new(0.0));
}
let mut pixels = DeviceArray2D::<SpectralPixel>::new(base.pixel_bounds);
let p_sums_base = bucket_sums.as_ptr() as *mut f64;
let p_weights_base = weight_sums.as_ptr() as *mut f64;
let p_splats_base = bucket_splats.as_ptr() as *mut AtomicFloat;
for i in 0..n_pixels {
let pixel = pixels.get_linear_mut(i);
pixel.bucket_offset = i * n_buckets;
unsafe {
let offset = i * n_buckets;
pixel.bucket_sums = p_sums_base.add(offset);
pixel.weight_sums = p_weights_base.add(offset);
pixel.bucket_splats = p_splats_base.add(offset);
}
}
let storage = Box::new(SpectralFilmStorage {
pixels,
bucket_sums,
weight_sums,
bucket_splats,
});
let view = SpectralFilm {
base: base.clone(),
colorspace: colorspace.clone(),
lambda_min,
lambda_max,
n_buckets: n_buckets as u32,
max_component_value,
write_fp16,
filter_integral: base.filter.integral(),
output_rgbf_from_sensor_rgb: SquareMatrix::identity(), // Logic omitted
pixels: DeviceArray2D {
values: storage.pixels.as_mut_ptr(),
extent: base.pixel_bounds,
stride: base.pixel_bounds.max.x - base.pixel_bounds.min.x,
},
bucket_sums: storage.bucket_sums.as_ptr() as *mut f64,
weight_sums: storage.weight_sums.as_ptr() as *mut f64,
bucket_splats: storage.bucket_splats.as_ptr() as *mut AtomicFloat,
};
Self {
view,
_storage: storage,
}
}
}
pub struct GBufferFilmHost {
pub device: GBufferFilm,
}
impl GBufferFilmHost {
pub fn new(
base: &FilmBase,
output_from_render: &AnimatedTransform,
apply_inverse: bool,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
assert!(!base.pixel_bounds.is_empty());
let sensor_ptr = base.sensor;
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
let filter_integral = base.filter.integral();
let pixels = DeviceArray2D::new(base.pixel_bounds);
let device = GBufferFilm {
base: base.clone(),
output_from_render: output_from_render.clone(),
apply_inverse,
pixels,
colorspace: colorspace.clone(),
max_component_value,
write_fp16,
filter_integral,
output_rgbf_from_sensor_rgb,
};
Self { device }
}
}
pub trait FilmBaseHost {
fn create( fn create(
params: &ParameterDictionary, params: &ParameterDictionary,
filter: Filter, filter: Filter,
sensor: Option<&PixelSensor>, sensor: Option<&DevicePixelSensor>,
loc: &FileLoc, loc: &FileLoc,
) -> Self; ) -> Self;
} }
impl FilmBaseHost for FilmBase { impl CreateFilmBase for FilmBase {
fn create( fn create(
params: &ParameterDictionary, params: &ParameterDictionary,
filter: Filter, filter: Filter,
sensor: Option<&PixelSensor>, sensor: Option<&DevicePixelSensor>,
loc: &FileLoc, loc: &FileLoc,
) -> Self { ) -> Self {
let x_res = params.get_one_int("xresolution", 1280); let x_res = params.get_one_int("xresolution", 1280);
@ -395,7 +267,7 @@ impl FilmBaseHost for FilmBase {
pub trait FilmTrait: Sync { pub trait FilmTrait: Sync {
fn base(&self) -> &FilmBase; fn base(&self) -> &FilmBase;
fn get_pixel_rgb(&self, p: Point2i, splat_scale: Option<Float>) -> RGB; fn get_pixel_rgb(&self, p: Point2i, splat_scale: Option<Float>) -> RGB;
fn get_filename(&self) -> &str; // fn get_filename(&self) -> &str;
fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float) { fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float) {
let image = self.get_image(metadata, splat_scale); let image = self.get_image(metadata, splat_scale);
image image
@ -495,13 +367,13 @@ impl FilmTrait for Film {
} }
} }
fn get_filename(&self) -> &str { // fn get_filename(&self) -> &str {
match self { // match self {
Film::RGB(f) => &f.filename, // Film::RGB(f) => &f.base().filename,
Film::GBuffer(f) => &f.filename, // Film::GBuffer(f) => &f.base().filename,
Film::Spectral(f) => &f.filename, // Film::Spectral(f) => &f.base().filename,
} // }
} // }
} }
pub trait FilmFactory { pub trait FilmFactory {
@ -512,7 +384,8 @@ pub trait FilmFactory {
filter: Filter, filter: Filter,
_camera_transform: Option<CameraTransform>, _camera_transform: Option<CameraTransform>,
loc: &FileLoc, loc: &FileLoc,
) -> Result<Self, String>; arena: &mut Arena,
) -> Result<Self>;
} }
impl FilmFactory for Film { impl FilmFactory for Film {
@ -523,104 +396,37 @@ impl FilmFactory for Film {
filter: Filter, filter: Filter,
camera_transform: Option<CameraTransform>, camera_transform: Option<CameraTransform>,
loc: &FileLoc, loc: &FileLoc,
) -> Result<Self, String> { arena: &mut Arena,
) -> Result<Self> {
match name { match name {
"rgb" => { "gbuffer" => GBufferFilm::create(
let colorspace = params.color_space.as_ref().unwrap(); name,
let max_component_value = params,
params.get_one_float("maxcomponentvalue", Float::INFINITY); exposure_time,
let write_fp16 = params.get_one_bool("savefp16", true); filter,
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?; camera_transform,
let film_base = FilmBase::create(params, filter, Some(sensor), loc); loc,
Ok(RGBFilm::new( arena,
film_base, ),
&colorspace, "rgb" => RGBFilm::create(
max_component_value, name,
write_fp16, params,
)) exposure_time,
} filter,
"gbuffer" => { camera_transform,
let colorspace = params.color_space.as_ref().unwrap(); loc,
let max_component_value = arena,
params.get_one_float("maxcomponentvalue", Float::INFINITY); ),
let write_fp16 = params.get_one_bool("savefp16", true); "spectral" => SpectralFilm::create(
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?; name,
let film_base = FilmBase::create(params, filter, Some(sensor), loc); params,
exposure_time,
let filename = params.get_one_string("filename", "pbrt.exr"); filter,
if Path::new(&filename).extension() != Some("exr".as_ref()) { camera_transform,
return Err(format!( loc,
"{}: EXR is the only format supported by GBufferFilm", arena,
loc ),
) _ => Err(anyhow!("Film type '{}' unknown at {}", name, loc)),
.into());
}
let coords_system = params.get_one_string("coordinatesystem", "camera");
let mut apply_inverse = false;
let camera_transform = camera_transform
.ok_or_else(|| "GBufferFilm requires a camera_transform".to_string())?;
let output_from_render = if coords_system == "camera" {
apply_inverse = true;
camera_transform.render_from_camera
} else if coords_system == "world" {
AnimatedTransform::from_transform(&camera_transform.world_from_render)
} else {
return Err(format!(
"{}: unknown coordinate system for GBufferFilm. (Expecting camera
or world",
loc
)
.into());
};
Ok(GBufferFilm::new(
&film_base,
&output_from_render,
apply_inverse,
colorspace,
max_component_value,
write_fp16,
))
}
"spectral" => {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value =
params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(format!(
"{}: EXR is the only format supported by GBufferFilm",
loc
)
.into());
}
let n_buckets = params.get_one_int("nbuckets", 16) as usize;
let lambda_min = params.get_one_float("lambdamin", LAMBDA_MIN as Float);
let lambda_max = params.get_one_float("lambdamin", LAMBDA_MAX as Float);
if lambda_min < LAMBDA_MIN as Float && lambda_max > LAMBDA_MAX as Float {
return Err(format!(
"{}: PBRT must be recompiled with different values of LAMBDA_MIN and LAMBDA_MAX",
loc
));
}
Ok(SpectralFilm::new(
&film_base,
lambda_min,
lambda_max,
n_buckets,
colorspace,
max_component_value,
write_fp16,
))
}
_ => Err(format!("Film type '{}' unknown at {}", name, loc)),
} }
} }
} }
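
At the call site (see the BasicScene hunk further down), the visible difference is only the arena threaded through to the factory; roughly, with the leading arguments assumed since the hunk does not show them:

    let film = Film::create(
        &film.name,        // "rgb", "gbuffer", or "spectral"
        &film.parameters,
        exposure_time,
        filt.expect("Must have a filter"),
        Some(camera.camera_transform.clone()),
        &film.loc,
        arena,             // new: creation can allocate from the scene arena
    )
    .expect("Must have a film");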

View file

@ -1,10 +1,10 @@
use crate::filters::*;
use crate::utils::sampling::PiecewiseConstant2D; use crate::utils::sampling::PiecewiseConstant2D;
use crate::utils::{FileLoc, ParameterDictionary}; use crate::utils::{FileLoc, ParameterDictionary};
use shared::Float; use shared::Float;
use shared::core::filter::{Filter, FilterSampler}; use shared::core::filter::{Filter, FilterSampler};
use shared::core::geometry::{Bounds2f, Point2f, Vector2f}; use shared::core::geometry::{Bounds2f, Point2f, Vector2f};
use shared::filters::*; use shared::filters::*;
use shared::utils::containers::DeviceArray2D;
pub trait FilterFactory { pub trait FilterFactory {
fn create(name: &str, params: &ParameterDictionary, loc: &FileLoc) -> Result<Filter, String>; fn create(name: &str, params: &ParameterDictionary, loc: &FileLoc) -> Result<Filter, String>;
@ -71,7 +71,7 @@ impl CreateFilterSampler for FilterSampler {
let nx = (32.0 * radius.x()) as usize; let nx = (32.0 * radius.x()) as usize;
let ny = (32.0 * radius.y()) as usize; let ny = (32.0 * radius.y()) as usize;
let mut f = DeviceArray2D::new_with_dims(nx, ny); let mut f = Array2D::new_with_dims(nx, ny);
for y in 0..f.y_size() { for y in 0..f.y_size() {
for x in 0..f.x_size() { for x in 0..f.x_size() {
let p = domain.lerp(Point2f::new( let p = domain.lerp(Point2f::new(
@ -82,6 +82,10 @@ impl CreateFilterSampler for FilterSampler {
} }
} }
let distrib = PiecewiseConstant2D::new_with_bounds(&f, domain); let distrib = PiecewiseConstant2D::new_with_bounds(&f, domain);
Self { domain, f, distrib } Self {
domain,
f,
distrib: distrib.device,
}
} }
} }

View file

@ -1,14 +1,13 @@
use super::{Image, ImageAndMetadata, ImageMetadata}; use super::{Image, ImageAndMetadata, ImageMetadata};
use crate::core::image::{PixelStorage, WrapMode}; use crate::core::image::{PixelStorage, WrapMode};
use crate::utils::error::ImageError; use crate::utils::error::ImageError;
use anyhow::Error;
use anyhow::{Context, Result, bail}; use anyhow::{Context, Result, bail};
use exr::prelude::{read_first_rgba_layer_from_file, write_rgba_file}; use exr::prelude::{read_first_rgba_layer_from_file, write_rgba_file};
use image_rs::{DynamicImage, ImageReader}; use image_rs::{DynamicImage, ImageReader};
use shared::Float; use shared::Float;
use shared::core::color::{ColorEncoding, LINEAR}; use shared::core::color::{ColorEncoding, LINEAR, SRGB};
use shared::core::geometry::Point2i; use shared::core::geometry::Point2i;
use shared::core::image::{DeviceImage, ImageBase, PixelFormat}; use shared::core::image::{PixelFormat};
use std::fs::File; use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write}; use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::path::Path; use std::path::Path;
@ -38,7 +37,7 @@ impl ImageIO for Image {
} }
} }
fn write(&self, filename: &str, metadata: &ImageMetadata) -> Result<(), Error> { fn write(&self, filename: &str, metadata: &ImageMetadata) -> Result<()> {
let path = Path::new(filename); let path = Path::new(filename);
let ext = path.extension().and_then(|s| s.to_str()).unwrap_or(""); let ext = path.extension().and_then(|s| s.to_str()).unwrap_or("");
let res = match ext.to_lowercase().as_str() { let res = match ext.to_lowercase().as_str() {
@ -48,12 +47,13 @@ impl ImageIO for Image {
"qoi" => self.write_qoi(path), "qoi" => self.write_qoi(path),
_ => Err(anyhow::anyhow!("Unsupported write format: {}", ext)), _ => Err(anyhow::anyhow!("Unsupported write format: {}", ext)),
}; };
res.map_err(|e| ImageError::Io(std::io::Error::other(e))) res.map_err(|e| ImageError::Io(std::io::Error::other(e)))?;
Ok(())
} }
fn write_png(&self, path: &Path) -> Result<()> { fn write_png(&self, path: &Path) -> Result<()> {
let w = self.resolution.x() as u32; let w = self.resolution().x() as u32;
let h = self.resolution.y() as u32; let h = self.resolution().y() as u32;
// Convert whatever we have to u8 [0..255] // Convert whatever we have to u8 [0..255]
let data = self.to_u8_buffer(); let data = self.to_u8_buffer();
@ -99,8 +99,8 @@ impl ImageIO for Image {
} }
fn write_qoi(&self, path: &Path) -> Result<()> { fn write_qoi(&self, path: &Path) -> Result<()> {
let w = self.resolution.x() as u32; let w = self.resolution().x() as u32;
let h = self.resolution.y() as u32; let h = self.resolution().y() as u32;
let data = self.to_u8_buffer(); let data = self.to_u8_buffer();
let color_type = match self.n_channels() { let color_type = match self.n_channels() {
@ -122,8 +122,8 @@ impl ImageIO for Image {
fn write_exr(&self, path: &Path, _metadata: &ImageMetadata) -> Result<()> { fn write_exr(&self, path: &Path, _metadata: &ImageMetadata) -> Result<()> {
// EXR requires F32 // EXR requires F32
let w = self.resolution.x() as usize; let w = self.resolution().x() as usize;
let h = self.resolution.y() as usize; let h = self.resolution().y() as usize;
let c = self.n_channels(); let c = self.n_channels();
write_rgba_file(path, w, h, |x, y| { write_rgba_file(path, w, h, |x, y| {
@ -159,8 +159,9 @@ impl ImageIO for Image {
} }
// Header // Header
let res = self.resolution();
writeln!(writer, "PF")?; writeln!(writer, "PF")?;
writeln!(writer, "{} {}", self.resolution.x(), self.resolution.y())?; writeln!(writer, "{} {}", res.x(), res.y())?;
let scale = if cfg!(target_endian = "little") { let scale = if cfg!(target_endian = "little") {
-1.0 -1.0
} else { } else {
@ -169,8 +170,8 @@ impl ImageIO for Image {
writeln!(writer, "{}", scale)?; writeln!(writer, "{}", scale)?;
// PBRT stores top-to-bottom. // PBRT stores top-to-bottom.
for y in (0..self.resolution.y()).rev() { for y in (0..res.y()).rev() {
for x in 0..self.resolution.x() { for x in 0..res.x() {
for c in 0..3 { for c in 0..3 {
let val = let val =
self.get_channel_with_wrap(Point2i::new(x, y), c, WrapMode::Clamp.into()); self.get_channel_with_wrap(Point2i::new(x, y), c, WrapMode::Clamp.into());
@ -182,9 +183,11 @@ impl ImageIO for Image {
Ok(()) Ok(())
} }
// TODO: Change Image to use Vec for data, always. Only convert to Device types on
// constructors/creation
fn to_u8_buffer(&self) -> Vec<u8> { fn to_u8_buffer(&self) -> Vec<u8> {
match &self.pixels { match &self.pixels {
PixelStorage::U8(data) => data.clone(), PixelStorage::U8(data) => data,
PixelStorage::F16(data) => data PixelStorage::F16(data) => data
.iter() .iter()
.map(|v| (v.to_f32().clamp(0.0, 1.0) * 255.0 + 0.5) as u8) .map(|v| (v.to_f32().clamp(0.0, 1.0) * 255.0 + 0.5) as u8)
@ -207,27 +210,20 @@ fn read_generic(path: &Path, encoding: Option<ColorEncoding>) -> Result<ImageAnd
let res = Point2i::new(w, h); let res = Point2i::new(w, h);
// Check if it was loaded as high precision or standard // Check if it was loaded as high precision or standard
let rgb_names = || vec!["R".to_string(), "G".to_string(), "B".to_string()]; let rgb_names = vec!["R", "G", "B"];
let rgba_names = || { let rgba_names = vec!["R", "G", "B", "A"];
vec![
"R".to_string(),
"G".to_string(),
"B".to_string(),
"A".to_string(),
]
};
let image = match dyn_img { let image = match dyn_img {
DynamicImage::ImageRgb32F(buf) => Image::from_f32(buf.into_raw(), res, rgb_names()), DynamicImage::ImageRgb32F(buf) => Image::from_f32(buf.into_raw(), res, &rgb_names),
DynamicImage::ImageRgba32F(buf) => Image::from_f32(buf.into_raw(), res, rgba_names()), DynamicImage::ImageRgba32F(buf) => Image::from_f32(buf.into_raw(), res, &rgba_names),
_ => { _ => {
// Default to RGB8 for everything else // Default to RGB8 for everything else
let enc = encoding.unwrap_or(ColorEncoding::sRGB); let enc = encoding.unwrap_or(SRGB);
if dyn_img.color().has_alpha() { if dyn_img.color().has_alpha() {
let buf = dyn_img.to_rgba8(); let buf = dyn_img.to_rgba8();
Image::from_u8(buf.into_raw(), res, rgba_names(), enc) Image::from_u8(buf.into_raw(), res, &rgba_names, enc)
} else { } else {
let buf = dyn_img.to_rgb8(); let buf = dyn_img.to_rgb8();
Image::from_u8(buf.into_raw(), res, rgb_names(), enc) Image::from_u8(buf.into_raw(), res, &rgb_names, enc)
} }
} }
}; };
@ -258,13 +254,11 @@ fn read_exr(path: &Path) -> Result<ImageAndMetadata> {
let w = image.layer_data.size.width() as i32; let w = image.layer_data.size.width() as i32;
let h = image.layer_data.size.height() as i32; let h = image.layer_data.size.height() as i32;
let image = Image { let image = Image::from_f32(
format: PixelFormat::F32, image.layer_data.channel_data.pixels,
resolution: Point2i::new(w, h), Point2i::new(w, h),
channel_names: vec!["R".into(), "G".into(), "B".into(), "A".into()], &vec!["R", "G", "B", "A"],
encoding: LINEAR, );
pixels: PixelStorage::F32(image.layer_data.channel_data.pixels),
};
let metadata = ImageMetadata::default(); let metadata = ImageMetadata::default();
Ok(ImageAndMetadata { image, metadata }) Ok(ImageAndMetadata { image, metadata })
@ -338,12 +332,12 @@ fn read_pfm(path: &Path) -> Result<ImageAndMetadata> {
} }
let names = if channels == 1 { let names = if channels == 1 {
vec!["Y".into()] vec!["Y"]
} else { } else {
vec!["R".into(), "G".into(), "B".into()] vec!["R", "G", "B"]
}; };
let image = Image::new(PixelFormat::F32, Point2i::new(w, h), names, LINEAR); let image = Image::new(PixelFormat::F32, Point2i::new(w, h), &names, LINEAR.into());
let metadata = ImageMetadata::default(); let metadata = ImageMetadata::default();
Ok(ImageAndMetadata { image, metadata }) Ok(ImageAndMetadata { image, metadata })

View file

@ -1,7 +1,7 @@
use crate::utils::containers::Array2D; use crate::utils::containers::Array2D;
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use half::f16; use half::f16;
use rayon::prelude::ParallelIterator; use rayon::prelude::{IndexedParallelIterator, ParallelIterator, ParallelSliceMut};
use shared::Float; use shared::Float;
use shared::core::color::{ColorEncoding, ColorEncodingTrait, LINEAR}; use shared::core::color::{ColorEncoding, ColorEncodingTrait, LINEAR};
use shared::core::geometry::{Bounds2f, Point2f, Point2i}; use shared::core::geometry::{Bounds2f, Point2f, Point2i};
@ -78,7 +78,7 @@ impl PixelStorage {
pub fn as_pixels(&self) -> Pixels { pub fn as_pixels(&self) -> Pixels {
match self { match self {
PixelStorage::U8(data) => Pixels::U8(data.as_ptr().into()), PixelStorage::U8(data) => Pixels::U8(data.as_ptr().into()),
PixelStorage::F16(data) => Pixels::F16((data.as_ptr() as *const u16).into()), PixelStorage::F16(data) => Pixels::F16((data.as_ptr() as *const f16).into()),
PixelStorage::F32(data) => Pixels::F32(data.as_ptr().into()), PixelStorage::F32(data) => Pixels::F32(data.as_ptr().into()),
} }
} }
@ -145,7 +145,7 @@ impl Image {
Self { Self {
storage, storage,
channel_names: String::from(channel_names), channel_names,
device, device,
} }
} }
@ -212,7 +212,6 @@ impl Image {
) -> Self { ) -> Self {
let n_channels = channel_names.len(); let n_channels = channel_names.len();
let pixel_count = (resolution.x() * resolution.y()) as usize * n_channels; let pixel_count = (resolution.x() * resolution.y()) as usize * n_channels;
let owned_names: Vec<String> = channel_names.iter().map(|s| s.to_string()).collect();
let storage = match format { let storage = match format {
PixelFormat::U8 => PixelStorage::U8(vec![0; pixel_count].into()), PixelFormat::U8 => PixelStorage::U8(vec![0; pixel_count].into()),
@ -220,7 +219,7 @@ impl Image {
PixelFormat::F32 => PixelStorage::F32(vec![0.0; pixel_count].into()), PixelFormat::F32 => PixelStorage::F32(vec![0.0; pixel_count].into()),
}; };
Self::from_storage(storage, resolution, owned_names, encoding) Self::from_storage(storage, resolution, channel_names, *encoding)
} }
pub fn new_constant( pub fn new_constant(
@ -245,9 +244,7 @@ impl Image {
data.extend_from_slice(values); data.extend_from_slice(values);
} }
let owned_names: Vec<String> = channel_names.iter().map(|s| s.to_string()).collect(); Self::from_f32(data, resolution, channel_names)
Self::from_f32(data, resolution, owned_names)
} }
// Access // Access
@ -585,7 +582,7 @@ impl Image {
pub fn update_view_pointers(&mut self) { pub fn update_view_pointers(&mut self) {
self.device.pixels = match &self.storage { self.device.pixels = match &self.storage {
PixelStorage::U8(vec) => Pixels::U8(vec.as_ptr().into()), PixelStorage::U8(vec) => Pixels::U8(vec.as_ptr().into()),
PixelStorage::F16(vec) => Pixels::F16((vec.as_ptr() as *const u16).into()), PixelStorage::F16(vec) => Pixels::F16((vec.as_ptr() as *const f16).into()),
PixelStorage::F32(vec) => Pixels::F32(vec.as_ptr().into()), PixelStorage::F32(vec) => Pixels::F32(vec.as_ptr().into()),
}; };
} }

View file

@ -1,10 +1,11 @@
use super::Image; use super::Image;
use crate::core::image::pixel::PixelStorage; use crate::core::image::PixelStorage;
use crate::core::image::pixel::PixelStorageTrait;
use rayon::prelude::*; use rayon::prelude::*;
use shared::Float; use shared::Float;
use shared::core::color::ColorEncoding; use shared::core::color::ColorEncoding;
use shared::core::geometry::{Bounds2i, Point2i}; use shared::core::geometry::{Bounds2i, Point2i};
use shared::core::image::{PixelFormat, Pixels, WrapMode, WrapMode2D}; use shared::core::image::{PixelFormat, WrapMode, WrapMode2D};
use shared::utils::Ptr; use shared::utils::Ptr;
use shared::utils::math::windowed_sinc; use shared::utils::math::windowed_sinc;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
@ -18,35 +19,41 @@ pub struct ResampleWeight {
impl Image { impl Image {
pub fn flip_y(&mut self) { pub fn flip_y(&mut self) {
let res = self.resolution(); let res = self.resolution();
let nc = self.n_channels(); let nc = self.n_channels() as usize;
match &mut self.pixels { match &mut self.pixels {
Pixels::U8(d) => flip_y_kernel(d, res, nc), PixelStorage::U8(d) => flip_y_kernel(d, res, nc),
Pixels::F16(d) => flip_y_kernel(d, res, nc), PixelStorage::F16(d) => flip_y_kernel(d, res, nc),
Pixels::F32(d) => flip_y_kernel(d, res, nc), PixelStorage::F32(d) => flip_y_kernel(d, res, nc),
} }
} }
pub fn crop(&self, bounds: Bounds2i) -> Image { pub fn crop(&self, bounds: Bounds2i) -> Image {
let res = self.resolution(); let res = self.resolution();
let n_channels = self.n_channels(); let n_channels = self.n_channels() as usize;
let new_res = Point2i::new( let new_res = Point2i::new(
bounds.p_max.x() - bounds.p_min.x(), bounds.p_max.x() - bounds.p_min.x(),
bounds.p_max.y() - bounds.p_min.y(), bounds.p_max.y() - bounds.p_min.y(),
); );
let mut new_image = Image::from_vector( let mut new_image = Image::new(
self.format, self.format(),
new_res, new_res,
self.channel_names.clone(), &self.channel_names,
self.encoding(), self.encoding().into(),
); );
match (&self.pixels, &mut new_image.pixels) { match (&self.pixels, &mut new_image.pixels) {
(Pixels::U8(src), Pixels::U8(dst)) => crop_kernel(src, dst, res, bounds, n_channels), (PixelStorage::U8(src), PixelStorage::U8(dst)) => {
(Pixels::F16(src), Pixels::F16(dst)) => crop_kernel(src, dst, res, bounds, n_channels), crop_kernel(src, dst, res, bounds, n_channels)
(Pixels::F32(src), Pixels::F32(dst)) => crop_kernel(src, dst, res, bounds, n_channels), }
(PixelStorage::F16(src), PixelStorage::F16(dst)) => {
crop_kernel(src, dst, res, bounds, n_channels)
}
(PixelStorage::F32(src), PixelStorage::F32(dst)) => {
crop_kernel(src, dst, res, bounds, n_channels)
}
_ => panic!("Format mismatch in crop"), _ => panic!("Format mismatch in crop"),
} }
@ -55,9 +62,9 @@ impl Image {
pub fn copy_rect_out(&self, extent: Bounds2i, buf: &mut [Float], wrap: WrapMode2D) { pub fn copy_rect_out(&self, extent: Bounds2i, buf: &mut [Float], wrap: WrapMode2D) {
match &self.pixels { match &self.pixels {
Pixels::U8(d) => copy_rect_out_kernel(d, self, extent, buf, wrap), PixelStorage::U8(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
Pixels::F16(d) => copy_rect_out_kernel(d, self, extent, buf, wrap), PixelStorage::F16(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
Pixels::F32(d) => copy_rect_out_kernel(d, self, extent, buf, wrap), PixelStorage::F32(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
} }
} }
@ -67,9 +74,9 @@ impl Image {
let encoding = self.encoding(); let encoding = self.encoding();
match &mut self.pixels { match &mut self.pixels {
Pixels::U8(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf), PixelStorage::U8(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
Pixels::F16(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf), PixelStorage::F16(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
Pixels::F32(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf), PixelStorage::F32(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
} }
} }
@ -81,11 +88,11 @@ impl Image {
"ResizeUp requires Float format" "ResizeUp requires Float format"
); );
let resampled_image = Arc::new(Mutex::new(Image::from_vector( let resampled_image = Arc::new(Mutex::new(Image::new(
PixelFormat::F32, // Force float output PixelFormat::F32,
new_res, new_res,
self.channel_names.clone(), &self.channel_names,
self.encoding(), self.encoding().into(),
))); )));
let x_weights = resample_weights(res.x() as usize, new_res.x() as usize); let x_weights = resample_weights(res.x() as usize, new_res.x() as usize);
@ -104,7 +111,7 @@ impl Image {
let in_extent = let in_extent =
Bounds2i::from_points(Point2i::new(x_start, y_start), Point2i::new(x_end, y_end)); Bounds2i::from_points(Point2i::new(x_start, y_start), Point2i::new(x_end, y_end));
let mut in_buf = vec![0.0; in_extent.area() as usize * n_channels]; let mut in_buf = vec![0.0; in_extent.area() as usize * n_channels as usize];
self.copy_rect_out(in_extent, &mut in_buf, wrap_mode); self.copy_rect_out(in_extent, &mut in_buf, wrap_mode);
let out_buf = compute_resize_tile( let out_buf = compute_resize_tile(
@ -143,14 +150,14 @@ impl Image {
let mut next = Image::new( let mut next = Image::new(
prev.format(), prev.format(),
new_res, new_res,
prev.channel_names.clone(), &prev.channel_names,
prev.encoding(), prev.encoding().into(),
); );
match &mut next.pixels { match &mut next.pixels {
Pixels::U8(d) => downsample_kernel(d, new_res, prev, internal_wrap), PixelStorage::U8(d) => downsample_kernel(d, new_res, prev, internal_wrap),
Pixels::F16(d) => downsample_kernel(d, new_res, prev, internal_wrap), PixelStorage::F16(d) => downsample_kernel(d, new_res, prev, internal_wrap),
Pixels::F32(d) => downsample_kernel(d, new_res, prev, internal_wrap), PixelStorage::F32(d) => downsample_kernel(d, new_res, prev, internal_wrap),
} }
levels.push(next); levels.push(next);
} }
@ -158,7 +165,7 @@ impl Image {
} }
} }
fn flip_y_kernel<T: PixelStorage>(pixels: &mut [T], res: Point2i, channels: usize) { fn flip_y_kernel<T: PixelStorageTrait>(pixels: &mut [T], res: Point2i, channels: usize) {
let w = res.x() as usize; let w = res.x() as usize;
let h = res.y() as usize; let h = res.y() as usize;
let stride = w * channels; let stride = w * channels;
@ -171,7 +178,7 @@ fn flip_y_kernel<T: PixelStorage>(pixels: &mut [T], res: Point2i, channels: usiz
} }
} }
fn crop_kernel<T: PixelStorage>( fn crop_kernel<T: PixelStorageTrait>(
src: &[T], src: &[T],
dst: &mut [T], dst: &mut [T],
src_res: Point2i, src_res: Point2i,
@ -194,7 +201,7 @@ fn crop_kernel<T: PixelStorage>(
}); });
} }
fn copy_rect_out_kernel<T: PixelStorage>( fn copy_rect_out_kernel<T: PixelStorageTrait>(
src: &[T], src: &[T],
image: &Image, image: &Image,
extent: Bounds2i, extent: Bounds2i,
@ -223,14 +230,15 @@ fn copy_rect_out_kernel<T: PixelStorage>(
// We fall back to get_channel which handles the wrapping math. // We fall back to get_channel which handles the wrapping math.
let p = Point2i::new(x, y); let p = Point2i::new(x, y);
for c in 0..channels { for c in 0..channels {
row_buf[x_rel * channels + c] = image.get_channel_with_wrap(p, c, wrap); row_buf[x_rel * channels + c] =
image.get_channel_with_wrap(p, c.try_into().unwrap(), wrap);
} }
} }
} }
}); });
} }
fn copy_rect_in_kernel<T: PixelStorage>( fn copy_rect_in_kernel<T: PixelStorageTrait>(
dst: &mut [T], dst: &mut [T],
res: Point2i, res: Point2i,
channels: usize, channels: usize,
@ -263,7 +271,7 @@ fn copy_rect_in_kernel<T: PixelStorage>(
} }
} }
fn downsample_kernel<T: PixelStorage>( fn downsample_kernel<T: PixelStorageTrait>(
dst: &mut Ptr<T>, dst: &mut Ptr<T>,
dst_res: Point2i, dst_res: Point2i,
prev: &Image, prev: &Image,

View file

@ -3,12 +3,12 @@ use shared::Float;
use shared::core::color::{ColorEncoding, ColorEncodingTrait}; use shared::core::color::{ColorEncoding, ColorEncodingTrait};
// Allows writing generic algorithms that work on any image format. // Allows writing generic algorithms that work on any image format.
pub trait PixelStorage: Copy + Send + Sync + 'static + PartialEq { pub trait PixelStorageTrait: Copy + Send + Sync + 'static + PartialEq {
fn from_linear(val: Float, encoding: ColorEncoding) -> Self; fn from_linear(val: Float, encoding: ColorEncoding) -> Self;
fn to_linear(self, encoding: ColorEncoding) -> Float; fn to_linear(self, encoding: ColorEncoding) -> Float;
} }
impl PixelStorage for f32 { impl PixelStorageTrait for f32 {
#[inline(always)] #[inline(always)]
fn from_linear(val: Float, _enc: ColorEncoding) -> Self { fn from_linear(val: Float, _enc: ColorEncoding) -> Self {
val val
@ -19,7 +19,7 @@ impl PixelStorage for f32 {
} }
} }
impl PixelStorage for f16 { impl PixelStorageTrait for f16 {
#[inline(always)] #[inline(always)]
fn from_linear(val: Float, _enc: ColorEncoding) -> Self { fn from_linear(val: Float, _enc: ColorEncoding) -> Self {
f16::from_f32(val) f16::from_f32(val)
@ -30,7 +30,7 @@ impl PixelStorage for f16 {
} }
} }
impl PixelStorage for u8 { impl PixelStorageTrait for u8 {
#[inline(always)] #[inline(always)]
fn from_linear(val: Float, enc: ColorEncoding) -> Self { fn from_linear(val: Float, enc: ColorEncoding) -> Self {
let mut out = [0u8]; let mut out = [0u8];

View file

@ -23,7 +23,7 @@ pub trait MaterialFactory {
name: &str, name: &str,
params: &TextureParameterDictionary, params: &TextureParameterDictionary,
normal_map: Option<Arc<Image>>, normal_map: Option<Arc<Image>>,
named_materials: Arc<HashMap<String, Material>>, named_materials: &HashMap<String, Material>,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena, arena: &mut Arena,
) -> Result<Self>; ) -> Result<Self>;
@ -34,52 +34,48 @@ impl MaterialFactory for Material {
name: &str, name: &str,
parameters: &TextureParameterDictionary, parameters: &TextureParameterDictionary,
normal_map: Option<Arc<Image>>, normal_map: Option<Arc<Image>>,
named_materials: Arc<HashMap<String, Material>>, named_materials: &HashMap<String, Material>,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena, arena: &mut Arena,
) -> Result<Material> { ) -> Result<Material> {
match name { match name {
"diffuse" => { "diffuse" => {
DiffuseMaterial::create(parameters, normal_map, &named_materials, &loc, arena) DiffuseMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"coateddiffuse" => { "coateddiffuse" => {
CoatedDiffuseMaterial::create(parameters, normal_map, &named_materials, &loc, arena) CoatedDiffuseMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"coatedconductor" => CoatedConductorMaterial::create( "coatedconductor" => CoatedConductorMaterial::create(
parameters, parameters,
normal_map, normal_map,
&named_materials, named_materials,
&loc, &loc,
arena, arena,
), ),
"diffusetransmission" => DiffuseTransmissionMaterial::create( "diffusetransmission" => DiffuseTransmissionMaterial::create(
parameters, parameters,
normal_map, normal_map,
&named_materials, named_materials,
&loc, &loc,
arena, arena,
), ),
"dielectric" => { "dielectric" => {
DielectricMaterial::create(parameters, normal_map, &named_materials, &loc, arena) DielectricMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"thindielectric" => ThinDielectricMaterial::create( "thindielectric" => {
parameters, ThinDielectricMaterial::create(parameters, normal_map, named_materials, &loc, arena)
normal_map, }
&named_materials, "hair" => HairMaterial::create(parameters, normal_map, named_materials, &loc, arena),
&loc,
arena,
),
"hair" => HairMaterial::create(parameters, normal_map, &named_materials, &loc, arena),
"conductor" => { "conductor" => {
ConductorMaterial::create(parameters, normal_map, &named_materials, &loc, arena) ConductorMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"measured" => { "measured" => {
MeasuredMaterial::create(parameters, normal_map, &named_materials, &loc, arena) MeasuredMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"subsurface" => { "subsurface" => {
SubsurfaceMaterial::create(parameters, normal_map, &named_materials, &loc, arena) SubsurfaceMaterial::create(parameters, normal_map, named_materials, &loc, arena)
} }
"mix" => MixMaterial::create(parameters, normal_map, &named_materials, &loc, arena), "mix" => MixMaterial::create(parameters, normal_map, named_materials, &loc, arena),
_ => Err(anyhow!("Material type '{}' unknown at {}", $name, $loc)), _ => Err(anyhow!("Material type '{}' unknown at {}", $name, $loc)),
} }

View file

@ -1,6 +1,7 @@
use super::BasicScene; use super::BasicScene;
use super::entities::*; use super::entities::*;
use crate::spectra::get_colorspace_context; use crate::Arena;
use crate::spectra::get_colorspace_device;
use crate::utils::error::FileLoc; use crate::utils::error::FileLoc;
use crate::utils::normalize_utf8; use crate::utils::normalize_utf8;
use crate::utils::parameters::error_exit; use crate::utils::parameters::error_exit;
@ -188,7 +189,7 @@ impl ParserTarget for BasicSceneBuilder {
} }
fn color_space(&mut self, name: &str, loc: FileLoc) { fn color_space(&mut self, name: &str, loc: FileLoc) {
let stdcs = get_colorspace_context(); let stdcs = get_colorspace_device();
let _ = match stdcs.get_named(name) { let _ = match stdcs.get_named(name) {
Ok(cs) => { Ok(cs) => {
self.graphics_state.color_space = Some(cs); self.graphics_state.color_space = Some(cs);
@ -427,7 +428,7 @@ impl ParserTarget for BasicSceneBuilder {
}) })
} }
fn world_begin(&mut self, loc: FileLoc) { fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena) {
self.verify_options("WorldBegin", &loc); self.verify_options("WorldBegin", &loc);
self.current_block = BlockState::WorldBlock; self.current_block = BlockState::WorldBlock;
for i in 0..MAX_TRANSFORMS { for i in 0..MAX_TRANSFORMS {
@ -456,6 +457,7 @@ impl ParserTarget for BasicSceneBuilder {
self.current_accelerator self.current_accelerator
.take() .take()
.expect("Accelerator not set before WorldBegin"), .expect("Accelerator not set before WorldBegin"),
arena,
); );
} }
@ -529,6 +531,7 @@ impl ParserTarget for BasicSceneBuilder {
tex_name: &str, tex_name: &str,
params: &ParsedParameterVector, params: &ParsedParameterVector,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena,
) { ) {
let name = normalize_utf8(orig_name); let name = normalize_utf8(orig_name);
self.verify_world("Texture", &loc); self.verify_world("Texture", &loc);
@ -572,9 +575,11 @@ impl ParserTarget for BasicSceneBuilder {
}; };
if type_name == "float" { if type_name == "float" {
self.scene.add_float_texture(name.to_string(), entity); self.scene
.add_float_texture(name.to_string(), entity, arena);
} else { } else {
self.scene.add_spectrum_texture(name.to_string(), entity); self.scene
.add_spectrum_texture(name.to_string(), entity, arena);
} }
} }

View file

@ -1,6 +1,7 @@
use super::entities::*; use super::entities::*;
use super::state::*; use super::state::*;
use crate::core::camera::CameraFactory; use crate::core::camera::CameraFactory;
use crate::core::film::FilmFactory;
use crate::core::filter::FilterFactory; use crate::core::filter::FilterFactory;
use crate::core::image::{Image, io::ImageIO}; use crate::core::image::{Image, io::ImageIO};
use crate::core::material::MaterialFactory; use crate::core::material::MaterialFactory;
@ -8,12 +9,10 @@ use crate::core::primitive::{CreateGeometricPrimitive, CreateSimplePrimitive};
use crate::core::sampler::SamplerFactory; use crate::core::sampler::SamplerFactory;
use crate::core::shape::ShapeFactory; use crate::core::shape::ShapeFactory;
use crate::core::texture::{FloatTexture, SpectrumTexture}; use crate::core::texture::{FloatTexture, SpectrumTexture};
use crate::utils::arena;
use crate::utils::arena::Arena;
use crate::utils::error::FileLoc;
use crate::utils::parallel::{AsyncJob, run_async}; use crate::utils::parallel::{AsyncJob, run_async};
use crate::utils::parameters::{NamedTextures, ParameterDictionary, TextureParameterDictionary}; use crate::utils::parameters::{NamedTextures, ParameterDictionary, TextureParameterDictionary};
use crate::utils::{Upload, resolve_filename}; use crate::utils::{Upload, resolve_filename};
use crate::{Arena, FileLoc};
use parking_lot::Mutex; use parking_lot::Mutex;
use rayon::prelude::*; use rayon::prelude::*;
use shared::core::camera::Camera; use shared::core::camera::Camera;
@ -132,6 +131,7 @@ impl BasicScene {
filt.expect("Must have a filter"), filt.expect("Must have a filter"),
Some(camera.camera_transform.clone()), Some(camera.camera_transform.clone()),
&film.loc, &film.loc,
arena,
) )
.expect("Must have a film"), .expect("Must have a film"),
); );
@ -239,6 +239,7 @@ impl BasicScene {
pub fn add_float_texture(&self, name: String, texture: TextureSceneEntity, arena: &mut Arena) { pub fn add_float_texture(&self, name: String, texture: TextureSceneEntity, arena: &mut Arena) {
let mut state = self.texture_state.lock(); let mut state = self.texture_state.lock();
let arena = arena.clone();
self.add_texture_generic( self.add_texture_generic(
name, name,
texture, texture,
@ -404,15 +405,17 @@ impl BasicScene {
} }
let normal_map = self.get_normal_map(&state, &entity.parameters); let normal_map = self.get_normal_map(&state, &entity.parameters);
let tex_dict = let tex_dict = TextureParameterDictionary::new(
TextureParameterDictionary::new(entity.parameters.into(), Some(*textures)); Arc::new(entity.parameters.clone()),
Some(textures),
);
let mat = Material::create( let mat = Material::create(
&mat_type, &mat_type,
&tex_dict, &tex_dict,
normal_map, normal_map,
named_materials.into(), &named_materials, // Reference for now
entity.loc, entity.loc.clone(),
arena, arena,
) )
.expect("Could not create material"); .expect("Could not create material");
@ -434,7 +437,7 @@ impl BasicScene {
&entity.name, &entity.name,
&tex_dict, &tex_dict,
normal_map, normal_map,
&named_materials, // Reference &named_materials,
entity.loc.clone(), entity.loc.clone(),
arena, arena,
) )
@ -447,7 +450,6 @@ impl BasicScene {
pub fn create_aggregate( pub fn create_aggregate(
&self, &self,
arena: &mut Arena,
textures: &NamedTextures, textures: &NamedTextures,
named_materials: &HashMap<String, Material>, named_materials: &HashMap<String, Material>,
materials: &Vec<Material>, materials: &Vec<Material>,
@ -496,11 +498,12 @@ impl BasicScene {
*sh.render_from_object.as_ref(), *sh.render_from_object.as_ref(),
*sh.object_from_render.as_ref(), *sh.object_from_render.as_ref(),
sh.reverse_orientation, sh.reverse_orientation,
sh.base.parameters, sh.base.parameters.clone(),
lookup.textures.float_textures, &lookup.textures.float_textures,
sh.base.loc, sh.base.loc.clone(),
arena, arena,
) )
.expect("Could not create shape")
}) })
.collect() .collect()
} }
@ -509,7 +512,7 @@ impl BasicScene {
&self, &self,
entities: &[AnimatedShapeSceneEntity], entities: &[AnimatedShapeSceneEntity],
lookup: &SceneLookup, lookup: &SceneLookup,
arena: &mut Arena, arena: &Arena,
) -> Vec<Vec<Shape>> { ) -> Vec<Vec<Shape>> {
entities entities
.par_iter() .par_iter()
@ -519,23 +522,19 @@ impl BasicScene {
*sh.identity.as_ref(), *sh.identity.as_ref(),
*sh.identity.as_ref(), *sh.identity.as_ref(),
sh.reverse_orientation, sh.reverse_orientation,
sh.transformed_base.base.parameters, sh.transformed_base.base.parameters.clone(),
lookup.textures.float_textures, &lookup.textures.float_textures,
sh.transformed_base.base.loc, sh.transformed_base.base.loc.clone(),
arena, arena,
) )
.map_err(|e| { .expect("Could not create shape")
log::error!("{}: Failed to create shape: {}", sh.base.loc, e);
e
})
.ok()
}) })
.collect() .collect()
} }
fn upload_shapes( fn upload_shapes(
&self, &self,
arena: &mut Arena, arena: &Arena,
entities: &[ShapeSceneEntity], entities: &[ShapeSceneEntity],
loaded: Vec<Vec<Shape>>, loaded: Vec<Vec<Shape>>,
lookup: &SceneLookup, lookup: &SceneLookup,

View file

@ -7,7 +7,7 @@ use shared::core::light::Light;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug)] #[derive(Debug, Default)]
pub struct TextureState { pub struct TextureState {
pub serial_float_textures: Vec<(String, TextureSceneEntity)>, pub serial_float_textures: Vec<(String, TextureSceneEntity)>,
pub serial_spectrum_textures: Vec<(String, TextureSceneEntity)>, pub serial_spectrum_textures: Vec<(String, TextureSceneEntity)>,
@ -18,7 +18,7 @@ pub struct TextureState {
pub n_missing_textures: usize, pub n_missing_textures: usize,
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct MaterialState { pub struct MaterialState {
pub named_materials: Vec<(String, SceneEntity)>, pub named_materials: Vec<(String, SceneEntity)>,
pub materials: Vec<SceneEntity>, pub materials: Vec<SceneEntity>,
@ -26,13 +26,13 @@ pub struct MaterialState {
pub normal_maps: HashMap<String, Arc<Image>>, pub normal_maps: HashMap<String, Arc<Image>>,
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct LightState { pub struct LightState {
pub light_jobs: Vec<AsyncJob<Light>>, pub light_jobs: Vec<AsyncJob<Light>>,
pub area_lights: Vec<SceneEntity>, pub area_lights: Vec<SceneEntity>,
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct MediaState { pub struct MediaState {
pub jobs: HashMap<String, AsyncJob<Medium>>, pub jobs: HashMap<String, AsyncJob<Medium>>,
pub map: HashMap<String, Arc<Medium>>, pub map: HashMap<String, Arc<Medium>>,

View file

@ -1,11 +1,10 @@
use crate::core::texture::FloatTexture; use crate::core::texture::FloatTexture;
use crate::shapes::{BilinearPatchMesh, TriangleMesh}; use crate::shapes::{BilinearPatchMesh, TriangleMesh};
use crate::utils::{Arena, FileLoc, ParameterDictionary}; use crate::utils::{Arena, FileLoc, ParameterDictionary};
use anyhow::{Result, anyhow};
use parking_lot::Mutex;
use shared::core::shape::*; use shared::core::shape::*;
use shared::shapes::*; use shared::shapes::*;
// use shared::spectra::*;
use anyhow::Result;
use parking_lot::Mutex;
use shared::utils::Transform; use shared::utils::Transform;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -19,9 +18,9 @@ pub trait CreateShape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>, float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>>; ) -> Result<Vec<Shape>>;
} }
@ -32,9 +31,9 @@ pub trait ShapeFactory {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>, float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>>; ) -> Result<Vec<Shape>>;
} }
@ -45,9 +44,9 @@ impl ShapeFactory for Shape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>, float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
match name { match name {
"sphere" => SphereShape::create( "sphere" => SphereShape::create(

99
src/films/gbuffer.rs Normal file
View file

@ -0,0 +1,99 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensor, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use anyhow::{Result, anyhow};
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::{DevicePixelSensor, Film, FilmBase, GBufferFilm};
use shared::core::filter::{Filter, FilterTrait};
use shared::spectra::RGBColorSpace;
use shared::utils::AnimatedTransform;
use std::path::Path;
pub struct GBufferFilmHost {
pub device: GBufferFilm,
}
impl GBufferFilmHost {
pub fn new(
base: &FilmBase,
output_from_render: &AnimatedTransform,
apply_inverse: bool,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
assert!(!base.pixel_bounds.is_empty());
let sensor_ptr = base.sensor;
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
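// Sensor RGB -> XYZ -> output colorspace RGB.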
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
let filter_integral = base.filter.integral();
let pixels = Array2D::new(base.pixel_bounds);
let device = GBufferFilm {
base: base.clone(),
output_from_render: *output_from_render,
apply_inverse,
pixels: pixels.device,
colorspace: colorspace.clone(),
max_component_value,
write_fp16,
filter_integral,
output_rgbf_from_sensor_rgb,
};
Self { device }
}
}
impl CreateFilm for GBufferFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(&sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(anyhow!("{}: EXR is the only format supported by GBufferFilm", loc));
}
let coords_system = params.get_one_string("coordinatesystem", "camera");
let mut apply_inverse = false;
let camera_transform = camera_transform
.ok_or_else(|| anyhow!("GBufferFilm requires a camera_transform"))?;
let output_from_render = if coords_system == "camera" {
apply_inverse = true;
camera_transform.render_from_camera
} else if coords_system == "world" {
AnimatedTransform::from_transform(&camera_transform.world_from_render)
} else {
return Err(anyhow!(
"{}: unknown coordinate system for GBufferFilm (expected \"camera\" or \"world\")",
loc
));
};
let film = GBufferFilmHost::new(
&film_base,
&output_from_render,
apply_inverse,
colorspace,
max_component_value,
write_fp16,
);
Ok(Film::GBuffer(film.device))
}
}

26
src/films/mod.rs Normal file
View file

@ -0,0 +1,26 @@
use crate::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::Film;
use shared::core::filter::Filter;
pub mod gbuffer;
pub mod rgb;
pub mod spectral;
pub use gbuffer::*;
pub use rgb::*;
pub use spectral::*;
pub trait CreateFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Film>;
}

79
src/films/rgb.rs Normal file
View file

@ -0,0 +1,79 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::{DevicePixelSensor, Film, FilmBase, RGBFilm, RGBPixel};
use shared::core::filter::{Filter, FilterTrait};
use shared::spectra::RGBColorSpace;
struct RGBFilmStorage {
pixels: Array2D<RGBPixel>,
}
pub struct RGBFilmHost {
pub device: RGBFilm,
storage: RGBFilmStorage,
}
impl RGBFilmHost {
pub fn new(
base: FilmBase,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
let sensor_ptr = base.sensor;
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
let filter_integral = base.filter.integral();
let sensor_matrix = sensor.xyz_from_sensor_rgb;
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor_matrix;
let pixels: Array2D<RGBPixel> = Array2D::new(base.pixel_bounds);
let device = RGBFilm {
base,
max_component_value,
write_fp16,
filter_integral,
output_rgbf_from_sensor_rgb,
// Device-side view of the pixel array; `storage` below keeps the host allocation alive.
pixels: pixels.device,
};
let storage = RGBFilmStorage { pixels };
Self { device, storage }
}
}
impl CreateFilm for RGBFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> anyhow::Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = DevicePixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(&sensor), loc);
let film = RGBFilmHost::new(film_base, colorspace, max_component_value, write_fp16);
Ok(Film::RGB(film.device))
}
}

147
src/films/spectral.rs Normal file
View file

@ -0,0 +1,147 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use shared::Float;
use shared::core::camera::CameraTransform;
use anyhow::{Result, anyhow};
use shared::core::film::{DevicePixelSensor, Film, FilmBase, SpectralFilm, SpectralPixel};
use shared::core::filter::{Filter, FilterTrait};
use shared::spectra::{LAMBDA_MAX, LAMBDA_MIN, RGBColorSpace};
use shared::utils::AtomicFloat;
use shared::utils::Ptr;
use shared::utils::containers::DeviceArray2D;
use shared::utils::math::SquareMatrix;
use std::path::Path;
struct SpectralFilmStorage {
pixels: Array2D<SpectralPixel>,
bucket_sums: Vec<f64>,
weight_sums: Vec<f64>,
bucket_splats: Vec<AtomicFloat>,
}
pub struct SpectralFilmHost {
pub device: SpectralFilm,
storage: Box<SpectralFilmStorage>,
}
impl SpectralFilmHost {
pub fn new(
base: &FilmBase,
lambda_min: Float,
lambda_max: Float,
n_buckets: usize,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
let n_pixels = base.pixel_bounds.area() as usize;
let total_buckets = n_pixels * n_buckets;
let bucket_sums = vec![0.0; total_buckets];
let weight_sums = vec![0.0; total_buckets];
let mut bucket_splats = Vec::with_capacity(total_buckets);
for _ in 0..total_buckets {
bucket_splats.push(AtomicFloat::new(0.0));
}
let mut pixels = Array2D::<SpectralPixel>::new(base.pixel_bounds);
let p_sums_base = bucket_sums.as_ptr() as *mut f64;
let p_weights_base = weight_sums.as_ptr() as *mut f64;
let p_splats_base = bucket_splats.as_ptr() as *mut AtomicFloat;
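// Wire each pixel's raw pointers into the flat bucket arrays; `storage`
// below owns those Vecs, so they must outlive the device-side film.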
for i in 0..n_pixels {
let pixel = pixels.get_linear_mut(i);
pixel.bucket_offset = i * n_buckets;
unsafe {
let offset = i * n_buckets;
pixel.bucket_sums = p_sums_base.add(offset);
pixel.weight_sums = p_weights_base.add(offset);
pixel.bucket_splats = p_splats_base.add(offset);
}
}
let storage = Box::new(SpectralFilmStorage {
pixels,
bucket_sums,
weight_sums,
bucket_splats,
});
let device = SpectralFilm {
base: *base,
colorspace: colorspace.clone(),
lambda_min,
lambda_max,
n_buckets,
max_component_value,
write_fp16,
filter_integral: base.filter.integral(),
output_rgbf_from_sensor_rgb: SquareMatrix::identity(),
pixels: DeviceArray2D {
values: Ptr::from(&storage.pixels),
extent: base.pixel_bounds,
stride: base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x(),
},
bucket_sums: storage.bucket_sums.as_ptr() as *mut f64,
weight_sums: storage.weight_sums.as_ptr() as *mut f64,
bucket_splats: storage.bucket_splats.as_ptr() as *mut AtomicFloat,
};
Self { device, storage }
}
}
impl CreateFilm for SpectralFilm {
fn create(
name: &str,
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = DevicePixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(&sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(anyhow!("{}: EXR is the only format supported by GBufferFilm", loc).into());
}
let n_buckets = params.get_one_int("nbuckets", 16) as usize;
let lambda_min = params.get_one_float("lambdamin", LAMBDA_MIN as Float);
let lambda_max = params.get_one_float("lambdamin", LAMBDA_MAX as Float);
if lambda_min < LAMBDA_MIN as Float || lambda_max > LAMBDA_MAX as Float {
return Err(anyhow!(
"{}: PBRT must be recompiled with different values of LAMBDA_MIN and LAMBDA_MAX",
loc
));
}
let film = SpectralFilmHost::new(
&film_base,
lambda_min,
lambda_max,
n_buckets,
colorspace,
max_component_value,
write_fp16,
);
Ok(Film::Spectral(film.device))
}
}

View file

@ -3,3 +3,9 @@ pub mod gaussian;
pub mod lanczos; pub mod lanczos;
pub mod mitchell; pub mod mitchell;
pub mod triangle; pub mod triangle;
pub use boxf::*;
pub use gaussian::*;
pub use lanczos::*;
pub use mitchell::*;
pub use triangle::*;

View file

@ -4,7 +4,7 @@ use shared::core::interaction::{Interaction, InteractionTrait};
use shared::core::light::{Light, LightTrait}; use shared::core::light::{Light, LightTrait};
use shared::core::primitive::{Primitive, PrimitiveTrait}; use shared::core::primitive::{Primitive, PrimitiveTrait};
use shared::core::shape::ShapeIntersection; use shared::core::shape::ShapeIntersection;
use shared::lights::LightSampler; use shared::lights::sampler::LightSampler;
use shared::spectra::SampledWavelengths; use shared::spectra::SampledWavelengths;
use shared::utils::sampling::power_heuristic; use shared::utils::sampling::power_heuristic;
use shared::{Float, SHADOW_EPSILON}; use shared::{Float, SHADOW_EPSILON};

View file

@ -1,5 +1,6 @@
#[allow(dead_code)] #[allow(dead_code)]
pub mod core; pub mod core;
pub mod films;
pub mod filters; pub mod filters;
pub mod globals; pub mod globals;
pub mod integrators; pub mod integrators;

View file

@ -15,12 +15,11 @@ use shared::core::medium::MediumInterface;
use shared::core::spectrum::Spectrum; use shared::core::spectrum::Spectrum;
use shared::core::texture::SpectrumType; use shared::core::texture::SpectrumType;
use shared::lights::{ImageInfiniteLight, PortalInfiniteLight, UniformInfiniteLight}; use shared::lights::{ImageInfiniteLight, PortalInfiniteLight, UniformInfiniteLight};
use shared::spectra::RGBColorSpace; use shared::spectra::{DenselySampledSpectrum, RGBColorSpace};
use shared::utils::math::{equal_area_sphere_to_square, equal_area_square_to_sphere}; use shared::utils::math::{equal_area_sphere_to_square, equal_area_square_to_sphere};
use shared::utils::sampling::{DevicePiecewiseConstant2D, DeviceWindowedPiecewiseConstant2D}; use shared::utils::sampling::{DevicePiecewiseConstant2D, DeviceWindowedPiecewiseConstant2D};
use shared::utils::{Ptr, Transform}; use shared::utils::{Ptr, Transform};
use shared::{Float, PI}; use shared::{Float, PI};
use shareed::spectra::DenselySampledSpectrum;
use std::path::Path; use std::path::Path;
pub trait CreateImageInfiniteLight { pub trait CreateImageInfiniteLight {
@ -219,7 +218,7 @@ fn create_image_light(
}) })
.collect(); .collect();
let distrib = PiecewiseConstant2D::new(&data, n_u, n_v); let distrib = PiecewiseConstant2D::from_slice(&data, n_u, n_v, Bounds2f::unit());
// Build compensated distribution // Build compensated distribution
let average = data.iter().sum::<Float>() / data.len() as Float; let average = data.iter().sum::<Float>() / data.len() as Float;
@ -231,7 +230,7 @@ fn create_image_light(
if all_zero { if all_zero {
data.fill(1.0); data.fill(1.0);
} }
let compensated_distrib = PiecewiseConstant2D::new(&data, n_u, n_v); let compensated_distrib = PiecewiseConstant2D::from_slice(&data, n_u, n_v, Bounds2f::unit());
let light = ImageInfiniteLight::new( let light = ImageInfiniteLight::new(
render_from_light, render_from_light,

View file

@ -37,7 +37,7 @@ impl CreateProjectionLight for ProjectionLight {
render_from_light: Transform, render_from_light: Transform,
medium_interface: MediumInterface, medium_interface: MediumInterface,
scale: Float, scale: Float,
image: Ptr<DeviceImage>, image: Ptr<Image>,
image_color_space: Ptr<RGBColorSpace>, image_color_space: Ptr<RGBColorSpace>,
fov: Float, fov: Float,
) -> Self { ) -> Self {
@ -72,12 +72,16 @@ impl CreateProjectionLight for ProjectionLight {
}; };
let d = image.get_sampling_distribution(dwda, screen_bounds); let d = image.get_sampling_distribution(dwda, screen_bounds);
let distrib = let distrib = PiecewiseConstant2D::from_slice(
PiecewiseConstant2D::new(d.as_slice(), d.x_size() as usize, d.y_size() as usize); d.as_slice(),
d.x_size() as usize,
d.y_size() as usize,
screen_bounds,
);
Self { Self {
base, base,
image, image: Ptr::from(image.device_image()),
image_color_space, image_color_space,
distrib: Ptr::from(&distrib.device), distrib: Ptr::from(&distrib.device),
screen_bounds, screen_bounds,

View file

@ -4,7 +4,7 @@ use crate::core::texture::FloatTexture;
use crate::shapes::mesh::BilinearPatchMesh; use crate::shapes::mesh::BilinearPatchMesh;
use crate::utils::sampling::PiecewiseConstant2D; use crate::utils::sampling::PiecewiseConstant2D;
use crate::utils::{Arena, FileLoc, ParameterDictionary}; use crate::utils::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result; use anyhow::{Result, anyhow};
use log::warn; use log::warn;
use shared::core::shape::Shape; use shared::core::shape::Shape;
use shared::shapes::BilinearPatchShape; use shared::shapes::BilinearPatchShape;
@ -19,9 +19,9 @@ impl CreateShape for BilinearPatchShape {
_object_from_render: Transform, _object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>, _float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
_arena: &mut Arena, _arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
let mut vertex_indices = parameters.get_int_array("indices"); let mut vertex_indices = parameters.get_int_array("indices");
let p = parameters.get_point3f_array("P"); let p = parameters.get_point3f_array("P");

View file

@ -62,9 +62,9 @@ impl CreateShape for CurveShape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>, _float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
_arena: &mut Arena, _arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
let width = parameters.get_one_float("width", 1.0); let width = parameters.get_one_float("width", 1.0);
let width0 = parameters.get_one_float("width0", width); let width0 = parameters.get_one_float("width0", width);

View file

@ -14,9 +14,9 @@ impl CreateShape for CylinderShape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>, _float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
let radius = parameters.get_one_float("radius", 1.); let radius = parameters.get_one_float("radius", 1.);
let z_min = parameters.get_one_float("zmin", -1.); let z_min = parameters.get_one_float("zmin", -1.);

View file

@ -14,9 +14,9 @@ impl CreateShape for DiskShape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>, _float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
let height = parameters.get_one_float("height", 0.); let height = parameters.get_one_float("height", 0.);
let radius = parameters.get_one_float("radius", 1.); let radius = parameters.get_one_float("radius", 1.);

View file

@ -14,7 +14,7 @@ impl CreateShape for SphereShape {
object_from_render: Transform, object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>, _float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
arena: &mut Arena, arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {

View file

@ -16,9 +16,9 @@ impl CreateShape for TriangleShape {
_object_from_render: Transform, _object_from_render: Transform,
reverse_orientation: bool, reverse_orientation: bool,
parameters: ParameterDictionary, parameters: ParameterDictionary,
_float_texture: HashMap<String, Arc<FloatTexture>>, _float_texture: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc, _loc: FileLoc,
_arena: &mut Arena, _arena: &Arena,
) -> Result<Vec<Shape>> { ) -> Result<Vec<Shape>> {
let mut vertex_indices = parameters.get_int_array("indices"); let mut vertex_indices = parameters.get_int_array("indices");
let p = parameters.get_point3f_array("P"); let p = parameters.get_point3f_array("P");

View file

@ -10,7 +10,7 @@ use shared::utils::ptr::Ptr;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct RGBColorSpaceData { pub struct RGBColorSpaceData {
_illuminant: DenselySampledSpectrumBuffer, illuminant: Arc<DenselySampledSpectrumBuffer>,
pub view: RGBColorSpace, pub view: RGBColorSpace,
} }
@ -26,8 +26,8 @@ impl RGBColorSpaceData {
r: Point2f, r: Point2f,
g: Point2f, g: Point2f,
b: Point2f, b: Point2f,
illuminant: DenselySampledSpectrumBuffer, illuminant: Arc<DenselySampledSpectrumBuffer>,
rgb_to_spectrum_table: Ptr<RGBToSpectrumTable>, rgb_to_spectrum_table: Arc<RGBToSpectrumTable>,
) -> Self { ) -> Self {
let stdspec = get_spectra_context(); let stdspec = get_spectra_context();
let w_xyz: XYZ = Spectrum::Dense(illuminant.device()).to_xyz(&stdspec); let w_xyz: XYZ = Spectrum::Dense(illuminant.device()).to_xyz(&stdspec);
@ -56,8 +56,9 @@ impl RGBColorSpaceData {
rgb_from_xyz, rgb_from_xyz,
rgb_to_spectrum_table, rgb_to_spectrum_table,
}; };
Self { Self {
_illuminant: illuminant, illuminant: illuminant.into(),
view, view,
} }
} }

View file

@ -17,10 +17,6 @@ pub mod piecewise;
pub use dense::DenselySampledSpectrumBuffer; pub use dense::DenselySampledSpectrumBuffer;
fn get_d65_illuminant_buffer() -> &DenselySampledSpectrumBuffer {
&CIE_D65_DATA
}
static CIE_X_DATA: LazyLock<DenselySampledSpectrumBuffer> = static CIE_X_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_X)); LazyLock::new(|| data::create_cie_buffer(&CIE_X));
static CIE_Y_DATA: LazyLock<DenselySampledSpectrumBuffer> = static CIE_Y_DATA: LazyLock<DenselySampledSpectrumBuffer> =
@ -30,6 +26,10 @@ static CIE_Z_DATA: LazyLock<DenselySampledSpectrumBuffer> =
static CIE_D65_DATA: LazyLock<DenselySampledSpectrumBuffer> = static CIE_D65_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_D65)); LazyLock::new(|| data::create_cie_buffer(&CIE_D65));
fn get_d65_illuminant_buffer() -> Arc<DenselySampledSpectrumBuffer> {
Arc::new((*CIE_D65_DATA).clone())
}
pub fn cie_x() -> Spectrum { pub fn cie_x() -> Spectrum {
Spectrum::Dense(CIE_X_DATA.device()) Spectrum::Dense(CIE_X_DATA.device())
} }

View file

@ -4,6 +4,7 @@ use crate::shapes::{BilinearPatchMesh, TriangleMesh};
use crate::spectra::DenselySampledSpectrumBuffer; use crate::spectra::DenselySampledSpectrumBuffer;
use crate::utils::mipmap::MIPMap; use crate::utils::mipmap::MIPMap;
use crate::utils::sampling::{PiecewiseConstant2D, WindowedPiecewiseConstant2D}; use crate::utils::sampling::{PiecewiseConstant2D, WindowedPiecewiseConstant2D};
use parking_lot::Mutex;
use shared::core::color::RGBToSpectrumTable; use shared::core::color::RGBToSpectrumTable;
use shared::core::image::DeviceImage; use shared::core::image::DeviceImage;
use shared::core::light::Light; use shared::core::light::Light;
@ -23,6 +24,15 @@ use std::collections::HashMap;
use std::slice::from_raw_parts; use std::slice::from_raw_parts;
use std::sync::Arc; use std::sync::Arc;
pub struct Arena { pub struct Arena {
buffer: Vec<(*mut u8, Layout)>, inner: Mutex<ArenaInner>,
texture_cache: HashMap<usize, u64>, }
}
struct ArenaInner {
buffer: Vec<(*mut u8, Layout)>,
texture_cache: HashMap<usize, u64>,
}
@ -31,14 +41,15 @@ pub struct Arena {
impl Arena { impl Arena {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
buffer: Vec::new(), inner: Mutex::new(ArenaInner {
texture_cache: HashMap::new(), buffer: Vec::new(),
texture_cache: HashMap::new(),
}),
} }
} }
pub fn alloc<T>(&mut self, value: T) -> Ptr<T> { pub fn alloc<T>(&self, value: T) -> Ptr<T> {
let layout = Layout::new::<T>(); let layout = Layout::new::<T>();
let ptr = unsafe { self.alloc_unified(layout) } as *mut T; let ptr = unsafe { self.alloc_unified(layout) } as *mut T;
unsafe { unsafe {
@ -48,14 +59,14 @@ impl Arena {
Ptr::from_raw(ptr) Ptr::from_raw(ptr)
} }
pub fn alloc_opt<T>(&mut self, value: Option<T>) -> Ptr<T> { pub fn alloc_opt<T>(&self, value: Option<T>) -> Ptr<T> {
match value { match value {
Some(v) => self.alloc(v), Some(v) => self.alloc(v),
None => Ptr::null(), None => Ptr::null(),
} }
} }
pub fn alloc_slice<T: Copy>(&mut self, values: &[T]) -> (Ptr<T>, usize) { pub fn alloc_slice<T: Copy>(&self, values: &[T]) -> (Ptr<T>, usize) {
if values.is_empty() { if values.is_empty() {
return (Ptr::null(), 0); return (Ptr::null(), 0);
} }
@ -71,7 +82,7 @@ impl Arena {
} }
#[cfg(feature = "cuda")] #[cfg(feature = "cuda")]
unsafe fn alloc_unified(&mut self, layout: Layout) -> *mut u8 { unsafe fn alloc_unified(&self, layout: Layout) -> *mut u8 {
use cuda_runtime_sys::*; use cuda_runtime_sys::*;
let mut ptr: *mut std::ffi::c_void = std::ptr::null_mut(); let mut ptr: *mut std::ffi::c_void = std::ptr::null_mut();
@ -87,10 +98,18 @@ impl Arena {
ptr as *mut u8 ptr as *mut u8
} }
pub fn get_texture_object(&mut self, mipmap: &Arc<MIPMap>) -> u64 { #[cfg(not(feature = "cuda"))]
let key = Arc::as_ptr(mipmap) as usize; unsafe fn alloc_unified(&self, layout: Layout) -> *mut u8 {
let ptr = unsafe { std::alloc::alloc(layout) };
self.inner.lock().buffer.push((ptr, layout));
ptr
}
if let Some(&tex_obj) = self.texture_cache.get(&key) { pub fn get_texture_object(&self, mipmap: &Arc<MIPMap>) -> u64 {
let key = Arc::as_ptr(mipmap) as usize;
let mut inner = self.inner.lock();
if let Some(&tex_obj) = inner.texture_cache.get(&key) {
return tex_obj; return tex_obj;
} }
@ -115,13 +134,6 @@ impl Arena {
0 0
} }
#[cfg(not(feature = "cuda"))]
unsafe fn alloc_unified(&mut self, layout: Layout) -> *mut u8 {
let ptr = unsafe { std::alloc::alloc(layout) };
self.buffer.push((ptr, layout));
ptr
}
// pub fn raw_data(&self) -> &[u8] { // pub fn raw_data(&self) -> &[u8] {
// &self.buffer // &self.buffer
// } // }
@ -129,6 +141,7 @@ impl Arena {
impl Drop for Arena { impl Drop for Arena {
fn drop(&mut self) { fn drop(&mut self) {
let inner = self.inner.get_mut();
for (ptr, layout) in self.buffer.drain(..) { for (ptr, layout) in inner.buffer.drain(..) {
unsafe { unsafe {
#[cfg(feature = "cuda")] #[cfg(feature = "cuda")]
@ -144,57 +157,60 @@ impl Drop for Arena {
} }
} }
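// Safety: the allocation list and texture cache are only touched through
// the interior Mutex, so sharing an Arena across threads is sound.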
unsafe impl Send for Arena {}
unsafe impl Sync for Arena {}
pub trait Upload { pub trait Upload {
type Target: Copy; type Target: Copy;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target>; fn upload(&self, arena: &Arena) -> Ptr<Self::Target>;
} }
impl Upload for Shape { impl Upload for Shape {
type Target = Shape; type Target = Shape;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone()) arena.alloc(self.clone())
} }
} }
impl Upload for Light { impl Upload for Light {
type Target = Light; type Target = Light;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone()) arena.alloc(self.clone())
} }
} }
impl Upload for Image { impl Upload for Image {
type Target = DeviceImage; type Target = DeviceImage;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(*self.device_image()) arena.alloc(*self.device_image())
} }
} }
impl Upload for Spectrum { impl Upload for Spectrum {
type Target = Spectrum; type Target = Spectrum;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone()) arena.alloc(self.clone())
} }
} }
impl Upload for Material { impl Upload for Material {
type Target = Material; type Target = Material;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone()) arena.alloc(self.clone())
} }
} }
impl Upload for DenselySampledSpectrumBuffer { impl Upload for DenselySampledSpectrumBuffer {
type Target = DenselySampledSpectrum; type Target = DenselySampledSpectrum;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.device()) arena.alloc(self.device())
} }
} }
impl Upload for SpectrumTexture { impl Upload for SpectrumTexture {
type Target = GPUSpectrumTexture; type Target = GPUSpectrumTexture;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let gpu_variant = match self { let gpu_variant = match self {
SpectrumTexture::Constant(tex) => GPUSpectrumTexture::Constant(tex.clone()), SpectrumTexture::Constant(tex) => GPUSpectrumTexture::Constant(tex.clone()),
SpectrumTexture::Checkerboard(tex) => GPUSpectrumTexture::Checkerboard(tex.clone()), SpectrumTexture::Checkerboard(tex) => GPUSpectrumTexture::Checkerboard(tex.clone()),
@ -265,7 +281,7 @@ impl Upload for SpectrumTexture {
impl Upload for FloatTexture { impl Upload for FloatTexture {
type Target = GPUFloatTexture; type Target = GPUFloatTexture;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let gpu_variant = match self { let gpu_variant = match self {
FloatTexture::Constant(tex) => GPUFloatTexture::Constant(tex.clone()), FloatTexture::Constant(tex) => GPUFloatTexture::Constant(tex.clone()),
FloatTexture::Checkerboard(tex) => GPUFloatTexture::Checkerboard(tex.clone()), FloatTexture::Checkerboard(tex) => GPUFloatTexture::Checkerboard(tex.clone()),
@ -327,7 +343,7 @@ impl Upload for FloatTexture {
impl Upload for RGBToSpectrumTable { impl Upload for RGBToSpectrumTable {
type Target = RGBToSpectrumTable; type Target = RGBToSpectrumTable;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let n_nodes = self.n_nodes as usize; let n_nodes = self.n_nodes as usize;
let z_slice = unsafe { from_raw_parts(self.z_nodes.as_raw(), n_nodes) }; let z_slice = unsafe { from_raw_parts(self.z_nodes.as_raw(), n_nodes) };
let coeffs_slice = unsafe { from_raw_parts(self.coeffs.as_raw(), n_nodes) }; let coeffs_slice = unsafe { from_raw_parts(self.coeffs.as_raw(), n_nodes) };
@ -347,7 +363,7 @@ impl Upload for RGBToSpectrumTable {
impl Upload for RGBColorSpace { impl Upload for RGBColorSpace {
type Target = RGBColorSpace; type Target = RGBColorSpace;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let table_ptr = self.rgb_to_spectrum_table.upload(arena); let table_ptr = self.rgb_to_spectrum_table.upload(arena);
let shared_space = RGBColorSpace { let shared_space = RGBColorSpace {
@ -368,7 +384,7 @@ impl Upload for RGBColorSpace {
impl Upload for DeviceStandardColorSpaces { impl Upload for DeviceStandardColorSpaces {
type Target = DeviceStandardColorSpaces; type Target = DeviceStandardColorSpaces;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let srgb_ptr = self.srgb.upload(arena); let srgb_ptr = self.srgb.upload(arena);
let dci_ptr = self.dci_p3.upload(arena); let dci_ptr = self.dci_p3.upload(arena);
let rec_ptr = self.rec2020.upload(arena); let rec_ptr = self.rec2020.upload(arena);
@ -388,7 +404,7 @@ impl Upload for DeviceStandardColorSpaces {
impl Upload for PiecewiseConstant2D { impl Upload for PiecewiseConstant2D {
type Target = DevicePiecewiseConstant2D; type Target = DevicePiecewiseConstant2D;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let marginal_shared = self.marginal.to_shared(arena); let marginal_shared = self.marginal.to_shared(arena);
let conditionals_shared: Vec<DevicePiecewiseConstant1D> = self let conditionals_shared: Vec<DevicePiecewiseConstant1D> = self
@ -411,7 +427,7 @@ impl Upload for PiecewiseConstant2D {
impl Upload for WindowedPiecewiseConstant2D { impl Upload for WindowedPiecewiseConstant2D {
type Target = DeviceWindowedPiecewiseConstant2D; type Target = DeviceWindowedPiecewiseConstant2D;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let specific = DeviceWindowedPiecewiseConstant2D { let specific = DeviceWindowedPiecewiseConstant2D {
sat: self.sat, sat: self.sat,
func: self.func, func: self.func,
@ -423,7 +439,7 @@ impl Upload for WindowedPiecewiseConstant2D {
impl Upload for TriangleMesh { impl Upload for TriangleMesh {
type Target = DeviceTriangleMesh; type Target = DeviceTriangleMesh;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let storage = &self.storage; let storage = &self.storage;
// Upload all arrays to arena // Upload all arrays to arena
@ -471,7 +487,7 @@ impl Upload for TriangleMesh {
impl Upload for BilinearPatchMesh { impl Upload for BilinearPatchMesh {
type Target = DeviceBilinearPatchMesh; type Target = DeviceBilinearPatchMesh;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let storage = &self.storage; let storage = &self.storage;
let (vertex_indices_ptr, _) = arena.alloc_slice(&storage.vertex_indices); let (vertex_indices_ptr, _) = arena.alloc_slice(&storage.vertex_indices);
@ -506,7 +522,7 @@ impl Upload for BilinearPatchMesh {
impl<T: Upload> Upload for Option<T> { impl<T: Upload> Upload for Option<T> {
type Target = T::Target; type Target = T::Target;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
match self { match self {
Some(val) => val.upload(arena), Some(val) => val.upload(arena),
None => Ptr::null(), None => Ptr::null(),
@ -517,7 +533,7 @@ impl<T: Upload> Upload for Option<T> {
impl<T: Upload> Upload for Arc<T> { impl<T: Upload> Upload for Arc<T> {
type Target = T::Target; type Target = T::Target;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> { fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
(**self).upload(arena) (**self).upload(arena)
} }
} }

View file

@ -36,7 +36,7 @@ where
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Array2D<T> { pub struct Array2D<T> {
pub device: DeviceArray2D<T>, pub device: DeviceArray2D<T>,
values: Vec<T>, pub values: Vec<T>,
} }
impl<T> Deref for Array2D<T> { impl<T> Deref for Array2D<T> {

View file

@ -6,6 +6,7 @@ use std::io::{self, Read};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use crate::Arena;
use crate::utils::error::FileLoc; use crate::utils::error::FileLoc;
use crate::utils::parameters::{ParameterDictionary, ParsedParameter, ParsedParameterVector}; use crate::utils::parameters::{ParameterDictionary, ParsedParameter, ParsedParameterVector};
use shared::Float; use shared::Float;
@ -49,7 +50,7 @@ pub trait ParserTarget {
fn medium_interface(&mut self, inside_name: &str, outside_name: &str, loc: FileLoc); fn medium_interface(&mut self, inside_name: &str, outside_name: &str, loc: FileLoc);
fn sampler(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc); fn sampler(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn world_begin(&mut self, loc: FileLoc); fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena);
fn attribute_begin(&mut self, loc: FileLoc); fn attribute_begin(&mut self, loc: FileLoc);
fn attribute_end(&mut self, loc: FileLoc); fn attribute_end(&mut self, loc: FileLoc);
fn attribute(&mut self, target: &str, params: ParsedParameterVector, loc: FileLoc); fn attribute(&mut self, target: &str, params: ParsedParameterVector, loc: FileLoc);
@ -61,6 +62,7 @@ pub trait ParserTarget {
tex_name: &str, tex_name: &str,
params: &ParsedParameterVector, params: &ParsedParameterVector,
loc: FileLoc, loc: FileLoc,
arena: &mut Arena,
); );
fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc); fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc); fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
@ -457,7 +459,7 @@ impl ParserTarget for FormattingParserTarget {
println!("{}CoordSysTransform \"{}\"", self.indent(0), name); println!("{}CoordSysTransform \"{}\"", self.indent(0), name);
} }
fn world_begin(&mut self, _loc: FileLoc) { fn world_begin(&mut self, _loc: FileLoc, _arena: &mut Arena) {
println!("{}WorldBegin", self.indent(0)); println!("{}WorldBegin", self.indent(0));
self.cat_indent_count += 4; self.cat_indent_count += 4;
} }
@ -497,6 +499,7 @@ impl ParserTarget for FormattingParserTarget {
tex_name: &str, tex_name: &str,
_params: &ParsedParameterVector, _params: &ParsedParameterVector,
_loc: FileLoc, _loc: FileLoc,
_arena: &mut Arena,
) { ) {
println!( println!(
"{}Texture \"{}\" \"{}\" \"{}\"", "{}Texture \"{}\" \"{}\" \"{}\"",
@ -741,6 +744,7 @@ impl<'a> SceneParser<'a> {
} }
pub fn run(&mut self) -> Result<(), ParserError> { pub fn run(&mut self) -> Result<(), ParserError> {
let mut arena = Arena::new();
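// One arena per parse; texture and world-begin callbacks allocate device-visible data from it.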
loop { loop {
let token = match self.next_token()? { let token = match self.next_token()? {
Some(t) => t, Some(t) => t,
@ -1009,7 +1013,7 @@ impl<'a> SceneParser<'a> {
let tex_name = self.expect_quoted_string()?; let tex_name = self.expect_quoted_string()?;
let params = self.parse_parameters()?; let params = self.parse_parameters()?;
self.target self.target
.texture(&name, &type_name, &tex_name, &params, token.loc); .texture(&name, &type_name, &tex_name, &params, token.loc, &mut arena);
} }
_ => { _ => {
return Err(ParserError::Generic( return Err(ParserError::Generic(
@ -1020,7 +1024,7 @@ impl<'a> SceneParser<'a> {
}, },
'W' => match token.text.as_str() { 'W' => match token.text.as_str() {
"WorldBegin" => self.target.world_begin(token.loc), "WorldBegin" => self.target.world_begin(token.loc, &mut arena),
"WorldEnd" => {} "WorldEnd" => {}
_ => { _ => {
return Err(ParserError::Generic( return Err(ParserError::Generic(

View file

@ -2,7 +2,7 @@ use crate::core::image::Image;
use crate::utils::Arena; use crate::utils::Arena;
use crate::utils::containers::Array2D; use crate::utils::containers::Array2D;
use shared::Float; use shared::Float;
use shared::core::geometry::{Point2i, Vector2f, Vector2i}; use shared::core::geometry::{Bounds2f, Point2i, Vector2f, Vector2i};
use shared::utils::Ptr; use shared::utils::Ptr;
use shared::utils::sampling::{ use shared::utils::sampling::{
AliasTable, Bin, DevicePiecewiseConstant1D, DevicePiecewiseConstant2D, DeviceSummedAreaTable, AliasTable, Bin, DevicePiecewiseConstant1D, DevicePiecewiseConstant2D, DeviceSummedAreaTable,
@ -133,26 +133,37 @@ impl std::ops::Deref for PiecewiseConstant2D {
} }
impl PiecewiseConstant2D { impl PiecewiseConstant2D {
pub fn new(data: &[Float], n_u: usize, n_v: usize) -> Self { pub fn new(data: &Array2D<Float>) -> Self {
Self::new_with_bounds(data, Bounds2f::unit())
}
pub fn new_with_bounds(data: &Array2D<Float>, domain: Bounds2f) -> Self {
Self::from_slice(
data.as_slice(),
data.x_size() as usize,
data.y_size() as usize,
domain,
)
}
pub fn from_slice(data: &[Float], n_u: usize, n_v: usize, domain: Bounds2f) -> Self {
assert_eq!(data.len(), n_u * n_v); assert_eq!(data.len(), n_u * n_v);
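// Build conditional distributions p(u|v) for each row, then the marginal p(v) over rows.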
// Build conditional distributions p(u|v) for each row
let mut conditionals = Vec::with_capacity(n_v); let mut conditionals = Vec::with_capacity(n_v);
let mut marginal_func = Vec::with_capacity(n_v); let mut marginal_func = Vec::with_capacity(n_v);
for v in 0..n_v { for v in 0..n_v {
let row_start = v * n_u; let row_start = v * n_u;
let row: Vec<Float> = data[row_start..row_start + n_u].to_vec(); let row: Vec<Float> = data[row_start..row_start + n_u].to_vec();
let conditional =
let conditional = PiecewiseConstant1D::new_with_bounds(row, 0.0, 1.0); PiecewiseConstant1D::new_with_bounds(row, domain.p_min.x(), domain.p_max.x());
marginal_func.push(conditional.integral()); marginal_func.push(conditional.integral());
conditionals.push(conditional); conditionals.push(conditional);
} }
// Build marginal distribution p(v) let marginal =
let marginal = PiecewiseConstant1D::new_with_bounds(marginal_func, 0.0, 1.0); PiecewiseConstant1D::new_with_bounds(marginal_func, domain.p_min.y(), domain.p_max.y());
// Create array of device structs
let conditional_devices: Box<[DevicePiecewiseConstant1D]> = conditionals let conditional_devices: Box<[DevicePiecewiseConstant1D]> = conditionals
.iter() .iter()
.map(|c| c.device) .map(|c| c.device)
@ -189,7 +200,7 @@ impl PiecewiseConstant2D {
} }
} }
Self::new(&data, n_u, n_v) Self::from_slice(&data, n_u, n_v, Bounds2f::unit())
} }
pub fn integral(&self) -> Float { pub fn integral(&self) -> Float {