Fix film creation, separate arena allocation from texture creation, and correct bad function signatures

This commit is contained in:
pingu 2026-01-26 19:44:53 +00:00
parent 640e17110a
commit a32cd60e9f
35 changed files with 726 additions and 560 deletions

View file

@ -39,45 +39,6 @@ pub struct RGBPixel {
rgb_splat: [AtomicFloat; 3],
}
// #[cfg(not(target_os = "cuda"))]
// impl RGBFilm {
// pub fn new(
// base: FilmBase,
// colorspace: &RGBColorSpace,
// max_component_value: Float,
// write_fp16: bool,
// ) -> Self {
// let sensor_ptr = base.sensor;
// if sensor_ptr.is_null() {
// panic!("Film must have a sensor");
// }
// let sensor = unsafe { &*sensor_ptr };
// let filter_integral = base.filter.integral();
// let sensor_matrix = sensor.xyz_from_sensor_rgb;
// let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor_matrix;
//
// let width = base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x();
// let height = base.pixel_bounds.p_max.y() - base.pixel_bounds.p_min.y();
// let count = (width * height) as usize;
//
// let mut pixel_vec = Vec::with_capacity(count);
// for _ in 0..count {
// pixel_vec.push(RGBPixel::default());
// }
//
// let pixels_array = Array2D::(base.pixel_bounds);
//
// Self {
// base,
// max_component_value,
// write_fp16,
// filter_integral,
// output_rgbf_from_sensor_rgb,
// pixels: std::sync::Arc::new(pixels_array),
// }
// }
// }
//
impl RGBFilm {
pub fn base(&self) -> &FilmBase {
&self.base
@ -87,7 +48,7 @@ impl RGBFilm {
&mut self.base
}
pub fn get_sensor(&self) -> &PixelSensor {
pub fn get_sensor(&self) -> &DevicePixelSensor {
#[cfg(not(target_os = "cuda"))]
{
if self.base.sensor.is_null() {
@ -96,7 +57,7 @@ impl RGBFilm {
);
}
}
&*self.base.sensor
&self.base.sensor
}
pub fn add_sample(
@ -193,7 +154,7 @@ impl RGBFilm {
#[repr(C)]
#[derive(Debug, Default)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
struct GBufferPixel {
pub struct GBufferPixel {
pub rgb_sum: [AtomicFloat; 3],
pub weight_sum: AtomicFloat,
pub g_bugger_weight_sum: AtomicFloat,
@ -213,14 +174,14 @@ struct GBufferPixel {
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
pub struct GBufferFilm {
pub base: FilmBase,
output_from_render: AnimatedTransform,
apply_inverse: bool,
pixels: DeviceArray2D<GBufferPixel>,
colorspace: RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
filter_integral: Float,
output_rgbf_from_sensor_rgb: SquareMatrix<Float, 3>,
pub output_from_render: AnimatedTransform,
pub apply_inverse: bool,
pub pixels: DeviceArray2D<GBufferPixel>,
pub colorspace: RGBColorSpace,
pub max_component_value: Float,
pub write_fp16: bool,
pub filter_integral: Float,
pub output_rgbf_from_sensor_rgb: SquareMatrix<Float, 3>,
}
impl GBufferFilm {
@ -232,7 +193,7 @@ impl GBufferFilm {
&mut self.base
}
pub fn get_sensor(&self) -> &PixelSensor {
pub fn get_sensor(&self) -> &DevicePixelSensor {
#[cfg(not(target_os = "cuda"))]
{
if self.base.sensor.is_null() {
@ -241,7 +202,7 @@ impl GBufferFilm {
);
}
}
&*self.base.sensor
&self.base.sensor
}
pub fn add_sample(
@ -387,7 +348,7 @@ impl SpectralFilm {
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct PixelSensor {
pub struct DevicePixelSensor {
pub xyz_from_sensor_rgb: SquareMatrix<Float, 3>,
pub r_bar: DenselySampledSpectrum,
pub g_bar: DenselySampledSpectrum,
@ -395,7 +356,7 @@ pub struct PixelSensor {
pub imaging_ratio: Float,
}
impl PixelSensor {
impl DevicePixelSensor {
pub fn project_reflectance<T>(
refl: &Spectrum,
illum: &Spectrum,
@ -429,9 +390,7 @@ impl PixelSensor {
T::from([result[0], result[1], result[2]])
}
}
impl PixelSensor {
pub fn to_sensor_rgb(&self, l: SampledSpectrum, lambda: &SampledWavelengths) -> RGB {
let l_norm = SampledSpectrum::safe_div(&l, &lambda.pdf());
self.imaging_ratio
@ -477,7 +436,7 @@ pub struct FilmBase {
pub pixel_bounds: Bounds2i,
pub filter: Filter,
pub diagonal: Float,
pub sensor: Ptr<PixelSensor>,
pub sensor: Ptr<DevicePixelSensor>,
}
#[repr(C)]

View file

@ -250,6 +250,12 @@ where
}
}
impl Bounds2f {
pub fn unit() -> Self {
Self::from_points(Point2f::new(0.0, 0.0), Point2f::new(1.0, 1.0))
}
}
impl Bounds3f {
#[inline(always)]
pub fn intersect_p(

View file

@ -72,7 +72,7 @@ impl PixelFormat {
#[derive(Clone, Copy, Debug)]
pub enum Pixels {
U8(Ptr<u8>),
F16(Ptr<u16>),
F16(Ptr<f16>),
F32(Ptr<f32>),
}
@ -175,12 +175,12 @@ impl ImageAccess for DeviceImage {
unsafe {
match self.pixels {
Pixels::U8(ptr) => {
let raw_u8 = *ptr.add(offset as usize);
self.base().encoding.to_linear_scalar(raw_u8)
let raw_val = *ptr.add(offset as usize);
self.base().encoding.to_linear_scalar(raw_val)
}
Pixels::F16(ptr) => {
let half_bits: u16 = *ptr.add(offset as usize);
f16_to_f32(half_bits)
let raw_val = *ptr.add(offset as usize);
raw_val.to_f32()
}
Pixels::F32(ptr) => *ptr.add(offset as usize),
}

View file

@ -1,27 +1,27 @@
use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageMetadata};
use crate::Arena;
use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageIO, ImageMetadata};
use crate::films::*;
use crate::spectra::{SRGB, data::get_named_spectrum};
use anyhow::{Result, anyhow};
use rayon::prelude::IntoParallelIterator;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::color::{RGB, XYZ, white_balance};
use shared::core::film::SpectralPixel;
use shared::core::filter::Filter;
use shared::core::film::{
DevicePixelSensor, Film, FilmBase, GBufferFilm, RGBFilm, SpectralFilm, SpectralPixel,
};
use shared::core::filter::{Filter, FilterTrait};
use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i};
use shared::core::image::PixelFormat;
use shared::core::spectrum::{Spectrum, StandardSpectra};
use shared::film::{Film, FilmBase, GBufferFilm, PixelSensor, PixelSensor, RGBFilm, SpectralFilm};
use shared::spectra::cie::SWATCHES_RAW;
use shared::core::spectrum::Spectrum;
use shared::spectra::{
DenselySampledSpectrum, LAMBDA_MAX, LAMBDA_MIN, PiecewiseLinearSpectrum, RGBColorSpace,
DenselySampledSpectrum, PiecewiseLinearSpectrum, RGBColorSpace, cie::SWATCHES_RAW,
};
use shared::utils::containers::DeviceArray2D;
use shared::utils::math::{SquareMatrix, linear_least_squares};
use shared::utils::{AnimatedTransform, AtomicFloat};
use std::cmp::Ordering;
use std::path::Path;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, LazyLock};
use crate::spectra::DenselySampledSpectrumBuffer;
use crate::spectra::{DenselySampledSpectrumBuffer, get_spectra_context};
use crate::utils::{FileLoc, ParameterDictionary};
const N_SWATCH_REFLECTANCES: usize = 24;
@ -33,7 +33,7 @@ const SWATCH_REFLECTANCES: LazyLock<[Spectrum; N_SWATCH_REFLECTANCES]> = LazyLoc
})
});
pub trait PixelSensorHost {
pub trait PixelSensorTrait {
pub fn get_swatches() -> Arc<[Spectrum; N_SWATCH_REFLECTANCES]> {
Arc::new(SWATCH_REFLECTANCES)
}
@ -65,8 +65,8 @@ pub trait PixelSensorHost {
};
if sensor_name == "cie1931" {
return Ok(PixelSensor::new_with_white_balance(
output_colorspace,
return Ok(DevicePixelSensor::new_with_white_balance(
Some(output_colorspace),
sensor_illum,
imaging_ratio,
));
@ -86,12 +86,12 @@ pub trait PixelSensorHost {
let g = Arc::new(g_opt.unwrap());
let b = Arc::new(b_opt.unwrap());
return PixelSensor::new(
return DevicePixelSensor::new(
r,
g,
b,
output_colorspace.clone(),
sensor_illum,
Some(sensor_illum),
imaging_ratio,
)
.map_err(|e| e.to_string());
@ -103,11 +103,9 @@ pub trait PixelSensorHost {
g: Spectrum,
b: Spectrum,
output_colorspace: RGBColorSpace,
sensor_illum: &Spectrum,
sensor_illum: Option<&Spectrum>,
imaging_ratio: Float,
spectra: *const StandardSpectra,
swatches: &[Spectrum; 24],
) -> Self {
) -> DevicePixelSensor {
// As seen in usages of this constructor, sensor_illum can be null
// Going with the colorspace's own illuminant, but this might not be the right choice
// TODO: Test this
@ -119,11 +117,11 @@ pub trait PixelSensorHost {
let r_bar = DenselySampledSpectrum::from_spectrum(&r);
let g_bar = DenselySampledSpectrum::from_spectrum(&g);
let b_bar = DenselySampledSpectrum::from_spectrum(&b);
let mut rgb_camera = [[0.; 3]; Self::N_SWATCH_REFLECTANCES];
let mut rgb_camera = [[0.; 3]; N_SWATCH_REFLECTANCES];
let swatches = Self::get_swatches();
for i in 0..Self::N_SWATCH_REFLECTANCES {
for i in 0..N_SWATCH_REFLECTANCES {
let rgb = Self::project_reflectance::<RGB>(
&swatches[i],
illum,
@ -136,10 +134,11 @@ pub trait PixelSensorHost {
}
}
let mut xyz_output = [[0.; 3]; Self::N_SWATCH_REFLECTANCES];
let mut xyz_output = [[0.; 3]; N_SWATCH_REFLECTANCES];
let spectra = get_spectra_context();
let sensor_white_g = illum.inner_product(&Spectrum::Dense(g_bar.clone()));
let sensor_white_y = illum.inner_product(spectra.y);
for i in 0..Self::N_SWATCH_REFLECTANCES {
for i in 0..N_SWATCH_REFLECTANCES {
let s = swatches[i].clone();
let xyz = Self::project_reflectance::<XYZ>(
&s,
@ -153,23 +152,24 @@ pub trait PixelSensorHost {
}
}
let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)?;
let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)
.expect("Could not convert sensor illuminance to XYZ space");
Ok(Self {
DevicePixelSensor {
xyz_from_sensor_rgb,
r_bar,
g_bar,
b_bar,
imaging_ratio,
})
}
}
fn new_with_white_balance(
output_colorspace: &RGBColorSpace,
sensor_illum: &Spectrum,
sensor_illum: Option<&Spectrum>,
imaging_ratio: Float,
spectra: *const StandardSpectra,
) -> Self {
let spectra = get_spectra_context();
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.x);
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.y);
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.z);
@ -193,150 +193,22 @@ pub trait PixelSensorHost {
}
}
impl PixelSensorHost for PixelSensor {}
impl PixelSensorTrait for DevicePixelSensor {}
struct SpectralFilmStorage {
pixels: DeviceArray2D<SpectralPixel>,
bucket_sums: Vec<f64>,
weight_sums: Vec<f64>,
bucket_splats: Vec<AtomicFloat>,
}
pub struct SpectralFilmHost {
pub view: SpectralFilm,
_storage: Box<SpectralFilmStorage>,
}
impl SpectralFilmHost {
pub fn new(
base: &FilmBase,
lambda_min: Float,
lambda_max: Float,
n_buckets: usize,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
let n_pixels = base.pixel_bounds.area() as usize;
let total_buckets = n_pixels * n_buckets;
let bucket_sums = vec![0.0; total_buckets];
let weight_sums = vec![0.0; total_buckets];
let mut bucket_splats = Vec::with_capacity(total_buckets);
for _ in 0..total_buckets {
bucket_splats.push(AtomicFloat::new(0.0));
}
let mut pixels = DeviceArray2D::<SpectralPixel>::new(base.pixel_bounds);
let p_sums_base = bucket_sums.as_ptr() as *mut f64;
let p_weights_base = weight_sums.as_ptr() as *mut f64;
let p_splats_base = bucket_splats.as_ptr() as *mut AtomicFloat;
for i in 0..n_pixels {
let pixel = pixels.get_linear_mut(i);
pixel.bucket_offset = i * n_buckets;
unsafe {
let offset = i * n_buckets;
pixel.bucket_sums = p_sums_base.add(offset);
pixel.weight_sums = p_weights_base.add(offset);
pixel.bucket_splats = p_splats_base.add(offset);
}
}
let storage = Box::new(SpectralFilmStorage {
pixels,
bucket_sums,
weight_sums,
bucket_splats,
});
let view = SpectralFilm {
base: base.clone(),
colorspace: colorspace.clone(),
lambda_min,
lambda_max,
n_buckets: n_buckets as u32,
max_component_value,
write_fp16,
filter_integral: base.filter.integral(),
output_rgbf_from_sensor_rgb: SquareMatrix::identity(), // Logic omitted
pixels: DeviceArray2D {
values: storage.pixels.as_mut_ptr(),
extent: base.pixel_bounds,
stride: base.pixel_bounds.max.x - base.pixel_bounds.min.x,
},
bucket_sums: storage.bucket_sums.as_ptr() as *mut f64,
weight_sums: storage.weight_sums.as_ptr() as *mut f64,
bucket_splats: storage.bucket_splats.as_ptr() as *mut AtomicFloat,
};
Self {
view,
_storage: storage,
}
}
}
pub struct GBufferFilmHost {
pub device: GBufferFilm,
}
impl GBufferFilmHost {
pub fn new(
base: &FilmBase,
output_from_render: &AnimatedTransform,
apply_inverse: bool,
colorspace: &RGBColorSpace,
max_component_value: Float,
write_fp16: bool,
) -> Self {
assert!(!base.pixel_bounds.is_empty());
let sensor_ptr = base.sensor;
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
let filter_integral = base.filter.integral();
let pixels = DeviceArray2D::new(base.pixel_bounds);
let device = GBufferFilm {
base: base.clone(),
output_from_render: output_from_render.clone(),
apply_inverse,
pixels,
colorspace: colorspace.clone(),
max_component_value,
write_fp16,
filter_integral,
output_rgbf_from_sensor_rgb,
};
Self { device }
}
}
pub trait FilmBaseHost {
pub trait CreateFilmBase {
fn create(
params: &ParameterDictionary,
filter: Filter,
sensor: Option<&PixelSensor>,
sensor: Option<&DevicePixelSensor>,
loc: &FileLoc,
) -> Self;
}
impl FilmBaseHost for FilmBase {
impl CreateFilmBase for FilmBase {
fn create(
params: &ParameterDictionary,
filter: Filter,
sensor: Option<&PixelSensor>,
sensor: Option<&DevicePixelSensor>,
loc: &FileLoc,
) -> Self {
let x_res = params.get_one_int("xresolution", 1280);
@ -395,7 +267,7 @@ impl FilmBaseHost for FilmBase {
pub trait FilmTrait: Sync {
fn base(&self) -> &FilmBase;
fn get_pixel_rgb(&self, p: Point2i, splat_scale: Option<Float>) -> RGB;
fn get_filename(&self) -> &str;
// fn get_filename(&self) -> &str;
fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float) {
let image = self.get_image(metadata, splat_scale);
image
@ -495,13 +367,13 @@ impl FilmTrait for Film {
}
}
fn get_filename(&self) -> &str {
match self {
Film::RGB(f) => &f.filename,
Film::GBuffer(f) => &f.filename,
Film::Spectral(f) => &f.filename,
}
}
// fn get_filename(&self) -> &str {
// match self {
// Film::RGB(f) => &f.base().filename,
// Film::GBuffer(f) => &f.base().filename,
// Film::Spectral(f) => &f.base().filename,
// }
// }
}
pub trait FilmFactory {
@ -512,7 +384,8 @@ pub trait FilmFactory {
filter: Filter,
_camera_transform: Option<CameraTransform>,
loc: &FileLoc,
) -> Result<Self, String>;
arena: &mut Arena,
) -> Result<Self>;
}
impl FilmFactory for Film {
@ -523,104 +396,37 @@ impl FilmFactory for Film {
filter: Filter,
camera_transform: Option<CameraTransform>,
loc: &FileLoc,
) -> Result<Self, String> {
arena: &mut Arena,
) -> Result<Self> {
match name {
"rgb" => {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value =
params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(sensor), loc);
Ok(RGBFilm::new(
film_base,
&colorspace,
max_component_value,
write_fp16,
))
}
"gbuffer" => {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value =
params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(format!(
"{}: EXR is the only format supported by GBufferFilm",
loc
)
.into());
}
let coords_system = params.get_one_string("coordinatesystem", "camera");
let mut apply_inverse = false;
let camera_transform = camera_transform
.ok_or_else(|| "GBufferFilm requires a camera_transform".to_string())?;
let output_from_render = if coords_system == "camera" {
apply_inverse = true;
camera_transform.render_from_camera
} else if coords_system == "world" {
AnimatedTransform::from_transform(&camera_transform.world_from_render)
} else {
return Err(format!(
"{}: unknown coordinate system for GBufferFilm. (Expecting camera
or world",
loc
)
.into());
};
Ok(GBufferFilm::new(
&film_base,
&output_from_render,
apply_inverse,
colorspace,
max_component_value,
write_fp16,
))
}
"spectral" => {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value =
params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);
let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
let film_base = FilmBase::create(params, filter, Some(sensor), loc);
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(format!(
"{}: EXR is the only format supported by GBufferFilm",
loc
)
.into());
}
let n_buckets = params.get_one_int("nbuckets", 16) as usize;
let lambda_min = params.get_one_float("lambdamin", LAMBDA_MIN as Float);
let lambda_max = params.get_one_float("lambdamin", LAMBDA_MAX as Float);
if lambda_min < LAMBDA_MIN as Float && lambda_max > LAMBDA_MAX as Float {
return Err(format!(
"{}: PBRT must be recompiled with different values of LAMBDA_MIN and LAMBDA_MAX",
loc
));
}
Ok(SpectralFilm::new(
&film_base,
lambda_min,
lambda_max,
n_buckets,
colorspace,
max_component_value,
write_fp16,
))
}
_ => Err(format!("Film type '{}' unknown at {}", name, loc)),
"gbuffer" => GBufferFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
"rgb" => RGBFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
"spectral" => SpectralFilm::create(
name,
params,
exposure_time,
filter,
camera_transform,
loc,
arena,
),
_ => Err(anyhow!("Film type '{}' unknown at {}", name, loc)),
}
}
}

View file

@ -1,10 +1,10 @@
use crate::filters::*;
use crate::utils::sampling::PiecewiseConstant2D;
use crate::utils::{FileLoc, ParameterDictionary};
use shared::Float;
use shared::core::filter::{Filter, FilterSampler};
use shared::core::geometry::{Bounds2f, Point2f, Vector2f};
use shared::filters::*;
use shared::utils::containers::DeviceArray2D;
pub trait FilterFactory {
fn create(name: &str, params: &ParameterDictionary, loc: &FileLoc) -> Result<Filter, String>;
@ -71,7 +71,7 @@ impl CreateFilterSampler for FilterSampler {
let nx = (32.0 * radius.x()) as usize;
let ny = (32.0 * radius.y()) as usize;
let mut f = DeviceArray2D::new_with_dims(nx, ny);
let mut f = Array2D::new_with_dims(nx, ny);
for y in 0..f.y_size() {
for x in 0..f.x_size() {
let p = domain.lerp(Point2f::new(
@ -82,6 +82,10 @@ impl CreateFilterSampler for FilterSampler {
}
}
let distrib = PiecewiseConstant2D::new_with_bounds(&f, domain);
Self { domain, f, distrib }
Self {
domain,
f,
distrib: distrib.device,
}
}
}

View file

@ -1,14 +1,13 @@
use super::{Image, ImageAndMetadata, ImageMetadata};
use crate::core::image::{PixelStorage, WrapMode};
use crate::utils::error::ImageError;
use anyhow::Error;
use anyhow::{Context, Result, bail};
use exr::prelude::{read_first_rgba_layer_from_file, write_rgba_file};
use image_rs::{DynamicImage, ImageReader};
use shared::Float;
use shared::core::color::{ColorEncoding, LINEAR};
use shared::core::color::{ColorEncoding, LINEAR, SRGB};
use shared::core::geometry::Point2i;
use shared::core::image::{DeviceImage, ImageBase, PixelFormat};
use shared::core::image::{PixelFormat};
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::path::Path;
@ -38,7 +37,7 @@ impl ImageIO for Image {
}
}
fn write(&self, filename: &str, metadata: &ImageMetadata) -> Result<(), Error> {
fn write(&self, filename: &str, metadata: &ImageMetadata) -> Result<()> {
let path = Path::new(filename);
let ext = path.extension().and_then(|s| s.to_str()).unwrap_or("");
let res = match ext.to_lowercase().as_str() {
@ -48,12 +47,13 @@ impl ImageIO for Image {
"qoi" => self.write_qoi(path),
_ => Err(anyhow::anyhow!("Unsupported write format: {}", ext)),
};
res.map_err(|e| ImageError::Io(std::io::Error::other(e)))
res.map_err(|e| ImageError::Io(std::io::Error::other(e)))?;
Ok(())
}
fn write_png(&self, path: &Path) -> Result<()> {
let w = self.resolution.x() as u32;
let h = self.resolution.y() as u32;
let w = self.resolution().x() as u32;
let h = self.resolution().y() as u32;
// Convert whatever we have to u8 [0..255]
let data = self.to_u8_buffer();
@ -99,8 +99,8 @@ impl ImageIO for Image {
}
fn write_qoi(&self, path: &Path) -> Result<()> {
let w = self.resolution.x() as u32;
let h = self.resolution.y() as u32;
let w = self.resolution().x() as u32;
let h = self.resolution().y() as u32;
let data = self.to_u8_buffer();
let color_type = match self.n_channels() {
@ -122,8 +122,8 @@ impl ImageIO for Image {
fn write_exr(&self, path: &Path, _metadata: &ImageMetadata) -> Result<()> {
// EXR requires F32
let w = self.resolution.x() as usize;
let h = self.resolution.y() as usize;
let w = self.resolution().x() as usize;
let h = self.resolution().y() as usize;
let c = self.n_channels();
write_rgba_file(path, w, h, |x, y| {
@ -159,8 +159,9 @@ impl ImageIO for Image {
}
// Header
let res = self.resolution();
writeln!(writer, "PF")?;
writeln!(writer, "{} {}", self.resolution.x(), self.resolution.y())?;
writeln!(writer, "{} {}", res.x(), res.y())?;
let scale = if cfg!(target_endian = "little") {
-1.0
} else {
@ -169,8 +170,8 @@ impl ImageIO for Image {
writeln!(writer, "{}", scale)?;
// PBRT stores top-to-bottom.
for y in (0..self.resolution.y()).rev() {
for x in 0..self.resolution.x() {
for y in (0..res.y()).rev() {
for x in 0..res.x() {
for c in 0..3 {
let val =
self.get_channel_with_wrap(Point2i::new(x, y), c, WrapMode::Clamp.into());
@ -182,9 +183,11 @@ impl ImageIO for Image {
Ok(())
}
// TODO: Change Image to use Vec for data, always. Only convert to Device types on
// constructors/creation
fn to_u8_buffer(&self) -> Vec<u8> {
match &self.pixels {
PixelStorage::U8(data) => data.clone(),
PixelStorage::U8(data) => data,
PixelStorage::F16(data) => data
.iter()
.map(|v| (v.to_f32().clamp(0.0, 1.0) * 255.0 + 0.5) as u8)
@ -207,27 +210,20 @@ fn read_generic(path: &Path, encoding: Option<ColorEncoding>) -> Result<ImageAnd
let res = Point2i::new(w, h);
// Check if it was loaded as high precision or standard
let rgb_names = || vec!["R".to_string(), "G".to_string(), "B".to_string()];
let rgba_names = || {
vec![
"R".to_string(),
"G".to_string(),
"B".to_string(),
"A".to_string(),
]
};
let rgb_names = vec!["R", "G", "B"];
let rgba_names = vec!["R", "G", "B", "A"];
let image = match dyn_img {
DynamicImage::ImageRgb32F(buf) => Image::from_f32(buf.into_raw(), res, rgb_names()),
DynamicImage::ImageRgba32F(buf) => Image::from_f32(buf.into_raw(), res, rgba_names()),
DynamicImage::ImageRgb32F(buf) => Image::from_f32(buf.into_raw(), res, &rgb_names),
DynamicImage::ImageRgba32F(buf) => Image::from_f32(buf.into_raw(), res, &rgba_names),
_ => {
// Default to RGB8 for everything else
let enc = encoding.unwrap_or(ColorEncoding::sRGB);
let enc = encoding.unwrap_or(SRGB);
if dyn_img.color().has_alpha() {
let buf = dyn_img.to_rgba8();
Image::from_u8(buf.into_raw(), res, rgba_names(), enc)
Image::from_u8(buf.into_raw(), res, &rgba_names, enc)
} else {
let buf = dyn_img.to_rgb8();
Image::from_u8(buf.into_raw(), res, rgb_names(), enc)
Image::from_u8(buf.into_raw(), res, &rgb_names, enc)
}
}
};
@ -258,13 +254,11 @@ fn read_exr(path: &Path) -> Result<ImageAndMetadata> {
let w = image.layer_data.size.width() as i32;
let h = image.layer_data.size.height() as i32;
let image = Image {
format: PixelFormat::F32,
resolution: Point2i::new(w, h),
channel_names: vec!["R".into(), "G".into(), "B".into(), "A".into()],
encoding: LINEAR,
pixels: PixelStorage::F32(image.layer_data.channel_data.pixels),
};
let image = Image::from_f32(
image.layer_data.channel_data.pixels,
Point2i::new(w, h),
&vec!["R", "G", "B", "A"],
);
let metadata = ImageMetadata::default();
Ok(ImageAndMetadata { image, metadata })
@ -338,12 +332,12 @@ fn read_pfm(path: &Path) -> Result<ImageAndMetadata> {
}
let names = if channels == 1 {
vec!["Y".into()]
vec!["Y"]
} else {
vec!["R".into(), "G".into(), "B".into()]
vec!["R", "G", "B"]
};
let image = Image::new(PixelFormat::F32, Point2i::new(w, h), names, LINEAR);
let image = Image::new(PixelFormat::F32, Point2i::new(w, h), &names, LINEAR.into());
let metadata = ImageMetadata::default();
Ok(ImageAndMetadata { image, metadata })

View file

@ -1,7 +1,7 @@
use crate::utils::containers::Array2D;
use anyhow::{Result, anyhow};
use half::f16;
use rayon::prelude::ParallelIterator;
use rayon::prelude::{IndexedParallelIterator, ParallelIterator, ParallelSliceMut};
use shared::Float;
use shared::core::color::{ColorEncoding, ColorEncodingTrait, LINEAR};
use shared::core::geometry::{Bounds2f, Point2f, Point2i};
@ -78,7 +78,7 @@ impl PixelStorage {
pub fn as_pixels(&self) -> Pixels {
match self {
PixelStorage::U8(data) => Pixels::U8(data.as_ptr().into()),
PixelStorage::F16(data) => Pixels::F16((data.as_ptr() as *const u16).into()),
PixelStorage::F16(data) => Pixels::F16((data.as_ptr() as *const f16).into()),
PixelStorage::F32(data) => Pixels::F32(data.as_ptr().into()),
}
}
@ -145,7 +145,7 @@ impl Image {
Self {
storage,
channel_names: String::from(channel_names),
channel_names,
device,
}
}
@ -212,7 +212,6 @@ impl Image {
) -> Self {
let n_channels = channel_names.len();
let pixel_count = (resolution.x() * resolution.y()) as usize * n_channels;
let owned_names: Vec<String> = channel_names.iter().map(|s| s.to_string()).collect();
let storage = match format {
PixelFormat::U8 => PixelStorage::U8(vec![0; pixel_count].into()),
@ -220,7 +219,7 @@ impl Image {
PixelFormat::F32 => PixelStorage::F32(vec![0.0; pixel_count].into()),
};
Self::from_storage(storage, resolution, owned_names, encoding)
Self::from_storage(storage, resolution, channel_names, *encoding)
}
pub fn new_constant(
@ -245,9 +244,7 @@ impl Image {
data.extend_from_slice(values);
}
let owned_names: Vec<String> = channel_names.iter().map(|s| s.to_string()).collect();
Self::from_f32(data, resolution, owned_names)
Self::from_f32(data, resolution, channel_names)
}
// Access
@ -585,7 +582,7 @@ impl Image {
pub fn update_view_pointers(&mut self) {
self.device.pixels = match &self.storage {
PixelStorage::U8(vec) => Pixels::U8(vec.as_ptr().into()),
PixelStorage::F16(vec) => Pixels::F16((vec.as_ptr() as *const u16).into()),
PixelStorage::F16(vec) => Pixels::F16((vec.as_ptr() as *const f16).into()),
PixelStorage::F32(vec) => Pixels::F32(vec.as_ptr().into()),
};
}

View file

@ -1,10 +1,11 @@
use super::Image;
use crate::core::image::pixel::PixelStorage;
use crate::core::image::PixelStorage;
use crate::core::image::pixel::PixelStorageTrait;
use rayon::prelude::*;
use shared::Float;
use shared::core::color::ColorEncoding;
use shared::core::geometry::{Bounds2i, Point2i};
use shared::core::image::{PixelFormat, Pixels, WrapMode, WrapMode2D};
use shared::core::image::{PixelFormat, WrapMode, WrapMode2D};
use shared::utils::Ptr;
use shared::utils::math::windowed_sinc;
use std::sync::{Arc, Mutex};
@ -18,35 +19,41 @@ pub struct ResampleWeight {
impl Image {
pub fn flip_y(&mut self) {
let res = self.resolution();
let nc = self.n_channels();
let nc = self.n_channels() as usize;
match &mut self.pixels {
Pixels::U8(d) => flip_y_kernel(d, res, nc),
Pixels::F16(d) => flip_y_kernel(d, res, nc),
Pixels::F32(d) => flip_y_kernel(d, res, nc),
PixelStorage::U8(d) => flip_y_kernel(d, res, nc),
PixelStorage::F16(d) => flip_y_kernel(d, res, nc),
PixelStorage::F32(d) => flip_y_kernel(d, res, nc),
}
}
pub fn crop(&self, bounds: Bounds2i) -> Image {
let res = self.resolution();
let n_channels = self.n_channels();
let n_channels = self.n_channels() as usize;
let new_res = Point2i::new(
bounds.p_max.x() - bounds.p_min.x(),
bounds.p_max.y() - bounds.p_min.y(),
);
let mut new_image = Image::from_vector(
self.format,
let mut new_image = Image::new(
self.format(),
new_res,
self.channel_names.clone(),
self.encoding(),
&self.channel_names,
self.encoding().into(),
);
match (&self.pixels, &mut new_image.pixels) {
(Pixels::U8(src), Pixels::U8(dst)) => crop_kernel(src, dst, res, bounds, n_channels),
(Pixels::F16(src), Pixels::F16(dst)) => crop_kernel(src, dst, res, bounds, n_channels),
(Pixels::F32(src), Pixels::F32(dst)) => crop_kernel(src, dst, res, bounds, n_channels),
(PixelStorage::U8(src), PixelStorage::U8(dst)) => {
crop_kernel(src, dst, res, bounds, n_channels)
}
(PixelStorage::F16(src), PixelStorage::F16(dst)) => {
crop_kernel(src, dst, res, bounds, n_channels)
}
(PixelStorage::F32(src), PixelStorage::F32(dst)) => {
crop_kernel(src, dst, res, bounds, n_channels)
}
_ => panic!("Format mismatch in crop"),
}
@ -55,9 +62,9 @@ impl Image {
pub fn copy_rect_out(&self, extent: Bounds2i, buf: &mut [Float], wrap: WrapMode2D) {
match &self.pixels {
Pixels::U8(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
Pixels::F16(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
Pixels::F32(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
PixelStorage::U8(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
PixelStorage::F16(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
PixelStorage::F32(d) => copy_rect_out_kernel(d, self, extent, buf, wrap),
}
}
@ -67,9 +74,9 @@ impl Image {
let encoding = self.encoding();
match &mut self.pixels {
Pixels::U8(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
Pixels::F16(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
Pixels::F32(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
PixelStorage::U8(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
PixelStorage::F16(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
PixelStorage::F32(d) => copy_rect_in_kernel(d, res, n_channels, encoding, extent, buf),
}
}
@ -81,11 +88,11 @@ impl Image {
"ResizeUp requires Float format"
);
let resampled_image = Arc::new(Mutex::new(Image::from_vector(
PixelFormat::F32, // Force float output
let resampled_image = Arc::new(Mutex::new(Image::new(
PixelFormat::F32,
new_res,
self.channel_names.clone(),
self.encoding(),
&self.channel_names,
self.encoding().into(),
)));
let x_weights = resample_weights(res.x() as usize, new_res.x() as usize);
@ -104,7 +111,7 @@ impl Image {
let in_extent =
Bounds2i::from_points(Point2i::new(x_start, y_start), Point2i::new(x_end, y_end));
let mut in_buf = vec![0.0; in_extent.area() as usize * n_channels];
let mut in_buf = vec![0.0; in_extent.area() as usize * n_channels as usize];
self.copy_rect_out(in_extent, &mut in_buf, wrap_mode);
let out_buf = compute_resize_tile(
@ -143,14 +150,14 @@ impl Image {
let mut next = Image::new(
prev.format(),
new_res,
prev.channel_names.clone(),
prev.encoding(),
&prev.channel_names,
prev.encoding().into(),
);
match &mut next.pixels {
Pixels::U8(d) => downsample_kernel(d, new_res, prev, internal_wrap),
Pixels::F16(d) => downsample_kernel(d, new_res, prev, internal_wrap),
Pixels::F32(d) => downsample_kernel(d, new_res, prev, internal_wrap),
PixelStorage::U8(d) => downsample_kernel(d, new_res, prev, internal_wrap),
PixelStorage::F16(d) => downsample_kernel(d, new_res, prev, internal_wrap),
PixelStorage::F32(d) => downsample_kernel(d, new_res, prev, internal_wrap),
}
levels.push(next);
}
@ -158,7 +165,7 @@ impl Image {
}
}
fn flip_y_kernel<T: PixelStorage>(pixels: &mut [T], res: Point2i, channels: usize) {
fn flip_y_kernel<T: PixelStorageTrait>(pixels: &mut [T], res: Point2i, channels: usize) {
let w = res.x() as usize;
let h = res.y() as usize;
let stride = w * channels;
@ -171,7 +178,7 @@ fn flip_y_kernel<T: PixelStorage>(pixels: &mut [T], res: Point2i, channels: usiz
}
}
fn crop_kernel<T: PixelStorage>(
fn crop_kernel<T: PixelStorageTrait>(
src: &[T],
dst: &mut [T],
src_res: Point2i,
@ -194,7 +201,7 @@ fn crop_kernel<T: PixelStorage>(
});
}
fn copy_rect_out_kernel<T: PixelStorage>(
fn copy_rect_out_kernel<T: PixelStorageTrait>(
src: &[T],
image: &Image,
extent: Bounds2i,
@ -223,14 +230,15 @@ fn copy_rect_out_kernel<T: PixelStorage>(
// We fall back to get_channel which handles the wrapping math.
let p = Point2i::new(x, y);
for c in 0..channels {
row_buf[x_rel * channels + c] = image.get_channel_with_wrap(p, c, wrap);
row_buf[x_rel * channels + c] =
image.get_channel_with_wrap(p, c.try_into().unwrap(), wrap);
}
}
}
});
}
fn copy_rect_in_kernel<T: PixelStorage>(
fn copy_rect_in_kernel<T: PixelStorageTrait>(
dst: &mut [T],
res: Point2i,
channels: usize,
@ -263,7 +271,7 @@ fn copy_rect_in_kernel<T: PixelStorage>(
}
}
fn downsample_kernel<T: PixelStorage>(
fn downsample_kernel<T: PixelStorageTrait>(
dst: &mut Ptr<T>,
dst_res: Point2i,
prev: &Image,

View file

@ -3,12 +3,12 @@ use shared::Float;
use shared::core::color::{ColorEncoding, ColorEncodingTrait};
// Allows writing generic algorithms that work on any image format.
pub trait PixelStorage: Copy + Send + Sync + 'static + PartialEq {
pub trait PixelStorageTrait: Copy + Send + Sync + 'static + PartialEq {
fn from_linear(val: Float, encoding: ColorEncoding) -> Self;
fn to_linear(self, encoding: ColorEncoding) -> Float;
}
impl PixelStorage for f32 {
impl PixelStorageTrait for f32 {
#[inline(always)]
fn from_linear(val: Float, _enc: ColorEncoding) -> Self {
val
@ -19,7 +19,7 @@ impl PixelStorage for f32 {
}
}
impl PixelStorage for f16 {
impl PixelStorageTrait for f16 {
#[inline(always)]
fn from_linear(val: Float, _enc: ColorEncoding) -> Self {
f16::from_f32(val)
@ -30,7 +30,7 @@ impl PixelStorage for f16 {
}
}
impl PixelStorage for u8 {
impl PixelStorageTrait for u8 {
#[inline(always)]
fn from_linear(val: Float, enc: ColorEncoding) -> Self {
let mut out = [0u8];

View file

@ -23,7 +23,7 @@ pub trait MaterialFactory {
name: &str,
params: &TextureParameterDictionary,
normal_map: Option<Arc<Image>>,
named_materials: Arc<HashMap<String, Material>>,
named_materials: &HashMap<String, Material>,
loc: FileLoc,
arena: &mut Arena,
) -> Result<Self>;
@ -34,52 +34,48 @@ impl MaterialFactory for Material {
name: &str,
parameters: &TextureParameterDictionary,
normal_map: Option<Arc<Image>>,
named_materials: Arc<HashMap<String, Material>>,
named_materials: &HashMap<String, Material>,
loc: FileLoc,
arena: &mut Arena,
) -> Result<Material> {
match name {
"diffuse" => {
DiffuseMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
DiffuseMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"coateddiffuse" => {
CoatedDiffuseMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
CoatedDiffuseMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"coatedconductor" => CoatedConductorMaterial::create(
parameters,
normal_map,
&named_materials,
named_materials,
&loc,
arena,
),
"diffusetransmission" => DiffuseTransmissionMaterial::create(
parameters,
normal_map,
&named_materials,
named_materials,
&loc,
arena,
),
"dielectric" => {
DielectricMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
DielectricMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"thindielectric" => ThinDielectricMaterial::create(
parameters,
normal_map,
&named_materials,
&loc,
arena,
),
"hair" => HairMaterial::create(parameters, normal_map, &named_materials, &loc, arena),
"thindielectric" => {
ThinDielectricMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"hair" => HairMaterial::create(parameters, normal_map, named_materials, &loc, arena),
"conductor" => {
ConductorMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
ConductorMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"measured" => {
MeasuredMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
MeasuredMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"subsurface" => {
SubsurfaceMaterial::create(parameters, normal_map, &named_materials, &loc, arena)
SubsurfaceMaterial::create(parameters, normal_map, named_materials, &loc, arena)
}
"mix" => MixMaterial::create(parameters, normal_map, &named_materials, &loc, arena),
"mix" => MixMaterial::create(parameters, normal_map, named_materials, &loc, arena),
_ => Err(anyhow!("Material type '{}' unknown at {}", $name, $loc)),
}

View file

@ -1,6 +1,7 @@
use super::BasicScene;
use super::entities::*;
use crate::spectra::get_colorspace_context;
use crate::Arena;
use crate::spectra::get_colorspace_device;
use crate::utils::error::FileLoc;
use crate::utils::normalize_utf8;
use crate::utils::parameters::error_exit;
@ -188,7 +189,7 @@ impl ParserTarget for BasicSceneBuilder {
}
fn color_space(&mut self, name: &str, loc: FileLoc) {
let stdcs = get_colorspace_context();
let stdcs = get_colorspace_device();
let _ = match stdcs.get_named(name) {
Ok(cs) => {
self.graphics_state.color_space = Some(cs);
@ -427,7 +428,7 @@ impl ParserTarget for BasicSceneBuilder {
})
}
fn world_begin(&mut self, loc: FileLoc) {
fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena) {
self.verify_options("WorldBegin", &loc);
self.current_block = BlockState::WorldBlock;
for i in 0..MAX_TRANSFORMS {
@ -456,6 +457,7 @@ impl ParserTarget for BasicSceneBuilder {
self.current_accelerator
.take()
.expect("Accelerator not set before WorldBegin"),
arena,
);
}
@ -529,6 +531,7 @@ impl ParserTarget for BasicSceneBuilder {
tex_name: &str,
params: &ParsedParameterVector,
loc: FileLoc,
arena: &mut Arena,
) {
let name = normalize_utf8(orig_name);
self.verify_world("Texture", &loc);
@ -572,9 +575,11 @@ impl ParserTarget for BasicSceneBuilder {
};
if type_name == "float" {
self.scene.add_float_texture(name.to_string(), entity);
self.scene
.add_float_texture(name.to_string(), entity, arena);
} else {
self.scene.add_spectrum_texture(name.to_string(), entity);
self.scene
.add_spectrum_texture(name.to_string(), entity, arena);
}
}

View file

@ -1,6 +1,7 @@
use super::entities::*;
use super::state::*;
use crate::core::camera::CameraFactory;
use crate::core::film::FilmFactory;
use crate::core::filter::FilterFactory;
use crate::core::image::{Image, io::ImageIO};
use crate::core::material::MaterialFactory;
@ -8,12 +9,10 @@ use crate::core::primitive::{CreateGeometricPrimitive, CreateSimplePrimitive};
use crate::core::sampler::SamplerFactory;
use crate::core::shape::ShapeFactory;
use crate::core::texture::{FloatTexture, SpectrumTexture};
use crate::utils::arena;
use crate::utils::arena::Arena;
use crate::utils::error::FileLoc;
use crate::utils::parallel::{AsyncJob, run_async};
use crate::utils::parameters::{NamedTextures, ParameterDictionary, TextureParameterDictionary};
use crate::utils::{Upload, resolve_filename};
use crate::{Arena, FileLoc};
use parking_lot::Mutex;
use rayon::prelude::*;
use shared::core::camera::Camera;
@ -132,6 +131,7 @@ impl BasicScene {
filt.expect("Must have a filter"),
Some(camera.camera_transform.clone()),
&film.loc,
arena,
)
.expect("Must have a film"),
);
@ -239,6 +239,7 @@ impl BasicScene {
pub fn add_float_texture(&self, name: String, texture: TextureSceneEntity, arena: &mut Arena) {
let mut state = self.texture_state.lock();
let arena = arena.clone();
self.add_texture_generic(
name,
texture,
@ -404,15 +405,17 @@ impl BasicScene {
}
let normal_map = self.get_normal_map(&state, &entity.parameters);
let tex_dict =
TextureParameterDictionary::new(entity.parameters.into(), Some(*textures));
let tex_dict = TextureParameterDictionary::new(
Arc::new(entity.parameters.clone()),
Some(textures),
);
let mat = Material::create(
&mat_type,
&tex_dict,
normal_map,
named_materials.into(),
entity.loc,
&named_materials, // Reference for now
entity.loc.clone(),
arena,
)
.expect("Could not create material");
@ -434,7 +437,7 @@ impl BasicScene {
&entity.name,
&tex_dict,
normal_map,
&named_materials, // Reference
&named_materials,
entity.loc.clone(),
arena,
)
@ -447,7 +450,6 @@ impl BasicScene {
pub fn create_aggregate(
&self,
arena: &mut Arena,
textures: &NamedTextures,
named_materials: &HashMap<String, Material>,
materials: &Vec<Material>,
@ -496,11 +498,12 @@ impl BasicScene {
*sh.render_from_object.as_ref(),
*sh.object_from_render.as_ref(),
sh.reverse_orientation,
sh.base.parameters,
lookup.textures.float_textures,
sh.base.loc,
sh.base.parameters.clone(),
&lookup.textures.float_textures,
sh.base.loc.clone(),
arena,
)
.expect("Could not create shape")
})
.collect()
}
@ -509,7 +512,7 @@ impl BasicScene {
&self,
entities: &[AnimatedShapeSceneEntity],
lookup: &SceneLookup,
arena: &mut Arena,
arena: &Arena,
) -> Vec<Vec<Shape>> {
entities
.par_iter()
@ -519,23 +522,19 @@ impl BasicScene {
*sh.identity.as_ref(),
*sh.identity.as_ref(),
sh.reverse_orientation,
sh.transformed_base.base.parameters,
lookup.textures.float_textures,
sh.transformed_base.base.loc,
sh.transformed_base.base.parameters.clone(),
&lookup.textures.float_textures,
sh.transformed_base.base.loc.clone(),
arena,
)
.map_err(|e| {
log::error!("{}: Failed to create shape: {}", sh.base.loc, e);
e
})
.ok()
.expect("Could not create shape")
})
.collect()
}
fn upload_shapes(
&self,
arena: &mut Arena,
arena: &Arena,
entities: &[ShapeSceneEntity],
loaded: Vec<Vec<Shape>>,
lookup: &SceneLookup,

View file

@ -7,7 +7,7 @@ use shared::core::light::Light;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct TextureState {
pub serial_float_textures: Vec<(String, TextureSceneEntity)>,
pub serial_spectrum_textures: Vec<(String, TextureSceneEntity)>,
@ -18,7 +18,7 @@ pub struct TextureState {
pub n_missing_textures: usize,
}
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct MaterialState {
pub named_materials: Vec<(String, SceneEntity)>,
pub materials: Vec<SceneEntity>,
@ -26,13 +26,13 @@ pub struct MaterialState {
pub normal_maps: HashMap<String, Arc<Image>>,
}
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct LightState {
pub light_jobs: Vec<AsyncJob<Light>>,
pub area_lights: Vec<SceneEntity>,
}
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct MediaState {
pub jobs: HashMap<String, AsyncJob<Medium>>,
pub map: HashMap<String, Arc<Medium>>,

View file

@ -1,11 +1,10 @@
use crate::core::texture::FloatTexture;
use crate::shapes::{BilinearPatchMesh, TriangleMesh};
use crate::utils::{Arena, FileLoc, ParameterDictionary};
use anyhow::{Result, anyhow};
use parking_lot::Mutex;
use shared::core::shape::*;
use shared::shapes::*;
// use shared::spectra::*;
use anyhow::Result;
use parking_lot::Mutex;
use shared::utils::Transform;
use std::collections::HashMap;
use std::sync::Arc;
@ -19,9 +18,9 @@ pub trait CreateShape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>,
float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>>;
}
@ -32,9 +31,9 @@ pub trait ShapeFactory {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>,
float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>>;
}
@ -45,9 +44,9 @@ impl ShapeFactory for Shape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
float_textures: HashMap<String, Arc<FloatTexture>>,
float_textures: &HashMap<String, Arc<FloatTexture>>,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>> {
match name {
"sphere" => SphereShape::create(

99
src/films/gbuffer.rs Normal file
View file

@ -0,0 +1,99 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensor, PixelSensorTrait};
use crate::utils::containers::Array2D;
use shared::core::film::{DevicePixelSensor, FilmBase, GBufferFilm};
use shared::core::filter::FilterTrait;
use shared::spectra::RGBColorSpace;
use shared::utils::AnimatedTransform;
use std::path::Path;
/// Host-side wrapper for a G-buffer film.
///
/// Owns nothing beyond the device-facing value today; see `new` for how the
/// pixel storage is created.
pub struct GBufferFilmHost {
    // Device-facing film value handed to `Film::GBuffer` by the factory.
    pub device: GBufferFilm,
}
impl GBufferFilmHost {
    /// Builds the host-side G-buffer film and its device view.
    ///
    /// `output_from_render` maps render space into the requested output
    /// coordinate system; when `apply_inverse` is set the transform is
    /// inverted at use time (camera-space output).
    ///
    /// # Panics
    /// Panics if `base.pixel_bounds` is empty or `base.sensor` is null.
    pub fn new(
        base: &FilmBase,
        output_from_render: &AnimatedTransform,
        apply_inverse: bool,
        colorspace: &RGBColorSpace,
        max_component_value: Float,
        write_fp16: bool,
    ) -> Self {
        assert!(!base.pixel_bounds.is_empty());
        let sensor_ptr = base.sensor;
        if sensor_ptr.is_null() {
            panic!("Film must have a sensor");
        }
        // SAFETY: checked non-null above; the sensor must outlive this film
        // (it is stored by raw pointer in FilmBase).
        let sensor = unsafe { &*sensor_ptr };
        // Output RGB = colorspace RGB-from-XYZ composed with the sensor's
        // XYZ-from-sensor-RGB calibration matrix.
        let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
        let filter_integral = base.filter.integral();
        let pixels = Array2D::new(base.pixel_bounds);
        // NOTE(review): unlike RGBFilmHost, the host-side `pixels` Array2D is
        // dropped at the end of this function while `device.pixels` keeps only
        // its device view — confirm `pixels.device` keeps the allocation
        // alive, otherwise this is a dangling view.
        let device = GBufferFilm {
            base: base.clone(),
            output_from_render: *output_from_render,
            apply_inverse,
            pixels: pixels.device,
            colorspace: colorspace.clone(),
            max_component_value,
            write_fp16,
            filter_integral,
            output_rgbf_from_sensor_rgb,
        };
        Self { device }
    }
}
impl CreateFilm for GBufferFilm {
    /// Creates a G-buffer film from parsed scene parameters.
    ///
    /// # Errors
    /// Returns an error if the sensor cannot be created, the output filename
    /// is not an `.exr`, no camera transform was supplied, or the requested
    /// coordinate system is neither "camera" nor "world".
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
        arena: &mut Arena,
    ) -> Result<Film> {
        // NOTE(review): panics if the dictionary carries no color space.
        let colorspace = params.color_space.as_ref().unwrap();
        let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
        let write_fp16 = params.get_one_bool("savefp16", true);
        let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
        // NOTE(review): FilmBase stores a raw pointer to `sensor`, a stack
        // local here — it likely needs to be arena-allocated so it outlives
        // the returned film; confirm against the other film factories.
        let film_base = FilmBase::create(params, filter, Some(&sensor), loc);

        let filename = params.get_one_string("filename", "pbrt.exr");
        if Path::new(&filename).extension() != Some("exr".as_ref()) {
            // Bug fix: `format!(...).into()` does not convert to
            // `anyhow::Error` (there is no `From<String>` impl); build the
            // error with `anyhow!` instead.
            return Err(anyhow!(
                "{}: EXR is the only format supported by GBufferFilm",
                loc
            ));
        }

        let coords_system = params.get_one_string("coordinatesystem", "camera");
        let mut apply_inverse = false;
        // Bug fix: a bare `String` error cannot be propagated with `?` into an
        // `anyhow::Result`; use `anyhow!` to build a proper error value.
        let camera_transform = camera_transform
            .ok_or_else(|| anyhow!("GBufferFilm requires a camera_transform"))?;
        let output_from_render = if coords_system == "camera" {
            // Camera-space output: store render-from-camera and invert it at
            // use time.
            apply_inverse = true;
            camera_transform.render_from_camera
        } else if coords_system == "world" {
            AnimatedTransform::from_transform(&camera_transform.world_from_render)
        } else {
            // Bug fix: the original message had an unmatched "(" and a stray
            // line break inside the string literal.
            return Err(anyhow!(
                "{}: unknown coordinate system for GBufferFilm (expecting camera or world)",
                loc
            ));
        };

        let film = GBufferFilmHost::new(
            &film_base,
            &output_from_render,
            apply_inverse,
            colorspace,
            max_component_value,
            write_fp16,
        );
        Ok(Film::GBuffer(film.device))
    }
}

26
src/films/mod.rs Normal file
View file

@ -0,0 +1,26 @@
use crate::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::Film;
use shared::core::filter::Filter;
pub mod gbuffer;
pub mod rgb;
pub mod spectral;
pub use gbuffer::*;
pub use rgb::*;
pub use spectral::*;
/// Factory trait: builds a concrete `Film` variant from parsed scene
/// parameters.
///
/// * `name` — film type name from the scene description.
/// * `params` — parsed parameter dictionary for the Film directive.
/// * `exposure_time` — shutter open duration used for sensor calibration.
/// * `filter` — reconstruction filter the film base wraps.
/// * `camera_transform` — needed by films that output in camera/world space.
/// * `loc` — source location for error reporting.
/// * `arena` — unified-memory arena for device-visible allocations.
pub trait CreateFilm {
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
        arena: &mut Arena,
    ) -> Result<Film>;
}

79
src/films/rgb.rs Normal file
View file

@ -0,0 +1,79 @@
use super::*;
use crate::Arena;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use shared::core::camera::CameraTransform;
use shared::core::film::{DevicePixelSensor, Film, FilmBase, RGBFilm, RGBPixel};
use shared::core::filter::FilterTrait;
use shared::spectra::RGBColorSpace;
/// Host-owned backing storage for an RGB film's pixel grid.
struct RGBFilmStorage {
    // Pixel array whose device view is shared with the RGBFilm value.
    pixels: Array2D<RGBPixel>,
}

/// Host-side wrapper that keeps the pixel storage alive alongside the
/// device-facing `RGBFilm` view.
pub struct RGBFilmHost {
    // Device-facing film value handed to `Film::RGB` by the factory.
    pub device: RGBFilm,
    // Keeps the pixel allocation alive for the lifetime of the host wrapper.
    storage: RGBFilmStorage,
}
impl RGBFilmHost {
    /// Builds the host-side RGB film and its device view.
    ///
    /// The pixel grid is owned by `RGBFilmStorage`; the device value only
    /// carries its device view (same pattern as `GBufferFilmHost::new`).
    ///
    /// # Panics
    /// Panics if `base.sensor` is null.
    pub fn new(
        base: FilmBase,
        colorspace: &RGBColorSpace,
        max_component_value: Float,
        write_fp16: bool,
    ) -> Self {
        let sensor_ptr = base.sensor;
        if sensor_ptr.is_null() {
            panic!("Film must have a sensor");
        }
        // SAFETY: checked non-null above; the sensor must outlive this film
        // (FilmBase stores it by raw pointer).
        let sensor = unsafe { &*sensor_ptr };
        let filter_integral = base.filter.integral();
        // Output RGB = colorspace RGB-from-XYZ composed with the sensor's
        // XYZ-from-sensor-RGB calibration matrix.
        let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
        // Bug fix: the original built an unused `pixel_vec` and then
        // initialized `pixels` from an undefined `pixels_array` binding; the
        // Array2D is the single source of pixel storage and is kept alive in
        // `RGBFilmStorage` below.
        let pixels: Array2D<RGBPixel> = Array2D::new(base.pixel_bounds);
        let device = RGBFilm {
            base,
            max_component_value,
            write_fp16,
            filter_integral,
            output_rgbf_from_sensor_rgb,
            pixels: pixels.device,
        };
        let storage = RGBFilmStorage { pixels };
        Self { device, storage }
    }
}
impl CreateFilm for RGBFilm {
    /// Creates an RGB film from parsed scene parameters.
    ///
    /// Builds the pixel sensor and film base, wraps them in an `RGBFilmHost`,
    /// and returns the device-facing view.
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
        arena: &mut Arena,
    ) -> anyhow::Result<Film> {
        // NOTE(review): panics if the dictionary carries no color space.
        let colorspace = params.color_space.as_ref().unwrap();
        let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
        let write_fp16 = params.get_one_bool("savefp16", true);
        let sensor = DevicePixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
        // NOTE(review): FilmBase stores a raw pointer to `sensor`, a stack
        // local here — confirm it is arena-allocated so it outlives the
        // returned film (the `arena` parameter is currently unused).
        let film_base = FilmBase::create(params, filter, Some(&sensor), loc);
        let film = RGBFilmHost::new(film_base, &colorspace, max_component_value, write_fp16);
        Ok(Film::RGB(film.device))
    }
}

147
src/films/spectral.rs Normal file
View file

@ -0,0 +1,147 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::{
CreateFilm, DevicePixelSensor, FilmBase, PixelSensork, SpectralFilm, SpectralPixel,
};
use shared::core::filter::FilterTrait;
use shared::spectra::{LAMBDA_MAX, LAMBDA_MIN, RGBColorSpace};
use shared::utils::AtomicFloat;
use shared::utils::Ptr;
use shared::utils::containers::DeviceArray2D;
use shared::utils::math::SquareMatrix;
use std::path::Path;
/// Host-owned backing storage for a spectral film: the pixel grid plus the
/// flat per-bucket accumulation buffers the pixels point into.
struct SpectralFilmStorage {
    // NOTE(review): declared as a DeviceArray2D here, but `new` constructs an
    // `Array2D::<SpectralPixel>` — confirm which type is intended.
    pixels: DeviceArray2D<SpectralPixel>,
    // Flat buffers, n_buckets entries per pixel; each pixel holds raw
    // pointers into its own disjoint window of these vectors.
    bucket_sums: Vec<f64>,
    weight_sums: Vec<f64>,
    bucket_splats: Vec<AtomicFloat>,
}

/// Host-side wrapper that keeps the bucket storage alive alongside the
/// device-facing `SpectralFilm` view.
pub struct SpectralFilmHost {
    // Device-facing film value handed to `Film::Spectral` by the factory.
    pub device: SpectralFilm,
    // Boxed so the buffers have a stable address the device view can point at.
    storage: Box<SpectralFilmStorage>,
}
impl SpectralFilmHost {
    /// Builds the host-side spectral film and its device view.
    ///
    /// Allocates `n_buckets` wavelength buckets per pixel in flat host
    /// vectors, wires each pixel's raw pointers to its disjoint window of
    /// those vectors, then constructs the device-facing `SpectralFilm`.
    pub fn new(
        base: &FilmBase,
        lambda_min: Float,
        lambda_max: Float,
        n_buckets: usize,
        colorspace: &RGBColorSpace,
        max_component_value: Float,
        write_fp16: bool,
    ) -> Self {
        let n_pixels = base.pixel_bounds.area() as usize;
        let total_buckets = n_pixels * n_buckets;
        let bucket_sums = vec![0.0; total_buckets];
        let weight_sums = vec![0.0; total_buckets];
        let mut bucket_splats = Vec::with_capacity(total_buckets);
        for _ in 0..total_buckets {
            bucket_splats.push(AtomicFloat::new(0.0));
        }
        let mut pixels = Array2D::<SpectralPixel>::new(base.pixel_bounds);
        // NOTE(review): casting `as_ptr()` to `*mut` and later writing through
        // it is undefined behavior under Rust's aliasing rules; these should
        // come from `as_mut_ptr()` on `mut` bindings — confirm.
        let p_sums_base = bucket_sums.as_ptr() as *mut f64;
        let p_weights_base = weight_sums.as_ptr() as *mut f64;
        let p_splats_base = bucket_splats.as_ptr() as *mut AtomicFloat;
        for i in 0..n_pixels {
            let pixel = pixels.get_linear_mut(i);
            pixel.bucket_offset = i * n_buckets;
            // SAFETY: each pixel receives a disjoint n_buckets-long window of
            // the flat vectors; the heap allocations' addresses remain stable
            // when the vectors are moved into the Box below.
            unsafe {
                let offset = i * n_buckets;
                pixel.bucket_sums = p_sums_base.add(offset);
                pixel.weight_sums = p_weights_base.add(offset);
                pixel.bucket_splats = p_splats_base.add(offset);
            }
        }
        let storage = Box::new(SpectralFilmStorage {
            pixels,
            bucket_sums,
            weight_sums,
            bucket_splats,
        });
        let device = SpectralFilm {
            base: *base,
            colorspace: colorspace.clone(),
            lambda_min,
            lambda_max,
            n_buckets,
            max_component_value,
            write_fp16,
            filter_integral: base.filter.integral(),
            // NOTE(review): placeholder identity matrix — the other films
            // derive this from the sensor calibration; confirm intent.
            output_rgbf_from_sensor_rgb: SquareMatrix::identity(),
            pixels: DeviceArray2D {
                values: Ptr::from(&storage.pixels),
                extent: base.pixel_bounds,
                stride: base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x(),
            },
            bucket_sums: storage.bucket_sums.as_ptr() as *mut f64,
            weight_sums: storage.weight_sums.as_ptr() as *mut f64,
            bucket_splats: storage.bucket_splats.as_ptr() as *mut AtomicFloat,
        };
        // Bug fix: the original ended with `Self { device, storage };` — the
        // trailing semicolon discarded the value, so the function returned
        // `()` instead of `Self`.
        Self { device, storage }
    }
}
impl CreateFilm for SpectralFilm {
    /// Creates a spectral film from parsed scene parameters.
    ///
    /// # Errors
    /// Returns an error if the sensor cannot be created, the output filename
    /// is not an `.exr`, or the requested wavelength range falls outside the
    /// compiled-in `[LAMBDA_MIN, LAMBDA_MAX]` range.
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
        arena: &mut Arena,
    ) -> Result<Film> {
        // NOTE(review): panics if the dictionary carries no color space.
        let colorspace = params.color_space.as_ref().unwrap();
        let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
        let write_fp16 = params.get_one_bool("savefp16", true);
        let sensor = DevicePixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
        // NOTE(review): FilmBase stores a raw pointer to `sensor`, a stack
        // local here — confirm it is arena-allocated so it outlives the
        // returned film.
        let film_base = FilmBase::create(params, filter, Some(&sensor), loc);

        let filename = params.get_one_string("filename", "pbrt.exr");
        if Path::new(&filename).extension() != Some("exr".as_ref()) {
            // Bug fix: message previously named GBufferFilm (copy-paste).
            return Err(anyhow!(
                "{}: EXR is the only format supported by SpectralFilm",
                loc
            ));
        }

        let n_buckets = params.get_one_int("nbuckets", 16) as usize;
        let lambda_min = params.get_one_float("lambdamin", LAMBDA_MIN as Float);
        // Bug fix: lambda_max previously read the "lambdamin" parameter, so a
        // user-supplied "lambdamax" was silently ignored.
        let lambda_max = params.get_one_float("lambdamax", LAMBDA_MAX as Float);
        // Bug fix: either bound out of range must be rejected; the original
        // `&&` only errored when BOTH bounds were out of range.
        if lambda_min < LAMBDA_MIN as Float || lambda_max > LAMBDA_MAX as Float {
            return Err(anyhow!(
                "{}: PBRT must be recompiled with different values of LAMBDA_MIN and LAMBDA_MAX",
                loc
            ));
        }

        let film = SpectralFilmHost::new(
            &film_base,
            lambda_min,
            lambda_max,
            n_buckets,
            colorspace,
            max_component_value,
            write_fp16,
        );
        Ok(Film::Spectral(film.device))
    }
}

View file

@ -3,3 +3,9 @@ pub mod gaussian;
pub mod lanczos;
pub mod mitchell;
pub mod triangle;
pub use boxf::*;
pub use gaussian::*;
pub use lanczos::*;
pub use mitchell::*;
pub use triangle::*;

View file

@ -4,7 +4,7 @@ use shared::core::interaction::{Interaction, InteractionTrait};
use shared::core::light::{Light, LightTrait};
use shared::core::primitive::{Primitive, PrimitiveTrait};
use shared::core::shape::ShapeIntersection;
use shared::lights::LightSampler;
use shared::lights::sampler::LightSampler;
use shared::spectra::SampledWavelengths;
use shared::utils::sampling::power_heuristic;
use shared::{Float, SHADOW_EPSILON};

View file

@ -1,5 +1,6 @@
#[allow(dead_code)]
pub mod core;
pub mod films;
pub mod filters;
pub mod globals;
pub mod integrators;

View file

@ -15,12 +15,11 @@ use shared::core::medium::MediumInterface;
use shared::core::spectrum::Spectrum;
use shared::core::texture::SpectrumType;
use shared::lights::{ImageInfiniteLight, PortalInfiniteLight, UniformInfiniteLight};
use shared::spectra::RGBColorSpace;
use shared::spectra::{DenselySampledSpectrum, RGBColorSpace};
use shared::utils::math::{equal_area_sphere_to_square, equal_area_square_to_sphere};
use shared::utils::sampling::{DevicePiecewiseConstant2D, DeviceWindowedPiecewiseConstant2D};
use shared::utils::{Ptr, Transform};
use shared::{Float, PI};
use shareed::spectra::DenselySampledSpectrum;
use std::path::Path;
pub trait CreateImageInfiniteLight {
@ -219,7 +218,7 @@ fn create_image_light(
})
.collect();
let distrib = PiecewiseConstant2D::new(&data, n_u, n_v);
let distrib = PiecewiseConstant2D::from_slice(&data_u, n_u, n_v, Bounds2f::unit());
// Build compensated distribution
let average = data.iter().sum::<Float>() / data.len() as Float;
@ -231,7 +230,7 @@ fn create_image_light(
if all_zero {
data.fill(1.0);
}
let compensated_distrib = PiecewiseConstant2D::new(&data, n_u, n_v);
let compensated_distrib = PiecewiseConstant2D::from_slice(&data, n_u, n_v, Bounds2f::unit());
let light = ImageInfiniteLight::new(
render_from_light,

View file

@ -37,7 +37,7 @@ impl CreateProjectionLight for ProjectionLight {
render_from_light: Transform,
medium_interface: MediumInterface,
scale: Float,
image: Ptr<DeviceImage>,
image: Ptr<Image>,
image_color_space: Ptr<RGBColorSpace>,
fov: Float,
) -> Self {
@ -72,12 +72,16 @@ impl CreateProjectionLight for ProjectionLight {
};
let d = image.get_sampling_distribution(dwda, screen_bounds);
let distrib =
PiecewiseConstant2D::new(d.as_slice(), d.x_size() as usize, d.y_size() as usize);
let distrib = PiecewiseConstant2D::from_slice(
d.as_slice(),
d.x_size() as usize,
d.y_size() as usize,
screen_bounds,
);
Self {
base,
image,
image: Ptr::from(image.device_image()),
image_color_space,
distrib: Ptr::from(&distrib.device),
screen_bounds,

View file

@ -4,7 +4,7 @@ use crate::core::texture::FloatTexture;
use crate::shapes::mesh::BilinearPatchMesh;
use crate::utils::sampling::PiecewiseConstant2D;
use crate::utils::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result;
use anyhow::{Result, anyhow};
use log::warn;
use shared::core::shape::Shape;
use shared::shapes::BilinearPatchShape;
@ -19,9 +19,9 @@ impl CreateShape for BilinearPatchShape {
_object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Vec<Shape>> {
let mut vertex_indices = parameters.get_int_array("indices");
let p = parameters.get_point3f_array("P");

View file

@ -62,9 +62,9 @@ impl CreateShape for CurveShape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Vec<Shape>> {
let width = parameters.get_one_float("width", 1.0);
let width0 = parameters.get_one_float("width0", width);

View file

@ -14,9 +14,9 @@ impl CreateShape for CylinderShape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>> {
let radius = parameters.get_one_float("radius", 1.);
let z_min = parameters.get_one_float("zmin", -1.);

View file

@ -14,9 +14,9 @@ impl CreateShape for DiskShape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>> {
let height = parameters.get_one_float("height", 0.);
let radius = parameters.get_one_float("radius", 1.);

View file

@ -14,7 +14,7 @@ impl CreateShape for SphereShape {
object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_textures: HashMap<String, Arc<FloatTexture>>,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
arena: &mut Arena,
) -> Result<Vec<Shape>> {

View file

@ -16,9 +16,9 @@ impl CreateShape for TriangleShape {
_object_from_render: Transform,
reverse_orientation: bool,
parameters: ParameterDictionary,
_float_texture: HashMap<String, Arc<FloatTexture>>,
_float_texture: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Vec<Shape>> {
let mut vertex_indices = parameters.get_int_array("indices");
let p = parameters.get_point3f_array("P");

View file

@ -10,7 +10,7 @@ use shared::utils::ptr::Ptr;
#[derive(Clone, Debug)]
pub struct RGBColorSpaceData {
_illuminant: DenselySampledSpectrumBuffer,
illuminant: Arc<DenselySampledSpectrumBuffer>,
pub view: RGBColorSpace,
}
@ -26,8 +26,8 @@ impl RGBColorSpaceData {
r: Point2f,
g: Point2f,
b: Point2f,
illuminant: DenselySampledSpectrumBuffer,
rgb_to_spectrum_table: Ptr<RGBToSpectrumTable>,
illuminant: Arc<DenselySampledSpectrumBuffer>,
rgb_to_spectrum_table: Arc<RGBToSpectrumTable>,
) -> Self {
let stdspec = get_spectra_context();
let w_xyz: XYZ = Spectrum::Dense(illuminant.device()).to_xyz(&stdspec);
@ -56,8 +56,9 @@ impl RGBColorSpaceData {
rgb_from_xyz,
rgb_to_spectrum_table,
};
Self {
_illuminant: illuminant,
illuminant: illuminant.into(),
view,
}
}

View file

@ -17,10 +17,6 @@ pub mod piecewise;
pub use dense::DenselySampledSpectrumBuffer;
fn get_d65_illuminant_buffer() -> &DenselySampledSpectrumBuffer {
&CIE_D65_DATA
}
static CIE_X_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_X));
static CIE_Y_DATA: LazyLock<DenselySampledSpectrumBuffer> =
@ -30,6 +26,10 @@ static CIE_Z_DATA: LazyLock<DenselySampledSpectrumBuffer> =
static CIE_D65_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_D65));
fn get_d65_illuminant_buffer() -> Arc<DenselySampledSpectrumBuffer> {
Arc::from(*&CIE_D65_DATA)
}
pub fn cie_x() -> Spectrum {
Spectrum::Dense(CIE_X_DATA.device())
}

View file

@ -4,6 +4,7 @@ use crate::shapes::{BilinearPatchMesh, TriangleMesh};
use crate::spectra::DenselySampledSpectrumBuffer;
use crate::utils::mipmap::MIPMap;
use crate::utils::sampling::{PiecewiseConstant2D, WindowedPiecewiseConstant2D};
use parking_lot::Mutex;
use shared::core::color::RGBToSpectrumTable;
use shared::core::image::DeviceImage;
use shared::core::light::Light;
@ -23,6 +24,15 @@ use std::collections::HashMap;
use std::slice::from_raw_parts;
use std::sync::Arc;
pub struct Arena {
inner: Mutex<ArenaInner>,
}
struct ArenaInner {
buffer: Vec<(*mut u8, Layout)>,
texture_cache: HashMap<usize, u64>,
}
pub struct Arena {
buffer: Vec<(*mut u8, Layout)>,
texture_cache: HashMap<usize, u64>,
@ -31,14 +41,15 @@ pub struct Arena {
impl Arena {
pub fn new() -> Self {
Self {
buffer: Vec::new(),
texture_cache: HashMap::new(),
inner: Mutex::new(ArenaInner {
buffer: Vec::new(),
texture_cache: HashMap::new(),
}),
}
}
pub fn alloc<T>(&mut self, value: T) -> Ptr<T> {
pub fn alloc<T>(&self, value: T) -> Ptr<T> {
let layout = Layout::new::<T>();
let ptr = unsafe { self.alloc_unified(layout) } as *mut T;
unsafe {
@ -48,14 +59,14 @@ impl Arena {
Ptr::from_raw(ptr)
}
pub fn alloc_opt<T>(&mut self, value: Option<T>) -> Ptr<T> {
pub fn alloc_opt<T>(&self, value: Option<T>) -> Ptr<T> {
match value {
Some(v) => self.alloc(v),
None => Ptr::null(),
}
}
pub fn alloc_slice<T: Copy>(&mut self, values: &[T]) -> (Ptr<T>, usize) {
pub fn alloc_slice<T: Copy>(&self, values: &[T]) -> (Ptr<T>, usize) {
if values.is_empty() {
return (Ptr::null(), 0);
}
@ -71,7 +82,7 @@ impl Arena {
}
#[cfg(feature = "cuda")]
unsafe fn alloc_unified(&mut self, layout: Layout) -> *mut u8 {
unsafe fn alloc_unified(&self, layout: Layout) -> *mut u8 {
use cuda_runtime_sys::*;
let mut ptr: *mut std::ffi::c_void = std::ptr::null_mut();
@ -87,10 +98,18 @@ impl Arena {
ptr as *mut u8
}
pub fn get_texture_object(&mut self, mipmap: &Arc<MIPMap>) -> u64 {
let key = Arc::as_ptr(mipmap) as usize;
#[cfg(not(feature = "cuda"))]
unsafe fn alloc_unified(&self, layout: Layout) -> *mut u8 {
let ptr = unsafe { std::alloc::alloc(layout) };
self.buffer.push((ptr, layout));
ptr
}
if let Some(&tex_obj) = self.texture_cache.get(&key) {
pub fn get_texture_object(&self, mipmap: &Arc<MIPMap>) -> u64 {
let key = Arc::as_ptr(mipmap) as usize;
let mut inner = self.inner.lock();
if let Some(&tex_obj) = inner.texture_cache.get(&key) {
return tex_obj;
}
@ -115,13 +134,6 @@ impl Arena {
0
}
#[cfg(not(feature = "cuda"))]
unsafe fn alloc_unified(&mut self, layout: Layout) -> *mut u8 {
let ptr = unsafe { std::alloc::alloc(layout) };
self.buffer.push((ptr, layout));
ptr
}
// pub fn raw_data(&self) -> &[u8] {
// &self.buffer
// }
@ -129,6 +141,7 @@ impl Arena {
impl Drop for Arena {
fn drop(&mut self) {
let inner = self.inner.get_mut().unwrap();
for (ptr, layout) in self.buffer.drain(..) {
unsafe {
#[cfg(feature = "cuda")]
@ -144,57 +157,60 @@ impl Drop for Arena {
}
}
unsafe impl Send for Arena {}
unsafe impl Sync for Arena {}
pub trait Upload {
type Target: Copy;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target>;
fn upload(&self, arena: &Arena) -> Ptr<Self::Target>;
}
impl Upload for Shape {
type Target = Shape;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone())
}
}
impl Upload for Light {
type Target = Light;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone())
}
}
impl Upload for Image {
type Target = DeviceImage;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(*self.device_image())
}
}
impl Upload for Spectrum {
type Target = Spectrum;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone())
}
}
impl Upload for Material {
type Target = Material;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.clone())
}
}
impl Upload for DenselySampledSpectrumBuffer {
type Target = DenselySampledSpectrum;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.device())
}
}
impl Upload for SpectrumTexture {
type Target = GPUSpectrumTexture;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let gpu_variant = match self {
SpectrumTexture::Constant(tex) => GPUSpectrumTexture::Constant(tex.clone()),
SpectrumTexture::Checkerboard(tex) => GPUSpectrumTexture::Checkerboard(tex.clone()),
@ -265,7 +281,7 @@ impl Upload for SpectrumTexture {
impl Upload for FloatTexture {
type Target = GPUFloatTexture;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let gpu_variant = match self {
FloatTexture::Constant(tex) => GPUFloatTexture::Constant(tex.clone()),
FloatTexture::Checkerboard(tex) => GPUFloatTexture::Checkerboard(tex.clone()),
@ -327,7 +343,7 @@ impl Upload for FloatTexture {
impl Upload for RGBToSpectrumTable {
type Target = RGBToSpectrumTable;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let n_nodes = self.n_nodes as usize;
let z_slice = unsafe { from_raw_parts(self.z_nodes.as_raw(), n_nodes) };
let coeffs_slice = unsafe { from_raw_parts(self.coeffs.as_raw(), n_nodes) };
@ -347,7 +363,7 @@ impl Upload for RGBToSpectrumTable {
impl Upload for RGBColorSpace {
type Target = RGBColorSpace;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let table_ptr = self.rgb_to_spectrum_table.upload(arena);
let shared_space = RGBColorSpace {
@ -368,7 +384,7 @@ impl Upload for RGBColorSpace {
impl Upload for DeviceStandardColorSpaces {
type Target = DeviceStandardColorSpaces;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let srgb_ptr = self.srgb.upload(arena);
let dci_ptr = self.dci_p3.upload(arena);
let rec_ptr = self.rec2020.upload(arena);
@ -388,7 +404,7 @@ impl Upload for DeviceStandardColorSpaces {
impl Upload for PiecewiseConstant2D {
type Target = DevicePiecewiseConstant2D;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let marginal_shared = self.marginal.to_shared(arena);
let conditionals_shared: Vec<DevicePiecewiseConstant1D> = self
@ -411,7 +427,7 @@ impl Upload for PiecewiseConstant2D {
impl Upload for WindowedPiecewiseConstant2D {
type Target = DeviceWindowedPiecewiseConstant2D;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let specific = DeviceWindowedPiecewiseConstant2D {
sat: self.sat,
func: self.func,
@ -423,7 +439,7 @@ impl Upload for WindowedPiecewiseConstant2D {
impl Upload for TriangleMesh {
type Target = DeviceTriangleMesh;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let storage = &self.storage;
// Upload all arrays to arena
@ -471,7 +487,7 @@ impl Upload for TriangleMesh {
impl Upload for BilinearPatchMesh {
type Target = DeviceBilinearPatchMesh;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
let storage = &self.storage;
let (vertex_indices_ptr, _) = arena.alloc_slice(&storage.vertex_indices);
@ -506,7 +522,7 @@ impl Upload for BilinearPatchMesh {
impl<T: Upload> Upload for Option<T> {
type Target = T::Target;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
match self {
Some(val) => val.upload(arena),
None => Ptr::null(),
@ -517,7 +533,7 @@ impl<T: Upload> Upload for Option<T> {
impl<T: Upload> Upload for Arc<T> {
type Target = T::Target;
fn upload(&self, arena: &mut Arena) -> Ptr<Self::Target> {
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
(**self).upload(arena)
}
}

View file

@ -36,7 +36,7 @@ where
#[derive(Debug, Clone)]
pub struct Array2D<T> {
pub device: DeviceArray2D<T>,
values: Vec<T>,
pub values: Vec<T>,
}
impl<T> Deref for Array2D<T> {

View file

@ -6,6 +6,7 @@ use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use crate::Arena;
use crate::utils::error::FileLoc;
use crate::utils::parameters::{ParameterDictionary, ParsedParameter, ParsedParameterVector};
use shared::Float;
@ -49,7 +50,7 @@ pub trait ParserTarget {
fn medium_interface(&mut self, inside_name: &str, outside_name: &str, loc: FileLoc);
fn sampler(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn world_begin(&mut self, loc: FileLoc);
fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena);
fn attribute_begin(&mut self, loc: FileLoc);
fn attribute_end(&mut self, loc: FileLoc);
fn attribute(&mut self, target: &str, params: ParsedParameterVector, loc: FileLoc);
@ -61,6 +62,7 @@ pub trait ParserTarget {
tex_name: &str,
params: &ParsedParameterVector,
loc: FileLoc,
arena: &mut Arena,
);
fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
@ -457,7 +459,7 @@ impl ParserTarget for FormattingParserTarget {
println!("{}CoordSysTransform \"{}\"", self.indent(0), name);
}
fn world_begin(&mut self, _loc: FileLoc) {
fn world_begin(&mut self, _loc: FileLoc, _arena: &mut Arena) {
println!("{}WorldBegin", self.indent(0));
self.cat_indent_count += 4;
}
@ -497,6 +499,7 @@ impl ParserTarget for FormattingParserTarget {
tex_name: &str,
_params: &ParsedParameterVector,
_loc: FileLoc,
_arena: &mut Arena,
) {
println!(
"{}Texture \"{}\" \"{}\" \"{}\"",
@ -741,6 +744,7 @@ impl<'a> SceneParser<'a> {
}
pub fn run(&mut self) -> Result<(), ParserError> {
let mut arena = Arena::new();
loop {
let token = match self.next_token()? {
Some(t) => t,
@ -1009,7 +1013,7 @@ impl<'a> SceneParser<'a> {
let tex_name = self.expect_quoted_string()?;
let params = self.parse_parameters()?;
self.target
.texture(&name, &type_name, &tex_name, &params, token.loc);
.texture(&name, &type_name, &tex_name, &params, token.loc, arena);
}
_ => {
return Err(ParserError::Generic(
@ -1020,7 +1024,7 @@ impl<'a> SceneParser<'a> {
},
'W' => match token.text.as_str() {
"WorldBegin" => self.target.world_begin(token.loc),
"WorldBegin" => self.target.world_begin(token.loc, &mut arena),
"WorldEnd" => {}
_ => {
return Err(ParserError::Generic(

View file

@ -2,7 +2,7 @@ use crate::core::image::Image;
use crate::utils::Arena;
use crate::utils::containers::Array2D;
use shared::Float;
use shared::core::geometry::{Point2i, Vector2f, Vector2i};
use shared::core::geometry::{Bounds2f, Point2i, Vector2f, Vector2i};
use shared::utils::Ptr;
use shared::utils::sampling::{
AliasTable, Bin, DevicePiecewiseConstant1D, DevicePiecewiseConstant2D, DeviceSummedAreaTable,
@ -133,26 +133,37 @@ impl std::ops::Deref for PiecewiseConstant2D {
}
impl PiecewiseConstant2D {
pub fn new(data: &[Float], n_u: usize, n_v: usize) -> Self {
pub fn new(data: &Array2D<Float>) -> Self {
Self::new_with_bounds(data, Bounds2f::unit())
}
pub fn new_with_bounds(data: &Array2D<Float>, domain: Bounds2f) -> Self {
Self::from_slice(
data.as_slice(),
data.x_size() as usize,
data.y_size() as usize,
domain,
)
}
pub fn from_slice(data: &[Float], n_u: usize, n_v: usize, domain: Bounds2f) -> Self {
assert_eq!(data.len(), n_u * n_v);
// Build conditional distributions p(u|v) for each row
let mut conditionals = Vec::with_capacity(n_v);
let mut marginal_func = Vec::with_capacity(n_v);
for v in 0..n_v {
let row_start = v * n_u;
let row: Vec<Float> = data[row_start..row_start + n_u].to_vec();
let conditional = PiecewiseConstant1D::new_with_bounds(row, 0.0, 1.0);
let conditional =
PiecewiseConstant1D::new_with_bounds(row, domain.p_min.x(), domain.p_max.x());
marginal_func.push(conditional.integral());
conditionals.push(conditional);
}
// Build marginal distribution p(v)
let marginal = PiecewiseConstant1D::new_with_bounds(marginal_func, 0.0, 1.0);
let marginal =
PiecewiseConstant1D::new_with_bounds(marginal_func, domain.p_min.y(), domain.p_max.y());
// Create array of device structs
let conditional_devices: Box<[DevicePiecewiseConstant1D]> = conditionals
.iter()
.map(|c| c.device)
@ -189,7 +200,7 @@ impl PiecewiseConstant2D {
}
}
Self::new(&data, n_u, n_v)
Self::from_slice(&data, n_u, n_v, Bounds2f::unit())
}
pub fn integral(&self) -> Float {