Continue fixing errors introduced by thoughtless copying of shared code

This commit is contained in:
Wito Wiala 2026-02-13 14:16:22 +00:00
parent a32cd60e9f
commit 30e8cf85f8
38 changed files with 174 additions and 147 deletions

View file

@ -20,7 +20,7 @@ To get a local copy up and running, follow these simple steps.
1. **Clone the repository:**
```sh
git clone <your-repository-url>
git clone <repository>
cd pbrt
```
@ -43,6 +43,7 @@ This project relies on the following external crates:
* [**once_cell**](https://crates.io/crates/once_cell)
* [**rand**](https://crates.io/crates/rand)
* [**thiserror**](https://crates.io/crates/thiserror)
* TODO: Generate the full dependency list with `cargo doc`; many more crates are used than are listed here.
## Help

View file

@ -6,4 +6,4 @@ mod spherical;
pub use orthographic::OrthographicCamera;
pub use perspective::PerspectiveCamera;
pub use realistic::RealisticCamera;
pub use spherical::{SphericalCamera, Mapping};
pub use spherical::{Mapping, SphericalCamera};

View file

@ -96,7 +96,7 @@ impl CameraTrait for OrthographicCamera {
p_camera,
Vector3f::new(0., 0., 1.),
Some(self.sample_time(sample.time)),
&*self.base().medium,
&self.base().medium,
);
if self.lens_radius > 0. {
let p_lens_vec =

View file

@ -21,7 +21,7 @@ use crate::utils::transform::AnimatedTransform;
use crate::utils::{AtomicFloat, Ptr};
#[repr(C)]
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct RGBFilm {
pub base: FilmBase,
pub max_component_value: Float,
@ -32,7 +32,7 @@ pub struct RGBFilm {
}
#[repr(C)]
#[derive(Debug)]
#[derive(Debug, Clone, Default)]
pub struct RGBPixel {
rgb_sum: [AtomicFloat; 3],
weight_sum: AtomicFloat,
@ -152,12 +152,12 @@ impl RGBFilm {
}
#[repr(C)]
#[derive(Debug, Default)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
#[derive(Debug, Default, Clone)]
#[cfg_attr(target_os = "cuda", derive(Copy))]
pub struct GBufferPixel {
pub rgb_sum: [AtomicFloat; 3],
pub weight_sum: AtomicFloat,
pub g_bugger_weight_sum: AtomicFloat,
pub g_buffer_weight_sum: AtomicFloat,
pub rgb_splat: [AtomicFloat; 3],
pub p_sum: Point3f,
pub dz_dx_sum: AtomicFloat,
@ -170,8 +170,8 @@ pub struct GBufferPixel {
}
#[repr(C)]
#[derive(Debug)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
#[derive(Debug, Clone)]
#[cfg_attr(target_os = "cuda", derive(Copy))]
pub struct GBufferFilm {
pub base: FilmBase,
pub output_from_render: AnimatedTransform,
@ -206,7 +206,7 @@ impl GBufferFilm {
}
pub fn add_sample(
&mut self,
&self,
_p_film: Point2i,
_l: SampledSpectrum,
_lambda: &SampledWavelengths,
@ -286,15 +286,37 @@ impl GBufferFilm {
}
#[repr(C)]
#[derive(Debug, Default)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
#[derive(Debug)]
#[cfg_attr(target_os = "cuda", derive(Copy))]
pub struct SpectralPixel {
pub rgb_sum: [AtomicFloat; 3],
pub rgb_weigh_sum: AtomicFloat,
pub rgb_weight_sum: AtomicFloat,
pub rgb_splat: [AtomicFloat; 3],
pub bucket_offset: usize,
}
impl Clone for SpectralPixel {
fn clone(&self) -> Self {
Self {
rgb_sum: std::array::from_fn(|i| AtomicFloat::new(self.rgb_sum[i].get())),
rgb_weight_sum: AtomicFloat::new(self.rgb_weight_sum.get()),
rgb_splat: std::array::from_fn(|i| AtomicFloat::new(self.rgb_splat[i].get())),
bucket_offset: self.bucket_offset,
}
}
}
/// Zero-initialized spectral pixel: all accumulators start at 0.0 and the
/// bucket offset at 0. (Hand-written because the atomic fields make the
/// derive unavailable on non-CUDA targets.)
impl Default for SpectralPixel {
    fn default() -> Self {
        // AtomicFloat::default() is AtomicFloat::new(0.), so delegating
        // per field is equivalent to building each accumulator explicitly.
        Self {
            rgb_sum: Default::default(),
            rgb_weight_sum: AtomicFloat::default(),
            rgb_splat: Default::default(),
            bucket_offset: 0,
        }
    }
}
#[repr(C)]
#[derive(Debug)]
#[cfg_attr(target_os = "cuda", derive(Copy, Clone))]
@ -331,7 +353,7 @@ impl SpectralFilm {
}
pub fn add_sample(
&mut self,
&self,
_p_film: Point2i,
_l: SampledSpectrum,
_lambda: &SampledWavelengths,
@ -469,7 +491,7 @@ impl Film {
}
pub fn add_sample(
&mut self,
&self,
p_film: Point2i,
l: SampledSpectrum,
lambda: &SampledWavelengths,

View file

@ -59,11 +59,23 @@ where
}
#[repr(C)]
#[derive(Debug, Default)]
#[derive(Debug)]
pub struct AtomicFloat {
bits: AtomicU32,
}
impl Default for AtomicFloat {
fn default() -> Self {
Self::new(0.)
}
}
/// Snapshot clone: the copy starts at this instance's current value and is
/// thereafter fully independent of the original.
impl Clone for AtomicFloat {
    fn clone(&self) -> Self {
        let current = self.get();
        Self::new(current)
    }
}
impl AtomicFloat {
pub fn new(val: f32) -> Self {
Self {

View file

@ -3,6 +3,7 @@ use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageIO, I
use crate::films::*;
use crate::spectra::{SRGB, data::get_named_spectrum};
use anyhow::{Result, anyhow};
use rayon::iter::ParallelIterator;
use rayon::prelude::IntoParallelIterator;
use shared::Float;
use shared::core::camera::CameraTransform;
@ -14,6 +15,7 @@ use shared::core::filter::{Filter, FilterTrait};
use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i};
use shared::core::image::PixelFormat;
use shared::core::spectrum::Spectrum;
use shared::spectra::cie::CIE_Y;
use shared::spectra::{
DenselySampledSpectrum, PiecewiseLinearSpectrum, RGBColorSpace, cie::SWATCHES_RAW,
};
@ -21,7 +23,9 @@ use shared::utils::math::{SquareMatrix, linear_least_squares};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, LazyLock};
use crate::spectra::{DenselySampledSpectrumBuffer, get_spectra_context};
use crate::spectra::{
CIE_X_DATA, CIE_Y_DATA, CIE_Z_DATA, DenselySampledSpectrumBuffer, get_spectra_context,
};
use crate::utils::{FileLoc, ParameterDictionary};
const N_SWATCH_REFLECTANCES: usize = 24;
@ -43,7 +47,7 @@ pub trait PixelSensorTrait {
output_colorspace: Arc<RGBColorSpace>,
exposure_time: Float,
loc: &FileLoc,
) -> Result<Self, String> {
) -> Result<Self> {
let iso = params.get_one_float("iso", 100.);
let mut white_balance_temp = params.get_one_float("whitebalance", 0.);
let sensor_name = params.get_one_string("sensor", "cie1931");
@ -75,11 +79,11 @@ pub trait PixelSensorTrait {
let g_opt = get_named_spectrum(&format!("{}_g", sensor_name));
let b_opt = get_named_spectrum(&format!("{}_b", sensor_name));
if r_opt.is_none() || g_opt.is_none() || b_opt.is_none() {
return Err(format!(
return anyhow!(
"{}: unknown sensor type '{}' (missing RGB spectral data)",
loc, sensor_name
)
.into());
loc,
sensor_name
);
}
let r = Arc::new(r_opt.unwrap());
@ -114,9 +118,9 @@ pub trait PixelSensorTrait {
None => &output_colorspace.illuminant,
};
let r_bar = DenselySampledSpectrum::from_spectrum(&r);
let g_bar = DenselySampledSpectrum::from_spectrum(&g);
let b_bar = DenselySampledSpectrum::from_spectrum(&b);
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(&r).device();
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(&g).device();
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(&b).device();
let mut rgb_camera = [[0.; 3]; N_SWATCH_REFLECTANCES];
let swatches = Self::get_swatches();
@ -125,9 +129,9 @@ pub trait PixelSensorTrait {
let rgb = Self::project_reflectance::<RGB>(
&swatches[i],
illum,
&Spectrum::Dense(r_bar.clone()),
&Spectrum::Dense(g_bar.clone()),
&Spectrum::Dense(b_bar.clone()),
&Spectrum::Dense(r_bar),
&Spectrum::Dense(g_bar),
&Spectrum::Dense(b_bar),
);
for c in 0..3 {
rgb_camera[i][c] = rgb[c];
@ -170,13 +174,13 @@ pub trait PixelSensorTrait {
imaging_ratio: Float,
) -> Self {
let spectra = get_spectra_context();
let r_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.x);
let g_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.y);
let b_bar = DenselySampledSpectrumBuffer::from_spectrum(spectra.z);
let r_bar = CIE_X_DATA.clone();
let g_bar = CIE_Y_DATA.clone();
let b_bar = CIE_Z_DATA.clone();
let xyz_from_sensor_rgb: SquareMatrix<Float, 3>;
if let Some(illum) = sensor_illum {
let source_white = illum.to_xyz(spectra).xy();
let source_white = illum.to_xyz(&spectra).xy();
let target_white = output_colorspace.w;
xyz_from_sensor_rgb = white_balance(source_white, target_white);
} else {

View file

@ -102,8 +102,8 @@ impl PixelStorage {
#[derive(Debug, Clone)]
pub struct Image {
storage: PixelStorage,
channel_names: Vec<String>,
pub pixels: PixelStorage,
pub channel_names: Vec<String>,
pub device: DeviceImage,
}
@ -144,7 +144,7 @@ impl Image {
};
Self {
storage,
pixels: storage,
channel_names,
device,
}
@ -248,7 +248,7 @@ impl Image {
}
// Access
pub fn device_image(&self) -> &DeviceImage {
pub fn device(&self) -> &DeviceImage {
&self.device
}
@ -290,7 +290,7 @@ impl Image {
let offset = self.pixel_offset(p) + c as usize;
match &self.storage {
match &self.pixels {
PixelStorage::U8(data) => self.device.base.encoding.to_linear_scalar(data[offset]),
PixelStorage::F16(data) => data[offset].to_f32(),
PixelStorage::F32(data) => data[offset],
@ -314,7 +314,7 @@ impl Image {
let nc = self.n_channels() as usize;
let mut values = SmallVec::with_capacity(nc);
match &self.storage {
match &self.pixels {
PixelStorage::U8(data) => {
for i in 0..nc {
values.push(self.device.base.encoding.to_linear_scalar(data[offset + i]));
@ -348,17 +348,14 @@ impl Image {
let offset = self.pixel_offset(p) + c as usize;
match &mut self.storage {
match &mut self.pixels {
PixelStorage::U8(data) => {
let data = Box::as_mut(data);
data[offset] = self.device.base.encoding.from_linear_scalar(value);
}
PixelStorage::F16(data) => {
let data = Box::as_mut(data);
data[offset] = f16::from_f32(value);
}
PixelStorage::F32(data) => {
let data = Box::as_mut(data);
data[offset] = value;
}
}
@ -379,7 +376,7 @@ impl Image {
let pixel_offset = self.pixel_offset(pp);
let mut values = SmallVec::with_capacity(desc.offset.len());
match &self.storage {
match &self.pixels {
PixelStorage::U8(data) => {
for &c in &desc.offset {
let raw = data[pixel_offset + c];
@ -410,7 +407,7 @@ impl Image {
pub fn get_channel_desc(
&self,
requested_channels: &[impl AsRef<str>],
requested_channels: &[impl AsRef<str> + std::fmt::Display],
) -> Result<ImageChannelDesc> {
let mut offset = Vec::with_capacity(requested_channels.len());
@ -450,7 +447,7 @@ impl Image {
let src_nc = self.n_channels() as usize;
let dst_nc = desc.offset.len();
let new_storage = match &self.storage {
let new_storage = match &self.pixels {
PixelStorage::U8(src) => {
let mut dst = vec![0u8; pixel_count * dst_nc];
for i in 0..pixel_count {
@ -580,7 +577,7 @@ impl Image {
}
pub fn update_view_pointers(&mut self) {
self.device.pixels = match &self.storage {
self.device.pixels = match &self.pixels {
PixelStorage::U8(vec) => Pixels::U8(vec.as_ptr().into()),
PixelStorage::F16(vec) => Pixels::F16((vec.as_ptr() as *const f16).into()),
PixelStorage::F32(vec) => Pixels::F32(vec.as_ptr().into()),

View file

@ -53,7 +53,7 @@ impl RGBGridMediumCreator for RGBGridMedium {
le_grid: SampledGrid<RGBIlluminantSpectrum>,
le_scale: Float,
) -> Self {
let majorant_grid = MajorantGrid::new(*bounds, Point3i::new(16, 16, 16));
let mut majorant_grid = MajorantGridHost::new(*bounds, Point3i::new(16, 16, 16)).device;
for z in 0..majorant_grid.res.x() {
for y in 0..majorant_grid.res.y() {
for x in 0..majorant_grid.res.x() {
@ -117,7 +117,7 @@ impl GridMediumCreator for GridMedium {
let le_spec = DenselySampledSpectrumBuffer::from_spectrum(le);
let mut majorant_grid = MajorantGrid::new(*bounds, Point3i::new(16, 16, 16));
let mut majorant_grid = MajorantGridHost::new(*bounds, Point3i::new(16, 16, 16)).device;
let is_emissive = if temperature_grid.is_some() {
true
} else {

View file

@ -12,7 +12,8 @@ use shared::core::camera::CameraTransform;
use shared::core::geometry::Vector3f;
use shared::core::options::RenderingCoordinateSystem;
use shared::spectra::RGBColorSpace;
use shared::utils::transform::{AnimatedTransform, Transform, look_at};
use shared::utils::transform;
use shared::utils::transform::{self, AnimatedTransform, Transform};
use std::collections::{HashMap, HashSet};
use std::ops::{Index, IndexMut};
use std::sync::Arc;
@ -192,7 +193,7 @@ impl ParserTarget for BasicSceneBuilder {
let stdcs = get_colorspace_device();
let _ = match stdcs.get_named(name) {
Ok(cs) => {
self.graphics_state.color_space = Some(cs);
self.graphics_state.color_space = unsafe { Some(Arc::new(*cs.as_ref())) };
}
Err(_) => {
eprintln!("Error: Color space '{}' unknown at {}", name, loc);
@ -232,7 +233,7 @@ impl ParserTarget for BasicSceneBuilder {
uz: Float,
loc: FileLoc,
) {
let result = look_at((ex, ey, ez), (lx, ly, lz), (ux, uy, uz));
let result = transform::look_at((ex, ey, ez), (lx, ly, lz), (ux, uy, uz));
match result {
Ok(t) => {
self.for_active_transforms(|cur| cur * &t);
@ -531,7 +532,7 @@ impl ParserTarget for BasicSceneBuilder {
tex_name: &str,
params: &ParsedParameterVector,
loc: FileLoc,
arena: &mut Arena,
arena: Arc<Arena>,
) {
let name = normalize_utf8(orig_name);
self.verify_world("Texture", &loc);
@ -588,9 +589,9 @@ impl ParserTarget for BasicSceneBuilder {
let entity = SceneEntity {
name: name.to_string(),
loc,
parameters: ParameterDictionary::new(*params, None),
parameters: ParameterDictionary::new(params.clone(), None),
};
self.graphics_state.current_material_name = self.scene.add_material(entity);
self.graphics_state.current_material_name = self.scene.add_material(entity).to_string();
}
fn make_named_material(&mut self, _name: &str, _params: &ParsedParameterVector, _loc: FileLoc) {
todo!()

View file

@ -189,7 +189,7 @@ impl BasicScene {
get_jobs: impl FnOnce(&mut TextureState) -> &mut HashMap<String, AsyncJob<Arc<T>>>,
create_fn: F,
) where
T: Send + 'static,
T: Send + Sync + 'static,
F: FnOnce(TextureSceneEntity) -> T + Send + 'static,
{
if texture.render_from_object.is_animated() {
@ -237,16 +237,15 @@ impl BasicScene {
true
}
pub fn add_float_texture(&self, name: String, texture: TextureSceneEntity, arena: &mut Arena) {
pub fn add_float_texture(&self, name: String, texture: TextureSceneEntity, arena: Arc<Arena>) {
let mut state = self.texture_state.lock();
let arena = arena.clone();
self.add_texture_generic(
name,
texture,
&mut state,
|s| &mut s.serial_float_textures,
|s| &mut s.float_texture_jobs,
|tex| {
move |tex| {
let render_from_texture = tex.render_from_object.start_transform;
let tex_dict = TextureParameterDictionary::new(tex.base.parameters.into(), None);
FloatTexture::create(
@ -254,7 +253,7 @@ impl BasicScene {
render_from_texture,
tex_dict,
tex.base.loc,
arena,
&arena,
)
.expect("Could not create Float texture")
},
@ -265,7 +264,7 @@ impl BasicScene {
&self,
name: String,
texture: TextureSceneEntity,
arena: &mut Arena,
arena: Arc<Arena>,
) {
let mut state = self.texture_state.lock();
self.add_texture_generic(
@ -274,7 +273,7 @@ impl BasicScene {
&mut state,
|s| &mut s.serial_spectrum_textures,
|s| &mut s.spectrum_texture_jobs,
|tex| {
move |tex| {
let render_from_texture = tex.render_from_object.start_transform;
let tex_dict = TextureParameterDictionary::new(tex.base.parameters.into(), None);
SpectrumTexture::create(
@ -283,7 +282,7 @@ impl BasicScene {
tex_dict,
SpectrumType::Albedo,
tex.base.loc,
arena,
&arena,
)
.expect("Could not create spectrum texture")
},
@ -378,8 +377,8 @@ impl BasicScene {
) -> (HashMap<String, Material>, Vec<Material>) {
let mut state = self.material_state.lock();
// Resolve normal map jobs
for (filename, job) in state.normal_map_jobs.drain() {
let finished: Vec<_> = state.normal_map_jobs.drain().collect();
for (filename, job) in finished {
state.normal_maps.insert(filename, job.wait());
}

View file

@ -56,7 +56,7 @@ pub trait CreateFloatTexture {
render_from_texture: Transform,
params: TextureParameterDictionary,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<FloatTexture>;
}
@ -66,7 +66,7 @@ impl FloatTexture {
render_from_texture: Transform,
params: TextureParameterDictionary,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Self> {
match name {
"constant" => FloatConstantTexture::create(render_from_texture, params, loc, arena),
@ -122,7 +122,7 @@ impl SpectrumTexture {
params: TextureParameterDictionary,
spectrum_type: SpectrumType,
loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<Self> {
match name {
"constant" => {

View file

@ -1,6 +1,7 @@
use super::*;
use crate::core::film::{CreateFilmBase, PixelSensor, PixelSensorTrait};
use crate::core::film::{CreateFilmBase, PixelSensor};
use crate::utils::containers::Array2D;
use anyhow::{Result, anyhow};
use shared::core::film::{DevicePixelSensor, FilmBase, GBufferFilm};
use shared::core::filter::FilterTrait;
use shared::spectra::RGBColorSpace;
@ -25,7 +26,7 @@ impl GBufferFilmHost {
if sensor_ptr.is_null() {
panic!("Film must have a sensor");
}
let sensor = unsafe { &*sensor_ptr };
let sensor = &*sensor_ptr;
let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
let filter_integral = base.filter.integral();
let pixels = Array2D::new(base.pixel_bounds);
@ -64,13 +65,13 @@ impl CreateFilm for GBufferFilm {
let filename = params.get_one_string("filename", "pbrt.exr");
if Path::new(&filename).extension() != Some("exr".as_ref()) {
return Err(format!("{}: EXR is the only format supported by GBufferFilm", loc).into());
return Err(anyhow!("{}: EXR is the only format supported by GBufferFilm", loc).into());
}
let coords_system = params.get_one_string("coordinatesystem", "camera");
let mut apply_inverse = false;
let camera_transform = camera_transform
.ok_or_else(|| "GBufferFilm requires a camera_transform".to_string())?;
let camera_transform =
camera_transform.ok_or_else(|| anyhow!("GBufferFilm requires a camera_transform"))?;
let output_from_render = if coords_system == "camera" {
apply_inverse = true;
camera_transform.render_from_camera

View file

@ -2,6 +2,7 @@ use super::*;
use crate::Arena;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use anyhow::Result;
use shared::core::camera::CameraTransform;
use shared::core::film::{DevicePixelSensor, Film, FilmBase, RGBFilm, RGBPixel};
use shared::core::filter::FilterTrait;
@ -64,10 +65,10 @@ impl CreateFilm for RGBFilm {
params: &ParameterDictionary,
exposure_time: Float,
filter: Filter,
camera_transform: Option<CameraTransform>,
_camera_transform: Option<CameraTransform>,
loc: &FileLoc,
arena: &mut Arena,
) -> anyhow::Result<Film> {
_ arena: &Arena,
) -> Result<Film> {
let colorspace = params.color_space.as_ref().unwrap();
let max_component_value = params.get_one_float("maxcomponentvalue", Float::INFINITY);
let write_fp16 = params.get_one_bool("savefp16", true);

View file

@ -2,6 +2,7 @@ use super::*;
use crate::core::film::{CreateFilmBase, PixelSensorTrait};
use crate::utils::containers::Array2D;
use crate::{Arena, FileLoc, ParameterDictionary};
use anyhow::Result;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::film::{
@ -24,7 +25,7 @@ struct SpectralFilmStorage {
pub struct SpectralFilmHost {
pub device: SpectralFilm,
storage: Box<SpectralFilmStorage>,
storage: Arc<SpectralFilmStorage>,
}
impl SpectralFilmHost {
@ -56,20 +57,11 @@ impl SpectralFilmHost {
for i in 0..n_pixels {
let pixel = pixels.get_linear_mut(i);
pixel.bucket_offset = i * n_buckets;
unsafe {
let offset = i * n_buckets;
pixel.bucket_sums = p_sums_base.add(offset);
pixel.weight_sums = p_weights_base.add(offset);
pixel.bucket_splats = p_splats_base.add(offset);
}
}
let storage = Box::new(SpectralFilmStorage {
pixels,
let storage = Arc::new(SpectralFilmStorage {
pixels: pixels.device,
bucket_sums,
weight_sums,
bucket_splats,
@ -87,7 +79,7 @@ impl SpectralFilmHost {
output_rgbf_from_sensor_rgb: SquareMatrix::identity(),
pixels: DeviceArray2D {
values: Ptr::from(&storage.pixels),
values: storage.pixels.as_ptr(),
extent: base.pixel_bounds,
stride: base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x(),
},
@ -97,7 +89,7 @@ impl SpectralFilmHost {
bucket_splats: storage.bucket_splats.as_ptr() as *mut AtomicFloat,
};
Self { device, storage };
Self { device, storage }
}
}

View file

@ -0,0 +1 @@

View file

@ -4,7 +4,7 @@ use shared::core::interaction::{Interaction, InteractionTrait};
use shared::core::light::{Light, LightTrait};
use shared::core::primitive::{Primitive, PrimitiveTrait};
use shared::core::shape::ShapeIntersection;
use shared::lights::sampler::LightSampler;
use shared::lights::sampler::{LightSampler, LightSamplerTrait};
use shared::spectra::SampledWavelengths;
use shared::utils::sampling::power_heuristic;
use shared::{Float, SHADOW_EPSILON};

View file

@ -11,6 +11,7 @@ use shared::core::film::VisibleSurface;
use shared::core::geometry::{Point2i, Ray};
use shared::core::sampler::Sampler;
use shared::spectra::{SampledSpectrum, SampledWavelengths};
use std::sync::Arc;
pub trait IntegratorTrait {
fn render(&self);
@ -22,7 +23,7 @@ pub trait RayIntegratorTrait {
p_pixel: Point2i,
sample_ind: usize,
sampler: &mut Sampler,
arena: &mut Arena,
arena: &Arena,
);
fn li(
@ -31,6 +32,6 @@ pub trait RayIntegratorTrait {
lambda: &SampledWavelengths,
sampler: &mut Sampler,
visible_surface: bool,
arena: &mut Arena,
arena: &Arena,
) -> (SampledSpectrum, Option<VisibleSurface>);
}

View file

@ -223,6 +223,7 @@ impl RayIntegratorTrait for PathIntegrator {
break;
};
let t_hit = si.t_hit();
let isect = &mut si.intr;
// Emission from hit surface
@ -244,7 +245,7 @@ impl RayIntegratorTrait for PathIntegrator {
// Get BSDF
let Some(mut bsdf) = isect.get_bsdf(&ray, lambda, &self.camera, sampler) else {
state.specular_bounce = true;
isect.skip_intersection(&mut ray, si.t_hit());
isect.skip_intersection(&mut ray, t_hit);
continue;
};

View file

@ -17,6 +17,7 @@ use shared::core::sampler::{Sampler, SamplerTrait};
use shared::spectra::SampledSpectrum;
use std::io::Write;
use std::path::Path;
use std::sync::Arc;
struct PbrtProgress {
bar: ProgressBar,
@ -78,9 +79,9 @@ pub fn render<T>(
_base: &IntegratorBase,
camera: &Camera,
sampler_prototype: &Sampler,
arena: &mut Arena,
arena: Arc<Arena>,
) where
T: RayIntegratorTrait,
T: RayIntegratorTrait + Sync,
{
let options = get_options();
if let Some((p_pixel, sample_index)) = options.debug_start {
@ -95,7 +96,7 @@ pub fn render<T>(
&mut tile_sampler,
p_pixel,
s_index,
arena,
&arena,
);
return;
}
@ -168,7 +169,7 @@ pub fn render<T>(
&mut sampler,
*p_pixel,
sample_index.try_into().unwrap(),
arena,
&arena,
);
}
}
@ -206,7 +207,7 @@ pub fn render<T>(
let splat_scale = 1.0 / (wave_start as Float);
let film_metadata = ImageMetadata::default();
let film = *camera.get_film();
let film = camera.get_film();
let film_image = film.get_image(&film_metadata, splat_scale);
let (mse_values, _mse_debug_img) =
@ -230,7 +231,7 @@ pub fn evaluate_pixel_sample<T: RayIntegratorTrait>(
sampler: &mut Sampler,
pixel: Point2i,
_sample_index: usize,
arena: &mut Arena,
arena: &Arena,
) {
let mut lu = sampler.get1d();
if get_options().disable_wavelength_jitter {
@ -238,7 +239,7 @@ pub fn evaluate_pixel_sample<T: RayIntegratorTrait>(
}
let lambda = camera.get_film().sample_wavelengths(lu);
let mut film: &mut Film = camera.get_film();
let film = camera.get_film();
let filter = film.get_filter();
let camera_sample = get_camera_sample(sampler, pixel, filter);
if let Some(mut camera_ray) = camera.generate_ray_differential(camera_sample, &lambda) {

View file

@ -30,7 +30,6 @@ pub trait CreateDiffuseLight {
image: Ptr<DeviceImage>,
colorspace: Ptr<RGBColorSpace>,
two_sided: bool,
fov: Float,
) -> Self;
}
@ -42,10 +41,9 @@ impl CreateDiffuseLight for DiffuseAreaLight {
scale: Float,
shape: Ptr<Shape>,
alpha: Ptr<GPUFloatTexture>,
image: Ptr<DeviceImage>,
image: Ptr<Image>,
colorspace: Ptr<RGBColorSpace>,
two_sided: bool,
fov: Float,
) -> Self {
let is_constant_zero = match &*alpha {
GPUFloatTexture::Constant(tex) => tex.evaluate(&TextureEvalContext::default()) == 0.0,
@ -93,7 +91,7 @@ impl CreateDiffuseLight for DiffuseAreaLight {
Self {
base,
area: shape.area(),
image,
image: Ptr::from(image.device()),
colorspace,
shape,
alpha: stored_alpha.expect("Could not retrieve texture"),
@ -203,7 +201,6 @@ impl CreateLight for DiffuseAreaLight {
image.upload(arena),
image_color_space.upload(arena),
true,
shape.area(),
);
Ok(Light::DiffuseArea(specific))

View file

@ -48,7 +48,7 @@ impl CreateGoniometricLight for GoniometricLight {
base,
iemit: Ptr::from(&iemit.device()),
scale,
image: Ptr::from(image.device_image()),
image: Ptr::from(image.device()),
distrib: Ptr::from(&distrib.device),
}
}

View file

@ -81,7 +81,7 @@ impl CreateProjectionLight for ProjectionLight {
Self {
base,
image: Ptr::from(image.device_image()),
image: Ptr::from(image.device()),
image_color_space,
distrib: Ptr::from(&distrib.device),
screen_bounds,

View file

@ -114,7 +114,7 @@ impl CreateShape for BilinearPatchShape {
let host_arc = Arc::new(host);
let mut global_store = ALL_BILINEAR_MESHES.lock();
let mesh_index = global_store.len() as u32;
// let mesh_index = global_store.len() as u32;
global_store.push(host_arc.clone());
drop(global_store);
let n_patches = host_arc.device.n_patches;

View file

@ -16,7 +16,7 @@ impl CreateShape for SphereShape {
parameters: ParameterDictionary,
_float_textures: &HashMap<String, Arc<FloatTexture>>,
_loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
) -> Result<Vec<Shape>> {
let radius = parameters.get_one_float("radius", 1.);
let zmin = parameters.get_one_float("zmin", -radius);

View file

@ -17,17 +17,17 @@ pub mod piecewise;
pub use dense::DenselySampledSpectrumBuffer;
static CIE_X_DATA: LazyLock<DenselySampledSpectrumBuffer> =
pub static CIE_X_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_X));
static CIE_Y_DATA: LazyLock<DenselySampledSpectrumBuffer> =
pub static CIE_Y_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_Y));
static CIE_Z_DATA: LazyLock<DenselySampledSpectrumBuffer> =
pub static CIE_Z_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_Z));
static CIE_D65_DATA: LazyLock<DenselySampledSpectrumBuffer> =
pub static CIE_D65_DATA: LazyLock<DenselySampledSpectrumBuffer> =
LazyLock::new(|| data::create_cie_buffer(&CIE_D65));
fn get_d65_illuminant_buffer() -> Arc<DenselySampledSpectrumBuffer> {
Arc::from(*&CIE_D65_DATA)
Arc::new(CIE_D65_DATA.clone())
}
pub fn cie_x() -> Spectrum {

View file

@ -1,5 +1,5 @@
use anyhow::Result;
use crate::Arena;
use anyhow::Result;
use shared::{
core::texture::SpectrumType,
textures::{FloatCheckerboardTexture, SpectrumCheckerboardTexture},
@ -19,7 +19,7 @@ impl CreateFloatTexture for FloatCheckerboardTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -19,7 +19,7 @@ impl CreateFloatTexture for FloatConstantTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -25,7 +25,7 @@ impl CreateFloatTexture for FloatDotsTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -1,5 +1,5 @@
use anyhow::Result;
use crate::Arena;
use anyhow::Result;
use shared::core::texture::TextureEvalContext;
use shared::{textures::FBmTexture, utils::Transform};
@ -13,7 +13,7 @@ impl CreateFloatTexture for FBmTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -205,7 +205,7 @@ impl CreateFloatTexture for FloatImageTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -31,7 +31,7 @@ impl FloatMixTexture {
_render_from_texture: &Transform,
params: &TextureParameterDictionary,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
let tex1 = params.get_float_texture("tex1", 0.);
let tex2 = params.get_float_texture("tex2", 1.);
@ -72,7 +72,7 @@ impl FloatDirectionMixTexture {
render_from_texture: &Transform,
params: &TextureParameterDictionary,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
let dir_raw = params.get_one_vector3f("dir", Vector3f::new(0., 1., 0.));
let dir = render_from_texture.apply_to_vector(dir_raw).normalize();

View file

@ -24,7 +24,7 @@ impl FloatScaledTexture {
_render_from_texture: &Transform,
params: &TextureParameterDictionary,
_loc: &FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
let mut tex = params.get_float_texture("tex", 1.);
let mut scale = params.get_float_texture("scale", 1.);

View file

@ -1,5 +1,5 @@
use anyhow::Result;
use crate::Arena;
use anyhow::Result;
use shared::{textures::WindyTexture, utils::Transform};
use crate::{
@ -12,7 +12,7 @@ impl CreateFloatTexture for WindyTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -12,7 +12,7 @@ impl CreateFloatTexture for WrinkledTexture {
_render_from_texture: Transform,
_parameters: TextureParameterDictionary,
_loc: FileLoc,
_arena: &mut Arena,
_arena: &Arena,
) -> Result<FloatTexture> {
todo!()
}

View file

@ -33,11 +33,6 @@ struct ArenaInner {
texture_cache: HashMap<usize, u64>,
}
pub struct Arena {
buffer: Vec<(*mut u8, Layout)>,
texture_cache: HashMap<usize, u64>,
}
impl Arena {
pub fn new() -> Self {
Self {
@ -101,7 +96,8 @@ impl Arena {
#[cfg(not(feature = "cuda"))]
unsafe fn alloc_unified(&self, layout: Layout) -> *mut u8 {
let ptr = unsafe { std::alloc::alloc(layout) };
self.buffer.push((ptr, layout));
let mut inner = self.inner.lock();
inner.buffer.push((ptr, layout));
ptr
}
@ -119,7 +115,7 @@ impl Arena {
#[cfg(not(feature = "cuda"))]
let tex_obj = 0u64;
self.texture_cache.insert(key, tex_obj);
inner.texture_cache.insert(key, tex_obj);
tex_obj
}
@ -141,8 +137,8 @@ impl Arena {
impl Drop for Arena {
fn drop(&mut self) {
let inner = self.inner.get_mut().unwrap();
for (ptr, layout) in self.buffer.drain(..) {
let inner = self.inner.get_mut();
for (ptr, layout) in inner.buffer.drain(..) {
unsafe {
#[cfg(feature = "cuda")]
{
@ -183,7 +179,7 @@ impl Upload for Light {
impl Upload for Image {
type Target = DeviceImage;
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(*self.device_image())
arena.alloc(*self.device())
}
}
@ -204,7 +200,7 @@ impl Upload for Material {
impl Upload for DenselySampledSpectrumBuffer {
type Target = DenselySampledSpectrum;
fn upload(&self, arena: &Arena) -> Ptr<Self::Target> {
arena.alloc(self.device())
arena.alloc(*&self.device())
}
}

View file

@ -383,7 +383,7 @@ fn create_cuda_texture(pyramid: &[Image], wrap_mode: WrapMode) -> u64 {
let mut array: cudaArray_t = std::ptr::null_mut();
cudaMallocArray(&mut array, &channel_desc, width, height, 0);
let pixels = base.as_slice(); // Assuming you have this method
let pixels = base.as_slice();
cudaMemcpy2DToArray(
array,
0,

View file

@ -50,7 +50,7 @@ pub trait ParserTarget {
fn medium_interface(&mut self, inside_name: &str, outside_name: &str, loc: FileLoc);
fn sampler(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn world_begin(&mut self, loc: FileLoc, arena: &mut Arena);
fn world_begin(&mut self, loc: FileLoc, arena: &Arena);
fn attribute_begin(&mut self, loc: FileLoc);
fn attribute_end(&mut self, loc: FileLoc);
fn attribute(&mut self, target: &str, params: ParsedParameterVector, loc: FileLoc);
@ -62,7 +62,7 @@ pub trait ParserTarget {
tex_name: &str,
params: &ParsedParameterVector,
loc: FileLoc,
arena: &mut Arena,
arena: &Arena,
);
fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
@ -459,7 +459,7 @@ impl ParserTarget for FormattingParserTarget {
println!("{}CoordSysTransform \"{}\"", self.indent(0), name);
}
fn world_begin(&mut self, _loc: FileLoc, _arena: &mut Arena) {
fn world_begin(&mut self, _loc: FileLoc, _arena: &Arena) {
println!("{}WorldBegin", self.indent(0));
self.cat_indent_count += 4;
}
@ -744,7 +744,7 @@ impl<'a> SceneParser<'a> {
}
pub fn run(&mut self) -> Result<(), ParserError> {
let mut arena = Arena::new();
let arena = Arc::new(Arena::new());
loop {
let token = match self.next_token()? {
Some(t) => t,
@ -1013,7 +1013,7 @@ impl<'a> SceneParser<'a> {
let tex_name = self.expect_quoted_string()?;
let params = self.parse_parameters()?;
self.target
.texture(&name, &type_name, &tex_name, &params, token.loc, arena);
.texture(&name, &type_name, &tex_name, &params, token.loc, &arena);
}
_ => {
return Err(ParserError::Generic(
@ -1024,7 +1024,7 @@ impl<'a> SceneParser<'a> {
},
'W' => match token.text.as_str() {
"WorldBegin" => self.target.world_begin(token.loc, &mut arena),
"WorldBegin" => self.target.world_begin(token.loc, &arena),
"WorldEnd" => {}
_ => {
return Err(ParserError::Generic(

View file

@ -23,7 +23,7 @@ impl PiecewiseConstant1D {
Self::new_with_bounds(f.to_vec(), 0.0, 1.0)
}
pub fn to_shared(&self, arena: &mut Arena) -> DevicePiecewiseConstant1D {
pub fn to_shared(&self, arena: &Arena) -> DevicePiecewiseConstant1D {
let (func_ptr, _) = arena.alloc_slice(&self.func);
let (cdf_ptr, _) = arena.alloc_slice(&self.cdf);