use crate::core::image::{Image, ImageChannelDesc, ImageChannelValues, ImageMetadata};
use crate::spectra::{SRGB, data::get_named_spectrum};
use rayon::prelude::*;
use shared::Float;
use shared::core::camera::CameraTransform;
use shared::core::color::{RGB, XYZ, white_balance};
use shared::core::film::SpectralPixel;
use shared::core::filter::Filter;
use shared::core::geometry::{Bounds2f, Bounds2i, Point2f, Point2i};
use shared::core::image::PixelFormat;
use shared::core::spectrum::{Spectrum, StandardSpectra};
use shared::film::{Film, FilmBase, GBufferFilm, PixelSensor, RGBFilm, SpectralFilm};
use shared::spectra::cie::SWATCHES_RAW;
use shared::spectra::{
    DenselySampledSpectrum, LAMBDA_MAX, LAMBDA_MIN, PiecewiseLinearSpectrum, RGBColorSpace,
};
use shared::utils::containers::Array2D;
use shared::utils::math::{SquareMatrix, linear_least_squares};
use shared::utils::{AnimatedTransform, AtomicFloat};
use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, LazyLock};

use crate::spectra::DenselySampledSpectrumBuffer;
use crate::utils::{FileLoc, ParameterDictionary};

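// The 24 swatch reflectance spectra below are projected through the sensor's
// RGB response curves and through the CIE XYZ matching functions; a
// least-squares fit over those 24 samples yields the sensor's RGB-to-XYZ
// matrix (see `PixelSensorHost::new`).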
const N_SWATCH_REFLECTANCES: usize = 24;
static SWATCH_REFLECTANCES: LazyLock<[Spectrum; N_SWATCH_REFLECTANCES]> = LazyLock::new(|| {
    std::array::from_fn(|i| {
        let raw_data = SWATCHES_RAW[i];
        let pls = PiecewiseLinearSpectrum::from_interleaved(raw_data, false);
        Spectrum::PiecewiseLinear(pls)
    })
});

pub trait PixelSensorHost {
    fn get_swatches() -> &'static [Spectrum; N_SWATCH_REFLECTANCES] {
        &*SWATCH_REFLECTANCES
    }

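    // `create` pulls its settings from the scene description. An illustrative
    // (hypothetical) parameter block, using the keys read below, might be:
    //
    //     "float iso" [200]
    //     "float whitebalance" [5600]
    //     "string sensor" ["cie1931"]
    //
    // With the default "cie1931" sensor, no white balance is applied unless
    // "whitebalance" is given; named sensors default to 6500 K.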
    fn create(
        params: &ParameterDictionary,
        output_colorspace: Arc<RGBColorSpace>,
        exposure_time: Float,
        loc: &FileLoc,
    ) -> Result<PixelSensor, String> {
        let iso = params.get_one_float("iso", 100.);
        let mut white_balance_temp = params.get_one_float("whitebalance", 0.);
        let sensor_name = params.get_one_string("sensor", "cie1931");
        // Named sensors are white-balanced by default; only the CIE 1931 sensor
        // leaves the image "as shot" when no temperature is given.
        if sensor_name != "cie1931" && white_balance_temp == 0. {
            white_balance_temp = 6500.;
        }
        let imaging_ratio = exposure_time * iso / 100.;

        let d_illum = if white_balance_temp == 0. {
            DenselySampledSpectrumBuffer::generate_cie_d(6500.)
        } else {
            DenselySampledSpectrumBuffer::generate_cie_d(white_balance_temp)
        };

        let sensor_illum: Option<Arc<Spectrum>> = if white_balance_temp != 0. {
            Some(Arc::new(Spectrum::DenselySampled(d_illum)))
        } else {
            None
        };

        if sensor_name == "cie1931" {
            Ok(PixelSensor::new_with_white_balance(
                output_colorspace,
                sensor_illum,
                imaging_ratio,
            ))
        } else {
            let r_opt = get_named_spectrum(&format!("{}_r", sensor_name));
            let g_opt = get_named_spectrum(&format!("{}_g", sensor_name));
            let b_opt = get_named_spectrum(&format!("{}_b", sensor_name));
            if r_opt.is_none() || g_opt.is_none() || b_opt.is_none() {
                return Err(format!(
                    "{}: unknown sensor type '{}' (missing RGB spectral data)",
                    loc, sensor_name
                ));
            }

            let r = Arc::new(r_opt.unwrap());
            let g = Arc::new(g_opt.unwrap());
            let b = Arc::new(b_opt.unwrap());

            PixelSensor::new(
                r,
                g,
                b,
                output_colorspace,
                sensor_illum,
                imaging_ratio,
            )
            .map_err(|e| e.to_string())
        }
    }

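    // Builds a sensor from explicit R/G/B response curves: each of the 24 swatch
    // reflectances is projected once through the camera responses under the
    // sensor illuminant (giving camera RGB) and once through the CIE X/Y/Z
    // matching functions under the output colorspace's illuminant (giving target
    // XYZ). A linear least-squares fit over those 24 pairs produces the 3x3
    // `xyz_from_sensor_rgb` matrix stored on the sensor.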
    fn new(
        r: Arc<Spectrum>,
        g: Arc<Spectrum>,
        b: Arc<Spectrum>,
        output_colorspace: Arc<RGBColorSpace>,
        sensor_illum: Option<Arc<Spectrum>>,
        imaging_ratio: Float,
        spectra: &StandardSpectra,
    ) -> Result<PixelSensor, String> {
        // As seen at the call sites of this constructor, sensor_illum may be None.
        // In that case we fall back to the colorspace's own illuminant, which
        // might not be the right choice.
        // TODO: Test this
        let illum: &Spectrum = match &sensor_illum {
            Some(arc_illum) => &**arc_illum,
            None => &output_colorspace.illuminant,
        };

        let r_bar = DenselySampledSpectrum::from_spectrum(&r);
        let g_bar = DenselySampledSpectrum::from_spectrum(&g);
        let b_bar = DenselySampledSpectrum::from_spectrum(&b);
        let mut rgb_camera = [[0.; 3]; N_SWATCH_REFLECTANCES];

        let swatches = Self::get_swatches();

        for i in 0..N_SWATCH_REFLECTANCES {
            let rgb = Self::project_reflectance::<RGB>(
                &swatches[i],
                illum,
                &Spectrum::Dense(r_bar.clone()),
                &Spectrum::Dense(g_bar.clone()),
                &Spectrum::Dense(b_bar.clone()),
            );
            for c in 0..3 {
                rgb_camera[i][c] = rgb[c];
            }
        }

        let mut xyz_output = [[0.; 3]; N_SWATCH_REFLECTANCES];
        let sensor_white_g = illum.inner_product(&Spectrum::Dense(g_bar.clone()));
        let sensor_white_y = illum.inner_product(&spectra.y);
        for i in 0..N_SWATCH_REFLECTANCES {
            let s = swatches[i].clone();
            let xyz = Self::project_reflectance::<XYZ>(
                &s,
                &output_colorspace.illuminant,
                &spectra.x,
                &spectra.y,
                &spectra.z,
            ) * (sensor_white_y / sensor_white_g);
            for c in 0..3 {
                xyz_output[i][c] = xyz[c];
            }
        }

        // Solve for the 3x3 matrix that best maps the camera-RGB swatch values
        // onto their XYZ values under the output illuminant.
        let xyz_from_sensor_rgb = linear_least_squares(rgb_camera, xyz_output)?;

        Ok(PixelSensor {
            xyz_from_sensor_rgb,
            r_bar,
            g_bar,
            b_bar,
            imaging_ratio,
        })
    }

    fn new_with_white_balance(
        output_colorspace: Arc<RGBColorSpace>,
        sensor_illum: Option<Arc<Spectrum>>,
        imaging_ratio: Float,
        spectra: &StandardSpectra,
    ) -> PixelSensor {
        let r_bar = DenselySampledSpectrumBuffer::from_spectrum(&spectra.x);
        let g_bar = DenselySampledSpectrumBuffer::from_spectrum(&spectra.y);
        let b_bar = DenselySampledSpectrumBuffer::from_spectrum(&spectra.z);

        // With no sensor illuminant there is nothing to adapt to, so the
        // sensor-RGB-to-XYZ transform is the identity.
        let xyz_from_sensor_rgb = if let Some(illum) = &sensor_illum {
            let source_white = illum.to_xyz(spectra).xy();
            let target_white = output_colorspace.w;
            white_balance(source_white, target_white)
        } else {
            SquareMatrix::<Float, 3>::identity()
        };

        PixelSensor {
            xyz_from_sensor_rgb,
            r_bar,
            g_bar,
            b_bar,
            imaging_ratio,
        }
    }
}

impl PixelSensorHost for PixelSensor {}

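// Host-side backing storage for a `SpectralFilm`. The `SpectralFilm` view only
// holds raw pointers into these buffers; keeping the buffers behind a `Box`
// (and never reallocating them) keeps those pointers stable for as long as the
// host is alive.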
struct SpectralFilmStorage {
    pixels: Array2D<SpectralPixel>,
    bucket_sums: Vec<f64>,
    weight_sums: Vec<f64>,
    bucket_splats: Vec<AtomicFloat>,
}

pub struct SpectralFilmHost {
    pub view: SpectralFilm,
    _storage: Box<SpectralFilmStorage>,
}

impl SpectralFilmHost {
    pub fn new(
        base: &FilmBase,
        lambda_min: Float,
        lambda_max: Float,
        n_buckets: usize,
        colorspace: &RGBColorSpace,
        max_component_value: Float,
        write_fp16: bool,
    ) -> Self {
        let n_pixels = base.pixel_bounds.area() as usize;
        let total_buckets = n_pixels * n_buckets;

        let mut bucket_sums = vec![0.0; total_buckets];
        let mut weight_sums = vec![0.0; total_buckets];

        let mut bucket_splats = Vec::with_capacity(total_buckets);
        for _ in 0..total_buckets {
            bucket_splats.push(AtomicFloat::new(0.0));
        }

        let mut pixels = Array2D::<SpectralPixel>::new(base.pixel_bounds);

        // Wire each pixel up to its slice of the flat per-bucket arrays. The
        // vectors are moved (not reallocated) into the boxed storage below, so
        // these base pointers stay valid.
        let p_sums_base = bucket_sums.as_mut_ptr();
        let p_weights_base = weight_sums.as_mut_ptr();
        let p_splats_base = bucket_splats.as_mut_ptr();

        for i in 0..n_pixels {
            let pixel = pixels.get_linear_mut(i);

            pixel.bucket_offset = i * n_buckets;

            unsafe {
                let offset = i * n_buckets;

                pixel.bucket_sums = p_sums_base.add(offset);
                pixel.weight_sums = p_weights_base.add(offset);
                pixel.bucket_splats = p_splats_base.add(offset);
            }
        }

        let mut storage = Box::new(SpectralFilmStorage {
            pixels,
            bucket_sums,
            weight_sums,
            bucket_splats,
        });

        let view = SpectralFilm {
            base: base.clone(),
            colorspace: colorspace.clone(),
            lambda_min,
            lambda_max,
            n_buckets: n_buckets as u32,
            max_component_value,
            write_fp16,
            filter_integral: base.filter.integral(),
            output_rgbf_from_sensor_rgb: SquareMatrix::identity(), // Logic omitted

            pixels: Array2D {
                values: storage.pixels.as_mut_ptr(),
                extent: base.pixel_bounds,
                stride: base.pixel_bounds.p_max.x() - base.pixel_bounds.p_min.x(),
            },

            bucket_sums: storage.bucket_sums.as_mut_ptr(),
            weight_sums: storage.weight_sums.as_mut_ptr(),
            bucket_splats: storage.bucket_splats.as_mut_ptr(),
        };

        Self {
            view,
            _storage: storage,
        }
    }
}

pub struct GBufferFilmHost {
    pub device: GBufferFilm,
}

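// Builds the device-facing `GBufferFilm`. Note that `base.sensor` is a raw
// pointer, so the referenced `PixelSensor` must outlive the film; the
// constructor only checks that the pointer is non-null.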
impl GBufferFilmHost {
    pub fn new(
        base: &FilmBase,
        output_from_render: &AnimatedTransform,
        apply_inverse: bool,
        colorspace: &RGBColorSpace,
        max_component_value: Float,
        write_fp16: bool,
    ) -> Self {
        assert!(!base.pixel_bounds.is_empty());
        let sensor_ptr = base.sensor;
        if sensor_ptr.is_null() {
            panic!("Film must have a sensor");
        }
        let sensor = unsafe { &*sensor_ptr };
        let output_rgbf_from_sensor_rgb = colorspace.rgb_from_xyz * sensor.xyz_from_sensor_rgb;
        let filter_integral = base.filter.integral();
        let pixels = Array2D::new(base.pixel_bounds);

        let device = GBufferFilm {
            base: base.clone(),
            output_from_render: output_from_render.clone(),
            apply_inverse,
            pixels,
            colorspace: colorspace.clone(),
            max_component_value,
            write_fp16,
            filter_integral,
            output_rgbf_from_sensor_rgb,
        };

        Self { device }
    }
}

pub trait FilmBaseHost {
    fn create(
        params: &ParameterDictionary,
        filter: Filter,
        sensor: Option<&PixelSensor>,
        loc: &FileLoc,
    ) -> Self;
}

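// Example of the crop-window arithmetic in `create` below: with the default
// 1280x720 resolution and a `cropwindow` of [0.25, 0.75, 0.25, 0.75]
// (x_min, x_max, y_min, y_max), the pixel bounds before filter expansion are
//     p_min = (ceil(1280 * 0.25), ceil(720 * 0.25)) = (320, 180)
//     p_max = (ceil(1280 * 0.75), ceil(720 * 0.75)) = (960, 540)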
impl FilmBaseHost for FilmBase {
    fn create(
        params: &ParameterDictionary,
        filter: Filter,
        sensor: Option<&PixelSensor>,
        loc: &FileLoc,
    ) -> Self {
        let x_res = params.get_one_int("xresolution", 1280);
        let y_res = params.get_one_int("yresolution", 720);

        if x_res <= 0 || y_res <= 0 {
            eprintln!(
                "{}: Film resolution must be > 0. Defaulting to 1280x720.",
                loc
            );
        }
        let full_resolution = Point2i::new(x_res.max(1), y_res.max(1));

        // "cropwindow" is [x_min, x_max, y_min, y_max] in NDC; the default is
        // the full image.
        let crop_data = params.get_float_array("cropwindow");
        let crop = if crop_data.len() == 4 {
            Bounds2f::from_points(
                Point2f::new(crop_data[0], crop_data[2]),
                Point2f::new(crop_data[1], crop_data[3]),
            )
        } else {
            Bounds2f::from_points(Point2f::zero(), Point2f::new(1.0, 1.0))
        };

        let p_min = Point2i::new(
            (full_resolution.x() as Float * crop.p_min.x()).ceil() as i32,
            (full_resolution.y() as Float * crop.p_min.y()).ceil() as i32,
        );
        let p_max = Point2i::new(
            (full_resolution.x() as Float * crop.p_max.x()).ceil() as i32,
            (full_resolution.y() as Float * crop.p_max.y()).ceil() as i32,
        );

        let mut pixel_bounds = Bounds2i::from_points(p_min, p_max);

        if pixel_bounds.is_empty() {
            eprintln!("{}: Film crop window results in empty pixel bounds.", loc);
        }

        // Expand by the filter radius so samples near the crop edge still contribute.
        let rad = filter.radius();
        let expansion = Point2i::new(rad.x().ceil() as i32, rad.y().ceil() as i32);
        pixel_bounds = pixel_bounds.expand(expansion);

        let diagonal_mm = params.get_one_float("diagonal", 35.0);
        let filename = params.get_one_string("filename", "pbrt.exr");

        Self {
            full_resolution,
            pixel_bounds,
            filter,
            // The sensor diagonal is specified in millimeters but stored in meters.
            diagonal: diagonal_mm * 0.001,
            sensor: sensor.map_or(std::ptr::null(), |s| s as *const PixelSensor),
        }
    }
}

pub trait FilmHost {
    fn write_image(&self, metadata: &ImageMetadata, splat_scale: Float) {
        let image = self.get_image(metadata, splat_scale);
        image
            .write(self.get_filename(), metadata)
            .expect("failed to write film image");
    }

    fn get_image(&self, _metadata: &ImageMetadata, splat_scale: Float) -> Image {
        // Default to half-precision output.
        let write_fp16 = true;
        let format = if write_fp16 {
            PixelFormat::F16
        } else {
            PixelFormat::F32
        };

        let channel_names = &["R", "G", "B"];

        let pixel_bounds = self.base().pixel_bounds;
        let resolution = Point2i::from(pixel_bounds.diagonal());

        // Convert pixels to RGB rows in parallel, clamping to the largest finite
        // fp16 value (65504) when writing half floats.
        let n_clamped = Arc::new(AtomicUsize::new(0));
        let processed_rows: Vec<Vec<Float>> = (pixel_bounds.p_min.y()..pixel_bounds.p_max.y())
            .into_par_iter()
            .map(|y| {
                let n_clamped = Arc::clone(&n_clamped);
                let mut row_data = Vec::with_capacity(resolution.x() as usize * 3);
                for x in pixel_bounds.p_min.x()..pixel_bounds.p_max.x() {
                    let p = Point2i::new(x, y);
                    let mut rgb = self.get_pixel_rgb(p, Some(splat_scale));
                    let mut was_clamped = false;
                    if write_fp16 {
                        if rgb.r > 65504.0 {
                            rgb.r = 65504.0;
                            was_clamped = true;
                        }
                        if rgb.g > 65504.0 {
                            rgb.g = 65504.0;
                            was_clamped = true;
                        }
                        if rgb.b > 65504.0 {
                            rgb.b = 65504.0;
                            was_clamped = true;
                        }
                    }
                    if was_clamped {
                        n_clamped.fetch_add(1, Ordering::SeqCst);
                    }
                    row_data.push(rgb.r);
                    row_data.push(rgb.g);
                    row_data.push(rgb.b);
                }
                row_data
            })
            .collect();

        let mut image = Image::new(format, resolution, channel_names, SRGB);
        let rgb_desc = ImageChannelDesc::new(&[0, 1, 2]);

        for (iy, row_data) in processed_rows.into_iter().enumerate() {
            for (ix, rgb_chunk) in row_data.chunks_exact(3).enumerate() {
                let p_offset = Point2i::new(ix as i32, iy as i32);
                let values = ImageChannelValues::from(rgb_chunk);
                image.set_channels(p_offset, &rgb_desc, &values);
            }
        }

        let clamped_count = n_clamped.load(Ordering::SeqCst);
        if clamped_count > 0 {
            println!(
                "{} pixel values clamped to maximum fp16 value.",
                clamped_count
            );
        }

        image
    }

    fn base(&self) -> &FilmBase;

    fn get_pixel_rgb(&self, p: Point2i, splat_scale: Option<Float>) -> RGB;

    fn get_filename(&self) -> &str;
}

pub trait FilmFactory: Sized {
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        _camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
    ) -> Result<Self, String>;
}

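// Illustrative usage (a sketch, not a verbatim call site): `params`, `filter`,
// and `loc` come from the scene parser, and `exposure_time` from the camera:
//
//     let film = Film::create("rgb", &params, exposure_time, filter, None, &loc)?;
//
// The "gbuffer" variant additionally requires `Some(camera_transform)` so it
// can map geometric quantities into the requested output coordinate system.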
impl FilmFactory for Film {
    fn create(
        name: &str,
        params: &ParameterDictionary,
        exposure_time: Float,
        filter: Filter,
        camera_transform: Option<CameraTransform>,
        loc: &FileLoc,
    ) -> Result<Self, String> {
        match name {
            "rgb" => {
                let colorspace = params.color_space.as_ref().unwrap();
                let max_component_value =
                    params.get_one_float("maxcomponentvalue", Float::INFINITY);
                let write_fp16 = params.get_one_bool("savefp16", true);
                let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
                let film_base = FilmBase::create(params, filter, Some(sensor), loc);
                Ok(RGBFilm::new(
                    film_base,
                    &colorspace,
                    max_component_value,
                    write_fp16,
                ))
            }
            "gbuffer" => {
                let colorspace = params.color_space.as_ref().unwrap();
                let max_component_value =
                    params.get_one_float("maxcomponentvalue", Float::INFINITY);
                let write_fp16 = params.get_one_bool("savefp16", true);
                let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
                let film_base = FilmBase::create(params, filter, Some(sensor), loc);

                let filename = params.get_one_string("filename", "pbrt.exr");
                if Path::new(&filename).extension() != Some("exr".as_ref()) {
                    return Err(format!(
                        "{}: EXR is the only format supported by GBufferFilm",
                        loc
                    ));
                }

                let coords_system = params.get_one_string("coordinatesystem", "camera");
                let mut apply_inverse = false;
                let camera_transform = camera_transform
                    .ok_or_else(|| "GBufferFilm requires a camera_transform".to_string())?;
                let output_from_render = if coords_system == "camera" {
                    apply_inverse = true;
                    camera_transform.render_from_camera
                } else if coords_system == "world" {
                    AnimatedTransform::from_transform(&camera_transform.world_from_render)
                } else {
                    return Err(format!(
                        "{}: unknown coordinate system '{}' for GBufferFilm (expected \"camera\" or \"world\")",
                        loc, coords_system
                    ));
                };

                Ok(GBufferFilm::new(
                    &film_base,
                    &output_from_render,
                    apply_inverse,
                    colorspace,
                    max_component_value,
                    write_fp16,
                ))
            }
            "spectral" => {
                let colorspace = params.color_space.as_ref().unwrap();
                let max_component_value =
                    params.get_one_float("maxcomponentvalue", Float::INFINITY);
                let write_fp16 = params.get_one_bool("savefp16", true);
                let sensor = PixelSensor::create(params, colorspace.clone(), exposure_time, loc)?;
                let film_base = FilmBase::create(params, filter, Some(sensor), loc);

                let filename = params.get_one_string("filename", "pbrt.exr");
                if Path::new(&filename).extension() != Some("exr".as_ref()) {
                    return Err(format!(
                        "{}: EXR is the only format supported by SpectralFilm",
                        loc
                    ));
                }

                let n_buckets = params.get_one_int("nbuckets", 16) as usize;
                let lambda_min = params.get_one_float("lambdamin", LAMBDA_MIN as Float);
                let lambda_max = params.get_one_float("lambdamax", LAMBDA_MAX as Float);
                if lambda_min < LAMBDA_MIN as Float || lambda_max > LAMBDA_MAX as Float {
                    return Err(format!(
                        "{}: PBRT must be recompiled with different values of LAMBDA_MIN and LAMBDA_MAX",
                        loc
                    ));
                }

                Ok(SpectralFilm::new(
                    &film_base,
                    lambda_min,
                    lambda_max,
                    n_buckets,
                    colorspace,
                    max_component_value,
                    write_fp16,
                ))
            }
            _ => Err(format!("Film type '{}' unknown at {}", name, loc)),
        }
    }
}