//! PBRT scene-description parser (`pbrt/src/utils/parser.rs`): tokenizer,
//! the `ParserTarget` callback trait, a pretty-printing target, and the
//! recursive-descent `SceneParser` driver.

use flate2::read::GzDecoder;
use memmap2::Mmap;
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use crate::Arena;
use crate::utils::error::FileLoc;
use crate::utils::parameters::{ParameterDictionary, ParsedParameter, ParsedParameterVector};
use shared::Float;
/// Callback interface invoked by `SceneParser` as scene-file directives are
/// consumed. Each method corresponds to one pbrt directive; `loc` is the
/// source location of the directive's token, for error reporting.
pub trait ParserTarget {
    /// `Identity` directive.
    fn identity(&mut self, loc: FileLoc);
    /// `Translate dx dy dz`.
    fn translate(&mut self, dx: Float, dy: Float, dz: Float, loc: FileLoc);
    /// `Rotate angle ax ay az` (axis/angle).
    fn rotate(&mut self, angle: Float, ax: Float, ay: Float, az: Float, loc: FileLoc);
    /// `Scale sx sy sz`.
    fn scale(&mut self, sx: Float, sy: Float, sz: Float, loc: FileLoc);
    /// `LookAt` with eye (`ex..ez`), look-at point (`lx..lz`) and up vector
    /// (`ux..uz`).
    fn look_at(
        &mut self,
        ex: Float,
        ey: Float,
        ez: Float,
        lx: Float,
        ly: Float,
        lz: Float,
        ux: Float,
        uy: Float,
        uz: Float,
        loc: FileLoc,
    );
    /// `Transform [ m00 .. m33 ]` — 16 values, replaces the CTM.
    fn transform(&mut self, transform: &[Float; 16], loc: FileLoc);
    /// `ConcatTransform [ m00 .. m33 ]` — 16 values, post-multiplies the CTM.
    fn concat_transform(&mut self, transform: &[Float; 16], loc: FileLoc);
    /// `CoordinateSystem "name"` — names the current transform.
    fn coordinate_system(&mut self, name: &str, loc: FileLoc);
    /// `CoordSysTransform "name"` — restores a named transform.
    fn coord_sys_transform(&mut self, name: &str, loc: FileLoc);
    /// `ActiveTransform All`.
    fn active_transform_all(&mut self, loc: FileLoc);
    /// `ActiveTransform EndTime`.
    fn active_transform_end_time(&mut self, loc: FileLoc);
    /// `ActiveTransform StartTime`.
    fn active_transform_start_time(&mut self, loc: FileLoc);
    /// `TransformTimes start end`.
    fn transform_times(&mut self, start: Float, end: Float, loc: FileLoc);
    /// `Option "name" value`.
    fn option(&mut self, name: &str, value: &str, loc: FileLoc);
    /// `ColorSpace "name"`.
    fn color_space(&mut self, n: &str, loc: FileLoc);
    /// `PixelFilter "name" params...`.
    fn pixel_filter(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `Film "type" params...`.
    fn film(&mut self, type_name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `Accelerator "name" params...`.
    fn accelerator(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `Integrator "name" params...`.
    fn integrator(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `Camera "name" params...`.
    fn camera(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `MakeNamedMedium "name" params...`.
    fn make_named_medium(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `MediumInterface "inside" "outside"`.
    fn medium_interface(&mut self, inside_name: &str, outside_name: &str, loc: FileLoc);
    /// `Sampler "name" params...`.
    fn sampler(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `WorldBegin` — also receives the shared allocation arena.
    fn world_begin(&mut self, loc: FileLoc, arena: Arc<Arena>);
    /// `AttributeBegin` (also used for `TransformBegin` by the parser).
    fn attribute_begin(&mut self, loc: FileLoc);
    /// `AttributeEnd` (also used for `TransformEnd` by the parser).
    fn attribute_end(&mut self, loc: FileLoc);
    /// `Attribute "target" params...` — note: takes `params` by value.
    fn attribute(&mut self, target: &str, params: ParsedParameterVector, loc: FileLoc);
    /// `Texture "name" "type" "texname" params...`.
    fn texture(
        &mut self,
        name: &str,
        type_name: &str,
        tex_name: &str,
        params: &ParsedParameterVector,
        loc: FileLoc,
        arena: Arc<Arena>,
    );
    /// `Material "name" params...`.
    fn material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `MakeNamedMaterial "name" params...`.
    fn make_named_material(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `NamedMaterial "name"`.
    fn named_material(&mut self, name: &str, loc: FileLoc);
    /// `LightSource "name" params...`.
    fn light_source(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `AreaLightSource "name" params...`.
    fn area_light_source(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `Shape "name" params...`.
    fn shape(&mut self, name: &str, params: &ParsedParameterVector, loc: FileLoc);
    /// `ReverseOrientation`.
    fn reverse_orientation(&mut self, loc: FileLoc);
    /// `ObjectBegin "name"`.
    fn object_begin(&mut self, name: &str, loc: FileLoc);
    /// `ObjectEnd`.
    fn object_end(&mut self, loc: FileLoc);
    /// `ObjectInstance "name"`.
    fn object_instance(&mut self, name: &str, loc: FileLoc);
    /// Called once after the last token of the last file has been consumed.
    fn end_of_files(&mut self);
}
/// A single lexical token from a scene file.
#[derive(Debug, Clone)]
pub struct Token {
    /// The token text as produced by the tokenizer.
    pub text: String,
    /// Source location (file/line/column) where the token started.
    pub loc: FileLoc,
}
impl Token {
    /// Parses the token text as a 32-bit integer.
    ///
    /// Text that parses as an `i64` but lies outside the `i32` range yields
    /// `ParserError::NumericOverflow`; any other parse failure yields
    /// `ParserError::ParseIntError`.
    pub fn parse_int(&self) -> Result<i32, ParserError> {
        if let Ok(narrow) = self.text.parse::<i32>() {
            return Ok(narrow);
        }
        // Distinguish "out of i32 range" from "not a number at all" by
        // retrying with a wider integer type.
        if let Ok(wide) = self.text.parse::<i64>() {
            if wide > i32::MAX as i64 {
                return Err(ParserError::NumericOverflow(
                    "Numeric value too large for 32-bit int".into(),
                    self.loc.clone(),
                ));
            }
            if wide < i32::MIN as i64 {
                return Err(ParserError::NumericOverflow(
                    "Numeric value too low for 32-bit int".into(),
                    self.loc.clone(),
                ));
            }
        }
        Err(ParserError::ParseIntError(
            format!("\"{}\": expected a number", self.text),
            self.loc.clone(),
        ))
    }

    /// Parses the token text as a `Float`, reporting the token's location on
    /// failure.
    pub fn parse_float(&self) -> Result<Float, ParserError> {
        match self.text.parse::<Float>() {
            Ok(value) => Ok(value),
            Err(_) => Err(ParserError::ParseFloatError(
                format!("\"{}\": expected a number", self.text),
                self.loc.clone(),
            )),
        }
    }

    /// True if the token text is wrapped in double quotes.
    fn is_quoted(&self) -> bool {
        Self::is_quoted_string(&self.text)
    }

    /// True if `s` starts and ends with `"` (and is long enough to hold both).
    fn is_quoted_string(s: &str) -> bool {
        s.len() >= 2 && s.starts_with('"') && s.ends_with('"')
    }

    /// Strips one leading and one trailing `"` from `s`, if both are present;
    /// otherwise returns `s` unchanged.
    fn dequote_string(s: &str) -> &str {
        match s.strip_prefix('"').and_then(|rest| rest.strip_suffix('"')) {
            Some(inner) => inner,
            None => s,
        }
    }

    /// Borrowing variant of `dequote_string` applied to this token's text.
    fn dequote(&self) -> &str {
        Self::dequote_string(&self.text)
    }

    /// Removes surrounding quotes from the token text in place (no-op when
    /// the text is not quoted).
    pub fn dequote_inplace(&mut self) {
        if self.is_quoted() {
            self.text.truncate(self.text.len() - 1);
            self.text.remove(0);
        }
    }
}
/// Errors produced while tokenizing or parsing a scene description.
#[derive(Debug)]
pub enum ParserError {
    /// I/O failure (open/read/mmap), with the underlying error message.
    Io(String),
    /// Input ended where more tokens were required.
    UnexpectedEof,
    /// A memory-mapped file was not valid UTF-8; carries the filename.
    InvalidUtf8(String),
    /// Catch-all parse error with a message and the offending location.
    Generic(String, FileLoc),
    /// Token could not be parsed as an integer.
    ParseIntError(String, FileLoc),
    /// Token could not be parsed as a float.
    ParseFloatError(String, FileLoc),
    /// Integer token parsed but does not fit in 32 bits.
    NumericOverflow(String, FileLoc),
}
/// Backing storage for a tokenizer's input: either an in-memory string
/// (stdin, gzip, or small files) or a memory-mapped file (large files).
pub enum TokenizerBuffer {
    /// Contents fully loaded into an owned string.
    Ram(String),
    /// Read-only memory map of the file on disk.
    Mapped(Mmap),
}
impl TokenizerBuffer {
    /// Returns the buffered contents as a `&str`.
    ///
    /// Panics if a mapped buffer is not valid UTF-8; `Tokenizer::load_buffer`
    /// validates mapped files up front, so this is an internal invariant.
    pub fn as_str(&self) -> &str {
        match *self {
            Self::Ram(ref text) => text,
            Self::Mapped(ref map) => {
                str::from_utf8(map).expect("File is not valid UTF-8")
            }
        }
    }
}
/// Lexer over one scene file: yields whitespace-separated tokens, quoted
/// strings, and the `[` / `]` bracket tokens, tracking line/column for
/// error reporting.
pub struct Tokenizer {
    // File contents (RAM or mmap).
    buffer: TokenizerBuffer,
    // Byte offset of the next unread character in `buffer`.
    cursor: usize,
    // Shared so each Token's FileLoc can reference it cheaply.
    filename: Arc<str>,
    // 1-based current line.
    line: i32,
    // 0-based current column; reset on '\n'.
    column: i32,
}
impl Tokenizer {
    /// Creates a tokenizer reading from `filename` (`"-"` means stdin).
    pub fn create_from_file(filename: &str) -> Result<Self, ParserError> {
        let buffer = Self::load_buffer(filename)?;
        Ok(Self {
            buffer,
            cursor: 0,
            filename: Arc::from(filename),
            line: 1,
            column: 0,
        })
    }

    /// Loads the file contents: stdin for `"-"`, gzip-decompressed for
    /// `*.gz`, fully read into RAM for files under 16 MiB, and memory-mapped
    /// (with an up-front UTF-8 validation pass) otherwise.
    fn load_buffer(filename: &str) -> Result<TokenizerBuffer, ParserError> {
        if filename == "-" {
            let mut s = String::new();
            io::stdin()
                .read_to_string(&mut s)
                .map_err(|e| ParserError::Io(e.to_string()))?;
            return Ok(TokenizerBuffer::Ram(s));
        }
        if filename.ends_with(".gz") {
            let f = File::open(filename).map_err(|e| ParserError::Io(e.to_string()))?;
            let mut d = GzDecoder::new(f);
            let mut s = String::new();
            d.read_to_string(&mut s)
                .map_err(|e| ParserError::Io(e.to_string()))?;
            return Ok(TokenizerBuffer::Ram(s));
        }
        let file = File::open(filename).map_err(|e| ParserError::Io(e.to_string()))?;
        let len = file
            .metadata()
            .map_err(|e| ParserError::Io(e.to_string()))?
            .len();
        if len < 16 * 1024 * 1024 {
            let mut s = String::new();
            let mut reader = &file;
            reader
                .read_to_string(&mut s)
                .map_err(|e| ParserError::Io(e.to_string()))?;
            return Ok(TokenizerBuffer::Ram(s));
        }
        // SAFETY-adjacent caveat of Mmap::map: the mapping is undefined if the
        // file is concurrently truncated; this matches the upstream usage.
        let mmap = unsafe { Mmap::map(&file).map_err(|e| ParserError::Io(e.to_string()))? };
        // Validate once so TokenizerBuffer::as_str can assume valid UTF-8.
        if str::from_utf8(&mmap).is_err() {
            return Err(ParserError::InvalidUtf8(filename.to_string()));
        }
        Ok(TokenizerBuffer::Mapped(mmap))
    }

    /// Peeks the next character without consuming it.
    fn peek_char(&self) -> Option<char> {
        self.buffer.as_str().get(self.cursor..)?.chars().next()
    }

    /// Consumes and returns the next character, updating line/column.
    fn advance(&mut self) -> Option<char> {
        let ch = self.peek_char()?;
        self.cursor += ch.len_utf8();
        if ch == '\n' {
            self.line += 1;
            self.column = 0;
        } else {
            self.column += 1;
        }
        Some(ch)
    }

    /// Returns the next token, or `Ok(None)` at end of input.
    ///
    /// Whitespace and `#` line comments are skipped. Quoted strings have
    /// their escapes (`\n`, `\r`, `\t`, `\"`, `\\`) processed but KEEP their
    /// surrounding `"` characters in `Token::text`, because the parser relies
    /// on `Token::is_quoted`/`dequote` to distinguish string tokens from bare
    /// directives. `[` and `]` are single-character tokens.
    pub fn next(&mut self) -> Result<Option<Token>, ParserError> {
        // Skip whitespace and comments until a token starts or input ends.
        loop {
            match self.peek_char() {
                Some(ch) if ch.is_whitespace() => {
                    self.advance();
                    continue;
                }
                Some('#') => {
                    // Comment runs to end of line.
                    while let Some(c) = self.peek_char() {
                        if c == '\n' {
                            break;
                        }
                        self.advance();
                    }
                    continue;
                }
                None => return Ok(None),
                Some(_) => break,
            }
        }
        // Record the location of the token's first character.
        let start_loc = FileLoc {
            filename: self.filename.clone(),
            line: self.line,
            column: self.column,
        };
        let first_char = self.peek_char().unwrap();
        if first_char == '"' {
            self.advance();
            // Keep the delimiters: downstream code (parse_parameters,
            // expect_quoted_string, Token::dequote) tests for them.
            let mut val = String::from("\"");
            loop {
                let ch = self.advance().ok_or(ParserError::UnexpectedEof)?;
                if ch == '"' {
                    val.push('"');
                    break;
                }
                if ch == '\\' {
                    let escaped = self.advance().ok_or(ParserError::UnexpectedEof)?;
                    let res = match escaped {
                        'n' => '\n',
                        'r' => '\r',
                        't' => '\t',
                        '"' => '"',
                        '\\' => '\\',
                        // Unknown escapes pass through verbatim.
                        _ => escaped,
                    };
                    val.push(res);
                } else {
                    val.push(ch);
                }
            }
            return Ok(Some(Token {
                text: val,
                loc: start_loc,
            }));
        }
        if first_char == '[' || first_char == ']' {
            self.advance();
            return Ok(Some(Token {
                text: first_char.to_string(),
                loc: start_loc,
            }));
        }
        // Bare token: runs until whitespace, quote, or bracket.
        let start_idx = self.cursor;
        while let Some(ch) = self.peek_char() {
            if ch.is_whitespace() || ch == '"' || ch == '[' || ch == ']' {
                break;
            }
            self.advance();
        }
        let text_slice = &self.buffer.as_str()[start_idx..self.cursor];
        Ok(Some(Token {
            text: text_slice.to_string(),
            loc: start_loc,
        }))
    }
}
/// `ParserTarget` that pretty-prints the scene back out (pbrt's
/// "cat"/"toply" style) instead of building a scene.
pub struct FormattingParserTarget {
    // Whether triangle meshes should be converted to PLY — TODO confirm:
    // not yet consulted anywhere in this file.
    to_ply: bool,
    // Whether to upgrade older scene syntax — TODO confirm: not yet
    // consulted anywhere in this file.
    upgrade: bool,
    // Current indentation in spaces; bumped/reduced by 4 per scope.
    cat_indent_count: usize,
    // Bookkeeping maps for already-seen names (currently only populated
    // externally, if at all; kept for upgrade logic).
    defined_textures: HashMap<String, String>,
    defined_named_materials: HashMap<String, String>,
    named_material_dictionaries: HashMap<String, ParameterDictionary>,
    defined_object_instances: HashMap<String, String>,
}
impl FormattingParserTarget {
pub fn new(to_ply: bool, upgrade: bool) -> Self {
Self {
to_ply,
upgrade,
cat_indent_count: 0,
defined_textures: HashMap::new(),
defined_named_materials: HashMap::new(),
named_material_dictionaries: HashMap::new(),
defined_object_instances: HashMap::new(),
}
}
pub fn indent(&self, extra: usize) -> String {
" ".repeat(self.cat_indent_count + 4 * extra)
}
fn upgrade_material_index(
&self,
_name: &str,
_dict: &ParameterDictionary,
_loc: FileLoc,
) -> String {
String::new()
}
fn upgrade_material(
&self,
_name: &mut String,
_dict: &ParameterDictionary,
_loc: FileLoc,
) -> String {
String::new()
}
}
impl ParserTarget for FormattingParserTarget {
fn option(&mut self, name: &str, value: &str, _loc: FileLoc) {
println!("{}Option \"{}\" \"{}\"", self.indent(0), name, value);
}
fn identity(&mut self, _loc: FileLoc) {
println!("{}Identity", self.indent(0));
}
fn translate(&mut self, dx: Float, dy: Float, dz: Float, _loc: FileLoc) {
println!("{}Translate {} {} {}", self.indent(0), dx, dy, dz);
}
fn rotate(&mut self, angle: Float, ax: Float, ay: Float, az: Float, _loc: FileLoc) {
println!("{}Rotate {} {} {} {}", self.indent(0), angle, ax, ay, az);
}
fn scale(&mut self, sx: Float, sy: Float, sz: Float, _loc: FileLoc) {
println!("{}Scale {} {} {}", self.indent(0), sx, sy, sz);
}
fn look_at(
&mut self,
ex: Float,
ey: Float,
ez: Float,
lx: Float,
ly: Float,
lz: Float,
ux: Float,
uy: Float,
uz: Float,
_loc: FileLoc,
) {
println!(
"{}LookAt {} {} {} {} {} {} {} {} {}",
self.indent(0),
ex,
ey,
ez,
lx,
ly,
lz,
ux,
uy,
uz
);
}
fn concat_transform(&mut self, t: &[Float; 16], _loc: FileLoc) {
// Rust arrays verify size at compile time, simpler than C++ pointers
println!("{}ConcatTransform [ {:?} ]", self.indent(0), t);
}
fn transform(&mut self, t: &[Float; 16], _loc: FileLoc) {
println!("{}Transform [ {:?} ]", self.indent(0), t);
}
fn coordinate_system(&mut self, name: &str, _loc: FileLoc) {
println!("{}CoordinateSystem \"{}\"", self.indent(0), name);
}
fn coord_sys_transform(&mut self, name: &str, _loc: FileLoc) {
println!("{}CoordSysTransform \"{}\"", self.indent(0), name);
}
fn world_begin(&mut self, _loc: FileLoc, _arena: Arc<Arena>) {
println!("{}WorldBegin", self.indent(0));
self.cat_indent_count += 4;
}
fn attribute_begin(&mut self, _loc: FileLoc) {
println!("{}AttributeBegin", self.indent(0));
self.cat_indent_count += 4;
}
fn attribute_end(&mut self, _loc: FileLoc) {
self.cat_indent_count = self.cat_indent_count.saturating_sub(4);
println!("{}AttributeEnd", self.indent(0));
}
fn shape(&mut self, name: &str, params: &ParsedParameterVector, _loc: FileLoc) {
println!(
"{}Shape \"{}\" ... ({} params)",
self.indent(0),
name,
params.len()
);
}
fn material(&mut self, name: &str, params: &ParsedParameterVector, _loc: FileLoc) {
println!(
"{}Material \"{}\" ... ({} params)",
self.indent(0),
name,
params.len()
);
}
fn texture(
&mut self,
name: &str,
type_name: &str,
tex_name: &str,
_params: &ParsedParameterVector,
_loc: FileLoc,
_arena: Arc<Arena>,
) {
println!(
"{}Texture \"{}\" \"{}\" \"{}\"",
self.indent(0),
name,
type_name,
tex_name
);
}
fn active_transform_all(&mut self, _loc: FileLoc) {}
fn active_transform_end_time(&mut self, _loc: FileLoc) {}
fn active_transform_start_time(&mut self, _loc: FileLoc) {}
fn transform_times(&mut self, _s: Float, _e: Float, _loc: FileLoc) {}
fn color_space(&mut self, _n: &str, _loc: FileLoc) {}
fn pixel_filter(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn film(&mut self, _t: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn accelerator(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn integrator(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn camera(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn make_named_medium(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn medium_interface(&mut self, _i: &str, _o: &str, _loc: FileLoc) {}
fn sampler(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn attribute(&mut self, _t: &str, _p: ParsedParameterVector, _loc: FileLoc) {}
fn make_named_material(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn named_material(&mut self, _n: &str, _loc: FileLoc) {}
fn light_source(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn area_light_source(&mut self, _n: &str, _p: &ParsedParameterVector, _loc: FileLoc) {}
fn reverse_orientation(&mut self, _loc: FileLoc) {}
fn object_begin(&mut self, name: &str, _loc: FileLoc) {
println!("{}ObjectBegin \"{}\"", self.indent(0), name);
self.cat_indent_count += 4;
}
fn object_end(&mut self, _loc: FileLoc) {
self.cat_indent_count = self.cat_indent_count.saturating_sub(4);
println!("{}ObjectEnd", self.indent(0));
}
fn object_instance(&mut self, _n: &str, _loc: FileLoc) {}
fn end_of_files(&mut self) {
self.cat_indent_count = 0;
}
}
/// Recursive-descent driver: pulls tokens from a stack of tokenizers
/// (grown by `Include`) and dispatches directives to a `ParserTarget`.
pub struct SceneParser<'a> {
    // Receiver of all parsed directives.
    target: &'a mut dyn ParserTarget,
    // Open files; the top of the stack is the file currently being read.
    file_stack: Vec<Tokenizer>,
    // Single-token pushback buffer used for lookahead.
    unget_token: Option<Token>,
    // Directory of the root scene file; Include paths resolve against it.
    current_dir: PathBuf,
}
impl<'a> SceneParser<'a> {
    /// Creates a parser over `root`, remembering the root file's parent
    /// directory so `Include` paths can be resolved relative to it.
    pub fn new(target: &'a mut dyn ParserTarget, root: Tokenizer) -> Self {
        let current_dir = Path::new(&*root.filename)
            .parent()
            .unwrap_or(Path::new("."))
            .to_path_buf();
        Self {
            target,
            file_stack: vec![root],
            unget_token: None,
            current_dir,
        }
    }

    /// Returns the next token, preferring a previously ungotten token, and
    /// popping exhausted include files off the stack until a file yields a
    /// token or the stack is empty (end of all input).
    fn next_token(&mut self) -> Result<Option<Token>, ParserError> {
        if let Some(tok) = self.unget_token.take() {
            return Ok(Some(tok));
        }
        loop {
            if self.file_stack.is_empty() {
                return Ok(None);
            }
            let tokenizer = self.file_stack.last_mut().unwrap();
            match tokenizer.next() {
                Ok(Some(tok)) => return Ok(Some(tok)),
                Ok(None) => {
                    // Current (included) file exhausted; resume the includer.
                    self.file_stack.pop();
                    continue;
                }
                Err(e) => return Err(e),
            }
        }
    }

    /// Pushes one token back; it is returned by the next `next_token` call.
    fn unget(&mut self, token: Token) {
        self.unget_token = Some(token);
    }

    /// Like `next_token`, but end-of-input is an `UnexpectedEof` error.
    fn next_token_required(&mut self) -> Result<Token, ParserError> {
        match self.next_token()? {
            Some(t) => Ok(t),
            None => Err(ParserError::UnexpectedEof),
        }
    }

    /// Reads the next token and parses it as a `Float`.
    fn expect_float(&mut self) -> Result<Float, ParserError> {
        let t = self.next_token_required()?;
        t.parse_float()
    }

    /// Parses the parameter list that follows a directive: a run of
    /// quoted `"type name"` declarations each followed by one value or a
    /// `[ ... ]` value list. Stops (ungetting the token) at the first
    /// non-quoted token, which begins the next directive.
    fn parse_parameters(&mut self) -> Result<ParsedParameterVector, ParserError> {
        let mut params = Vec::new();
        loop {
            let t = match self.next_token()? {
                Some(tok) => tok,
                None => return Ok(params),
            };
            if !t.is_quoted() {
                // Not a parameter declaration: belongs to the next directive.
                self.unget(t);
                return Ok(params);
            }
            // Parse declarations
            let decl = Token::dequote_string(&t.text);
            let mut parts = decl.split_whitespace();
            let type_name = parts.next().ok_or_else(|| {
                ParserError::Generic(
                    format!("Parameter \"{}\" missing type", decl),
                    t.loc.clone(),
                )
            })?;
            let param_name = parts.next().ok_or_else(|| {
                ParserError::Generic(
                    format!("Parameter \"{}\" missing name", decl),
                    t.loc.clone(),
                )
            })?;
            let mut param = ParsedParameter {
                type_name: type_name.to_string(),
                name: param_name.to_string(),
                loc: t.loc.clone(),
                ..Default::default()
            };
            // Storage category expected for the values, derived from the
            // declared type name; Unknown is resolved by the first value.
            enum ValType {
                Unknown,
                String,
                Bool,
                Float,
                Int,
            }
            let mut val_type = match type_name {
                "integer" => ValType::Int,
                "bool" => ValType::Bool,
                "float" | "point" | "vector" | "normal" | "color" | "spectrum" | "rgb"
                | "blackbody" => ValType::Float,
                "string" | "texture" => ValType::String,
                _ => ValType::Unknown,
            };
            // Classifies one value token and appends it to the matching
            // vector in `dest`, erroring on a type mismatch.
            let mut add_val =
                |token: &Token, dest: &mut ParsedParameter| -> Result<(), ParserError> {
                    let is_quoted = token.is_quoted();
                    if is_quoted {
                        if let ValType::Unknown = val_type {
                            val_type = ValType::String;
                        }
                        if let ValType::String = val_type {
                            dest.strings.push(token.dequote().to_string());
                        } else {
                            return Err(ParserError::Generic(
                                format!("Expected non-string for param {}", param_name),
                                token.loc.clone(),
                            ));
                        }
                    } else if token.text == "true" {
                        if let ValType::Unknown = val_type {
                            val_type = ValType::Bool;
                        }
                        if let ValType::Bool = val_type {
                            dest.bools.push(true);
                        } else {
                            return Err(ParserError::Generic(
                                format!("Expected bool for param {}", param_name),
                                token.loc.clone(),
                            ));
                        }
                    } else if token.text == "false" {
                        if let ValType::Unknown = val_type {
                            val_type = ValType::Bool;
                        }
                        if let ValType::Bool = val_type {
                            dest.bools.push(false);
                        } else {
                            return Err(ParserError::Generic(
                                format!("Expected bool for param {}", param_name),
                                token.loc.clone(),
                            ));
                        }
                    } else {
                        // Number
                        if let ValType::Unknown = val_type {
                            val_type = ValType::Float;
                        }
                        match val_type {
                            ValType::Int => {
                                let val = token.parse_int()?;
                                dest.ints.push(val);
                            }
                            ValType::Float => {
                                let val = token.parse_float()?;
                                dest.floats.push(val);
                            }
                            _ => {
                                return Err(ParserError::Generic(
                                    format!("Expected number for param {}", param_name),
                                    token.loc.clone(),
                                ));
                            }
                        }
                    }
                    Ok(())
                };
            // One bare value, or a bracketed list of values.
            let val_token = self.next_token_required()?;
            if val_token.text == "[" {
                loop {
                    let next_val = self.next_token_required()?;
                    if next_val.text == "]" {
                        break;
                    }
                    add_val(&next_val, &mut param)?;
                }
            } else {
                add_val(&val_token, &mut param)?;
            }
            params.push(param);
        }
    }

    /// Top-level parse loop: reads directives until end of all files and
    /// dispatches each to the target. Dispatch is keyed on the directive's
    /// first character before matching the full name (mirrors pbrt).
    pub fn run(&mut self) -> Result<(), ParserError> {
        let arena = Arc::new(Arena::default());
        loop {
            let token = match self.next_token()? {
                Some(t) => t,
                // EOF
                None => break,
            };
            let first_char = token.text.chars().next().unwrap();
            match first_char {
                'A' => match token.text.as_str() {
                    "AttributeBegin" => self.target.attribute_begin(token.loc),
                    "AttributeEnd" => self.target.attribute_end(token.loc),
                    "Attribute" => {
                        self.parse_basic_entry(|t, n, p, l| t.attribute(n, p.to_vec(), l))?
                    }
                    "ActiveTransform" => {
                        // Followed by a bare All/EndTime/StartTime keyword.
                        let a = self.next_token_required()?;
                        match a.text.as_str() {
                            "All" => self.target.active_transform_all(token.loc),
                            "EndTime" => self.target.active_transform_end_time(token.loc),
                            "StartTime" => self.target.active_transform_start_time(token.loc),
                            _ => {
                                return Err(ParserError::Generic(
                                    "Unknown ActiveTransform type".into(),
                                    a.loc,
                                ));
                            }
                        }
                    }
                    "AreaLightSource" => {
                        self.parse_basic_entry(|t, n, p, l| t.area_light_source(n, p, l))?
                    }
                    "Accelerator" => self.parse_basic_entry(|t, n, p, l| t.accelerator(n, p, l))?,
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'C' => match token.text.as_str() {
                    "Camera" => self.parse_basic_entry(|t, n, p, l| t.camera(n, p, l))?,
                    "ConcatTransform" => {
                        // Exactly 16 floats in brackets.
                        self.expect_token("[")?;
                        let mut m = [0.0; 16];
                        for i in 0..16 {
                            m[i] = self.expect_float()?;
                        }
                        self.expect_token("]")?;
                        self.target.concat_transform(&m, token.loc);
                    }
                    "CoordinateSystem" => {
                        let n = self.expect_quoted_string()?;
                        self.target.coordinate_system(&n, token.loc);
                    }
                    "CoordSysTransform" => {
                        let n = self.expect_quoted_string()?;
                        self.target.coord_sys_transform(&n, token.loc);
                    }
                    "ColorSpace" => {
                        let n = self.expect_quoted_string()?;
                        self.target.color_space(&n, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'F' => match token.text.as_str() {
                    "Film" => self.parse_basic_entry(|t, n, p, l| t.film(n, p, l))?,
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'I' => match token.text.as_str() {
                    "Integrator" => self.parse_basic_entry(|t, n, p, l| t.integrator(n, p, l))?,
                    "Include" => {
                        let filename_tok = self.next_token_required()?;
                        let raw_filename = filename_tok.dequote();
                        // Resolve path relative to current dir
                        let path = self.current_dir.join(raw_filename);
                        let new_tokenizer = Tokenizer::create_from_file(path.to_str().unwrap())
                            .map_err(|e| {
                                ParserError::Generic(
                                    format!("Could not include: {:?}", e),
                                    token.loc,
                                )
                            })?;
                        // Subsequent tokens come from the included file first.
                        self.file_stack.push(new_tokenizer);
                    }
                    "Identity" => self.target.identity(token.loc),
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'L' => match token.text.as_str() {
                    "LightSource" => {
                        self.parse_basic_entry(|t, n, p, l| t.light_source(n, p, l))?
                    }
                    "LookAt" => {
                        // Nine floats: eye, look-at point, up vector.
                        let v: Vec<Float> = (0..9)
                            .map(|_| self.expect_float())
                            .collect::<Result<_, _>>()?;
                        self.target.look_at(
                            v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7], v[8], token.loc,
                        );
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'M' => match token.text.as_str() {
                    "MakeNamedMaterial" => {
                        self.parse_basic_entry(|t, n, p, l| t.make_named_material(n, p, l))?
                    }
                    "MakeNamedMedium" => {
                        self.parse_basic_entry(|t, n, p, l| t.make_named_medium(n, p, l))?
                    }
                    "Material" => self.parse_basic_entry(|t, n, p, l| t.material(n, p, l))?,
                    "MediumInterface" => {
                        // The outside name is optional; when absent, the
                        // inside name is reused for both sides.
                        let inside = self.expect_quoted_string()?;
                        let next = self.next_token_required()?;
                        let outside = if next.is_quoted() {
                            next.dequote().to_string()
                        } else {
                            self.unget(next);
                            inside.clone()
                        };
                        self.target.medium_interface(&inside, &outside, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'N' => match token.text.as_str() {
                    "NamedMaterial" => {
                        let n = self.expect_quoted_string()?;
                        self.target.named_material(&n, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'O' => match token.text.as_str() {
                    "ObjectBegin" => {
                        let n = self.expect_quoted_string()?;
                        self.target.object_begin(&n, token.loc);
                    }
                    "ObjectEnd" => self.target.object_end(token.loc),
                    "ObjectInstance" => {
                        let n = self.expect_quoted_string()?;
                        self.target.object_instance(&n, token.loc);
                    }
                    "Option" => {
                        let name = self.expect_quoted_string()?;
                        // Value may be quoted or bare; dequote handles both.
                        let val_tok = self.next_token_required()?;
                        let val = val_tok.dequote().to_string();
                        self.target.option(&name, &val, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'P' => match token.text.as_str() {
                    "PixelFilter" => {
                        self.parse_basic_entry(|t, n, p, l| t.pixel_filter(n, p, l))?
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'R' => match token.text.as_str() {
                    "ReverseOrientation" => self.target.reverse_orientation(token.loc),
                    "Rotate" => {
                        let angle = self.expect_float()?;
                        let ax = self.expect_float()?;
                        let ay = self.expect_float()?;
                        let az = self.expect_float()?;
                        self.target.rotate(angle, ax, ay, az, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'S' => match token.text.as_str() {
                    "Shape" => self.parse_basic_entry(|t, n, p, l| t.shape(n, p, l))?,
                    "Sampler" => self.parse_basic_entry(|t, n, p, l| t.sampler(n, p, l))?,
                    "Scale" => {
                        let x = self.expect_float()?;
                        let y = self.expect_float()?;
                        let z = self.expect_float()?;
                        self.target.scale(x, y, z, token.loc);
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'T' => match token.text.as_str() {
                    // TransformBegin/End are forwarded to the attribute
                    // scope callbacks here.
                    "TransformBegin" => self.target.attribute_begin(token.loc),
                    "TransformEnd" => self.target.attribute_end(token.loc),
                    "Transform" => {
                        // Exactly 16 floats in brackets.
                        self.expect_token("[")?;
                        let mut m = [0.0; 16];
                        for i in 0..16 {
                            m[i] = self.expect_float()?;
                        }
                        self.expect_token("]")?;
                        self.target.transform(&m, token.loc);
                    }
                    "Translate" => {
                        let x = self.expect_float()?;
                        let y = self.expect_float()?;
                        let z = self.expect_float()?;
                        self.target.translate(x, y, z, token.loc);
                    }
                    "TransformTimes" => {
                        let s = self.expect_float()?;
                        let e = self.expect_float()?;
                        self.target.transform_times(s, e, token.loc);
                    }
                    "Texture" => {
                        let name = self.expect_quoted_string()?;
                        let type_name = self.expect_quoted_string()?;
                        let tex_name = self.expect_quoted_string()?;
                        let params = self.parse_parameters()?;
                        self.target.texture(
                            &name,
                            &type_name,
                            &tex_name,
                            &params,
                            token.loc,
                            arena.clone(),
                        );
                    }
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                'W' => match token.text.as_str() {
                    "WorldBegin" => self.target.world_begin(token.loc, arena.clone()),
                    // WorldEnd is accepted and ignored.
                    "WorldEnd" => {}
                    _ => {
                        return Err(ParserError::Generic(
                            format!("Unknown directive {}", token.text),
                            token.loc,
                        ));
                    }
                },
                _ => {
                    return Err(ParserError::Generic(
                        format!("Unknown directive {}", token.text),
                        token.loc,
                    ));
                }
            }
        }
        self.target.end_of_files();
        Ok(())
    }

    /// Consumes the next token and errors unless its text equals `expected`.
    fn expect_token(&mut self, expected: &str) -> Result<(), ParserError> {
        let t = self.next_token_required()?;
        if t.text != expected {
            return Err(ParserError::Generic(
                format!("Expected '{}', found '{}'", expected, t.text),
                t.loc,
            ));
        }
        Ok(())
    }

    /// Consumes the next token, requiring it to be a quoted string; returns
    /// the unquoted content.
    fn expect_quoted_string(&mut self) -> Result<String, ParserError> {
        let t = self.next_token_required()?;
        if t.is_quoted() {
            Ok(t.dequote().to_string())
        } else {
            Err(ParserError::Generic(
                format!("Expected quoted string, found '{}'", t.text),
                t.loc,
            ))
        }
    }

    /// Shared shape for directives of the form `Directive "type" params...`:
    /// reads the type name (quoted or bare) and the parameter list, then
    /// invokes `func` with the target, name, params, and location.
    fn parse_basic_entry<F>(&mut self, mut func: F) -> Result<(), ParserError>
    where
        F: FnMut(&mut dyn ParserTarget, &str, &ParsedParameterVector, FileLoc),
    {
        let type_token = self.next_token_required()?;
        let type_name = if type_token.is_quoted() {
            type_token.dequote().to_string()
        } else {
            type_token.text.clone()
        };
        let params = self.parse_parameters()?;
        func(self.target, &type_name, &params, type_token.loc);
        Ok(())
    }
}