refactor: use 64-bit types instead of 32-bit

I don't know why I wasn't using 64-bit floats from the beginning,
honestly. I had weird priorities back then.
Kiana Sheibani 2024-10-14 18:06:50 -04:00
parent 9879184d47
commit 763a4ff923
Signed by: toki
GPG key ID: 6CB106C25E86A9F7
9 changed files with 469 additions and 243 deletions
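
Most of those 469 additions and 243 deletions are mechanical: every scalar in the tracer moves from f32 to f64, and what looks like rustfmt-style reflowing accounts for the rest. As a rough sketch of the type side of the change, assuming crate::types centralizes aliases such as the Point3f that appears in the diff below (the alias names here are guesses, not the actual contents of that module):

extern crate nalgebra as na;

// Hypothetical shared aliases; the real crate::types module may differ.
pub type Point3f = na::geometry::Point3<f64>;   // previously Point3<f32>
pub type Vector3f = na::Vector3<f64>;           // previously Vector3<f32>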


@@ -1,18 +1,18 @@
 extern crate nalgebra as na;
 use std::cmp::Ordering;
-use std::f32::consts::PI;
+use std::f64::consts::PI;
-use na::*;
 use na::geometry::Point3;
+use na::*;
 use crate::object::*;
 use crate::types::*;
-fn trace(ray: Ray, objects: &Vec<Object>) -> Option<(&Object, f32)> {
-    objects.iter()
-        .filter_map(|obj| obj.intersect(ray)
-                             .map(|x| (obj, x)))
+fn trace(ray: Ray, objects: &Vec<Object>) -> Option<(&Object, f64)> {
+    objects
+        .iter()
+        .filter_map(|obj| obj.intersect(ray).map(|x| (obj, x)))
         .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(Ordering::Equal))
 }
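
Apart from the f32 → f64 swap in its signature, trace is unchanged in behavior: each object is mapped to its hit distance (if any), and min_by keeps the closest one. partial_cmp is needed because f64 is only partially ordered (NaN compares as unordered), so incomparable values fall back to Ordering::Equal. A standalone sketch of the same nearest-hit pattern, with made-up names and plain f64 distances instead of the project's Ray and Object types:

use std::cmp::Ordering;

// Pair each candidate with its hit distance (if any), then keep the smallest.
// Hypothetical data purely for illustration.
fn nearest<'a>(hits: &[(&'a str, Option<f64>)]) -> Option<(&'a str, f64)> {
    hits.iter()
        .filter_map(|&(name, dist)| dist.map(|d| (name, d)))
        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(Ordering::Equal))
}

fn main() {
    let hits = [("sphere", Some(4.2)), ("plane", None), ("cube", Some(1.7))];
    assert_eq!(nearest(&hits), Some(("cube", 1.7)));
}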
@@ -20,7 +20,10 @@ fn light_point(objects: &Vec<Object>, obj: &Object, point: Point3f, light: &dyn
     if light.check_shadow(point, objects) {
         let texture = obj.get_texture(point);
-        light.get_color(point) * (texture.albedo / PI) * light.intensity(point) * obj.normal(point).dot(&*light.direction(point))
+        light.get_color(point)
+            * (texture.albedo / PI)
+            * light.intensity(point)
+            * obj.normal(point).dot(&*light.direction(point))
     } else {
         // Point is in shadow
         Color::black()
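
The reformatted expression reads like a standard Lambertian diffuse term: light color × (albedo / π) × light intensity × the cosine of the angle between the surface normal and the light direction (the dot product). A minimal scalar sketch of that product, leaving out the per-channel color and the shadow test, with illustrative numbers:

use std::f64::consts::PI;

// Scalar diffuse term: (albedo / π) · intensity · cos(θ).
// The inputs below are made up to show the shape of the formula.
fn diffuse(albedo: f64, intensity: f64, cos_theta: f64) -> f64 {
    (albedo / PI) * intensity * cos_theta
}

fn main() {
    // albedo 0.8, intensity 20.0, surface tilted 60° from the light (cos 60° = 0.5)
    println!("{:.3}", diffuse(0.8, 20.0, 0.5)); // prints 2.546
}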
@@ -32,8 +35,13 @@ pub fn cast_ray(ray: Ray, scene: &Scene) -> Color {
         let point = ray.project(dist);
         let surface_color = obj.get_texture(point).color;
-        scene.lights.iter()
+        scene
+            .lights
+            .iter()
             .map(|light| light_point(&scene.objects, obj, point, &**light))
-            .fold(Color::black(), |acc, c| acc + c) * surface_color
-    } else { scene.background }
+            .fold(Color::black(), |acc, c| acc + c)
+            * surface_color
+    } else {
+        scene.background
+    }
 }
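
cast_ray keeps the same shape after formatting: when the ray hits something, the contribution of every light is folded into a running sum starting from black and the total is tinted by the surface color; otherwise the scene's background color is returned. A standalone illustration of that accumulate-then-tint pattern, using plain f64 brightness values in place of the project's Color type:

// Accumulate per-light contributions, then scale by the surface color;
// fall back to the background when nothing was hit. Illustrative only.
fn shade(light_contributions: &[f64], surface_color: f64, background: f64, hit: bool) -> f64 {
    if hit {
        light_contributions.iter().fold(0.0, |acc, c| acc + c) * surface_color
    } else {
        background
    }
}

fn main() {
    assert_eq!(shade(&[0.25, 0.5], 0.5, 0.1, true), 0.375);
    assert_eq!(shade(&[0.25, 0.5], 0.5, 0.1, false), 0.1);
}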