Next I'll walk you through writing this ray tracer step by step, adding some of my own debugging tips along the way. At the end you'll have a ray tracer that renders some great images. You should be able to do this in a weekend; if it takes longer, don't worry about it. I use C++ as the implementation language. You don't have to, but I recommend it, because C++ is fast and portable, and most production film and game renderers are written in it. Note that I avoid most of the modern features of C++, but inheritance and operator overloading are too useful for a ray tracer to pass up. I do not provide the code online, but the code is real and I show all of it except for a few straightforward operators in the vec3 class. I am a big believer in typing in code to learn it, but when code is available I use it, so I only practice what I preach when the code is not available. So don't ask!
for (int j = 0; j < image_height; j++) {
    for (int i = 0; i < image_width; i++) {
        auto r = double(i) / (image_width-1);
        auto g = double(j) / (image_height-1);
        auto b = 0.0;

        int ir = int(255.999 * r);
        int ig = int(255.999 * g);
        int ib = int(255.999 * b);
for (int j = 0; j < image_height; j++) {
    std::clog << "\rScanlines remaining: " << (image_height - j) << ' ' << std::flush;
    for (int i = 0; i < image_width; i++) {
        auto r = double(i) / (image_width-1);
        auto g = double(j) / (image_height-1);
        auto b = 0.0;

        int ir = int(255.999 * r);
        int ig = int(255.999 * g);
        int ib = int(255.999 * b);
void write_color(std::ostream& out, const color& pixel_color) {
    auto r = pixel_color.x();
    auto g = pixel_color.y();
    auto b = pixel_color.z();

    // Translate the [0,1] component values to the byte range [0,255].
    int rbyte = int(255.999 * r);
    int gbyte = int(255.999 * g);
    int bbyte = int(255.999 * b);

    // Write out the pixel color components.
    out << rbyte << ' ' << gbyte << ' ' << bbyte << '\n';
}
auto aspect_ratio = 16.0 / 9.0;
int image_width = 400;

// Calculate the image height, and ensure that it's at least 1.
int image_height = int(image_width / aspect_ratio);
image_height = (image_height < 1) ? 1 : image_height;

// Viewport widths less than one are ok since they are real valued.
auto viewport_height = 2.0;
auto viewport_width = viewport_height * (double(image_width)/image_height);
color ray_color(const ray& r) {
    return color(0,0,0);
}

int main() {
    // Image

    auto aspect_ratio = 16.0 / 9.0;
    int image_width = 400;

    // Calculate the image height, and ensure that it's at least 1.
    int image_height = int(image_width / aspect_ratio);
    image_height = (image_height < 1) ? 1 : image_height;

    // Camera

    auto focal_length = 1.0;
    auto viewport_height = 2.0;
    auto viewport_width = viewport_height * (double(image_width)/image_height);
    auto camera_center = point3(0, 0, 0);

    // Calculate the vectors across the horizontal and down the vertical viewport edges.
    auto viewport_u = vec3(viewport_width, 0, 0);
    auto viewport_v = vec3(0, -viewport_height, 0);

    // Calculate the horizontal and vertical delta vectors from pixel to pixel.
    auto pixel_delta_u = viewport_u / image_width;
    auto pixel_delta_v = viewport_v / image_height;

    // Calculate the location of the upper left pixel.
    auto viewport_upper_left = camera_center
                             - vec3(0, 0, focal_length) - viewport_u/2 - viewport_v/2;
    auto pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);
bool hit_sphere(const point3& center, double radius, const ray& r) {
    vec3 oc = center - r.origin();
    auto a = dot(r.direction(), r.direction());
    auto b = -2.0 * dot(r.direction(), oc);
    auto c = dot(oc, oc) - radius*radius;
    auto discriminant = b*b - 4*a*c;
    return (discriminant >= 0);
}
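For reference, this quadratic comes straight from substituting the ray $P(t) = Q + t\mathbf{d}$ (origin $Q$, direction $\mathbf{d}$) into the sphere equation $|P(t) - C|^2 = r^2$ and expanding:

$$(\mathbf{d}\cdot\mathbf{d})\,t^2 - 2\,\mathbf{d}\cdot(C-Q)\,t + (C-Q)\cdot(C-Q) - r^2 = 0,$$

so $a = \mathbf{d}\cdot\mathbf{d}$, $b = -2\,\mathbf{d}\cdot(C-Q)$, $c = (C-Q)\cdot(C-Q) - r^2$, and the ray hits the sphere exactly when the discriminant $b^2 - 4ac$ is non-negative.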
color ray_color(const ray& r) {
    if (hit_sphere(point3(0,0,-1), 0.5, r))
        return color(1, 0, 0);

    vec3 unit_direction = unit_vector(r.direction());
    auto a = 0.5*(unit_direction.y() + 1.0);
    return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
}
// main.cc: surface normals on a sphere
double hit_sphere(const vec3& center, double radius, const ray& r) {
    vec3 oc = r.origin() - center;
    auto a = dot(r.direction(), r.direction());
    auto b = 2.0 * dot(oc, r.direction());
    auto c = dot(oc, oc) - radius*radius;
    auto discriminant = b*b - 4*a*c;
    if (discriminant < 0) {
        return -1.0;
    } else {
        return (-b - sqrt(discriminant)) / (2.0*a);
    }
}

vec3 ray_color(const ray& r) {
    auto t = hit_sphere(vec3(0,0,-1), 0.5, r);
    if (t > 0.0) {
        vec3 N = unit_vector(r.at(t) - vec3(0,0,-1));
        return 0.5*vec3(N.x()+1, N.y()+1, N.z()+1);
    }
    vec3 unit_direction = unit_vector(r.direction());
    t = 0.5*(unit_direction.y() + 1.0);
    return (1.0-t)*vec3(1.0, 1.0, 1.0) + t*vec3(0.5, 0.7, 1.0);
}
This gives us the following result:
6.2 Simplifying the Ray-Sphere Intersection Code
Let's revisit the ray-sphere intersection function:
double hit_sphere(const point3& center, double radius, const ray& r) {
    vec3 oc = center - r.origin();
    auto a = dot(r.direction(), r.direction());
    auto b = -2.0 * dot(r.direction(), oc);
    auto c = dot(oc, oc) - radius*radius;
    auto discriminant = b*b - 4*a*c;

    if (discriminant < 0) {
        return -1.0;
    } else {
        return (-b - std::sqrt(discriminant)) / (2.0*a);
    }
}
double hit_sphere(const point3& center, double radius, const ray& r) {
    vec3 oc = center - r.origin();
    auto a = r.direction().length_squared();
    auto h = dot(r.direction(), oc);
    auto c = oc.length_squared() - radius*radius;
    auto discriminant = h*h - a*c;

    if (discriminant < 0) {
        return -1.0;
    } else {
        return (h - std::sqrt(discriminant)) / a;
    }
}
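The simplification works because $b$ carries a factor of $-2$. Setting $b = -2h$ with $h = \mathbf{d}\cdot(C-Q)$, the quadratic formula collapses:

$$t = \frac{-b \pm \sqrt{b^2 - 4ac}}{2a} = \frac{h \pm \sqrt{h^2 - ac}}{a},$$

which is exactly the discriminant $h^2 - ac$ and nearest root $(h - \sqrt{h^2 - ac})/a$ computed above. Using length_squared() also avoids a redundant square root and dot product.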
class sphere : public hittable {
  public:
    sphere(const point3& center, double radius) : center(center), radius(std::fmax(0,radius)) {}

    bool hit(const ray& r, double ray_tmin, double ray_tmax, hit_record& rec) const override {
        vec3 oc = center - r.origin();
        auto a = r.direction().length_squared();
        auto h = dot(r.direction(), oc);
        auto c = oc.length_squared() - radius*radius;

        auto discriminant = h*h - a*c;
        if (discriminant < 0)
            return false;

        auto sqrtd = std::sqrt(discriminant);

        // Find the nearest root that lies in the acceptable range.
        auto root = (h - sqrtd) / a;
        if (root <= ray_tmin || ray_tmax <= root) {
            root = (h + sqrtd) / a;
            if (root <= ray_tmin || ray_tmax <= root)
                return false;
        }

        rec.t = root;
        rec.p = r.at(rec.t);
        rec.normal = (rec.p - center) / radius;

        return true;
    }

  private:
    point3 center;
    double radius;
};
bool front_face;
if (dot(ray_direction, outward_normal) > 0.0) {
    // ray is inside the sphere
    normal = -outward_normal;
    front_face = false;
} else {
    // ray is outside the sphere
    normal = outward_normal;
    front_face = true;
}
void set_face_normal(const ray& r, const vec3& outward_normal) {
    // Sets the hit record normal vector.
    // NOTE: the parameter `outward_normal` is assumed to have unit length.

    front_face = dot(r.direction(), outward_normal) < 0;
    normal = front_face ? outward_normal : -outward_normal;
}
auto double_ptr = make_shared<double>(0.37);
auto vec3_ptr   = make_shared<vec3>(1.414214, 2.718281, 1.618034);
auto sphere_ptr = make_shared<sphere>(point3(0,0,0), 1.0);
    vec3 unit_direction = unit_vector(r.direction());
    auto a = 0.5*(unit_direction.y() + 1.0);
    return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
}
int main() {
    // Image

    auto aspect_ratio = 16.0 / 9.0;
    int image_width = 400;

    // Calculate the image height, and ensure that it's at least 1.
    int image_height = int(image_width / aspect_ratio);
    image_height = (image_height < 1) ? 1 : image_height;

    // World

    hittable_list world;

    world.add(make_shared<sphere>(point3(0,0,-1), 0.5));
    world.add(make_shared<sphere>(point3(0,-100.5,-1), 100));

    // Camera

    auto focal_length = 1.0;
    auto viewport_height = 2.0;
    auto viewport_width = viewport_height * (double(image_width)/image_height);
    auto camera_center = point3(0, 0, 0);

    // Calculate the vectors across the horizontal and down the vertical viewport edges.
    auto viewport_u = vec3(viewport_width, 0, 0);
    auto viewport_v = vec3(0, -viewport_height, 0);

    // Calculate the horizontal and vertical delta vectors from pixel to pixel.
    auto pixel_delta_u = viewport_u / image_width;
    auto pixel_delta_v = viewport_v / image_height;

    // Calculate the location of the upper left pixel.
    auto viewport_upper_left = camera_center
                             - vec3(0, 0, focal_length) - viewport_u/2 - viewport_v/2;
    auto pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);

    // Render

    std::cout << "P3\n" << image_width << ' ' << image_height << "\n255\n";

    for (int j = 0; j < image_height; j++) {
        std::clog << "\rScanlines remaining: " << (image_height - j) << ' ' << std::flush;
        for (int i = 0; i < image_width; i++) {
            auto pixel_center = pixel00_loc + (i * pixel_delta_u) + (j * pixel_delta_v);
            auto ray_direction = pixel_center - camera_center;
            ray r(camera_center, ray_direction);

            color pixel_color = ray_color(r, world);
            write_color(std::cout, pixel_color);
        }
    }

    std::clog << "\rDone.                 \n";
}
  private:
    int    image_height;   // Rendered image height
    point3 center;         // Camera center
    point3 pixel00_loc;    // Location of pixel 0, 0
    vec3   pixel_delta_u;  // Offset to pixel to the right
    vec3   pixel_delta_v;  // Offset to pixel below

    void initialize() {
        // Determine viewport dimensions.
        auto focal_length = 1.0;
        auto viewport_height = 2.0;
        auto viewport_width = viewport_height * (double(image_width)/image_height);

        // Calculate the vectors across the horizontal and down the vertical viewport edges.
        auto viewport_u = vec3(viewport_width, 0, 0);
        auto viewport_v = vec3(0, -viewport_height, 0);

        // Calculate the horizontal and vertical delta vectors from pixel to pixel.
        pixel_delta_u = viewport_u / image_width;
        pixel_delta_v = viewport_v / image_height;

        // Calculate the location of the upper left pixel.
        auto viewport_upper_left = center - vec3(0, 0, focal_length) - viewport_u/2 - viewport_v/2;
        pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);
    }

    color ray_color(const ray& r, const hittable& world) const {
        ...
    }
};
void write_color(std::ostream& out, const color& pixel_color) {
    auto r = pixel_color.x();
    auto g = pixel_color.y();
    auto b = pixel_color.z();

    // Translate the [0,1] component values to the byte range [0,255].
    static const interval intensity(0.000, 0.999);
    int rbyte = int(256 * intensity.clamp(r));
    int gbyte = int(256 * intensity.clamp(g));
    int bbyte = int(256 * intensity.clamp(b));

    // Write out the pixel color components.
    out << rbyte << ' ' << gbyte << ' ' << bbyte << '\n';
}
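The intensity.clamp() call above assumes the interval class has a clamp method. If yours doesn't yet, a minimal sketch of it (added inside the interval class, which stores min and max) looks like this:

double clamp(double x) const {
    // Clamp x to lie within this interval's [min, max] range.
    if (x < min) return min;
    if (x > max) return max;
    return x;
}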
Now let’s update the camera class to define and use a new camera::get_ray(i,j) function, which will generate different samples for each pixel. This function will use a new helper function sample_square() that generates a random sample point within the unit square centered at the origin. We then transform the random sample from this ideal square back to the particular pixel we’re currently sampling.
class camera {
  public:
    double aspect_ratio      = 1.0;  // Ratio of image width over height
    int    image_width       = 100;  // Rendered image width in pixel count
    int    samples_per_pixel = 10;   // Count of random samples for each pixel
        for (int j = 0; j < image_height; j++) {
            std::clog << "\rScanlines remaining: " << (image_height - j) << ' ' << std::flush;
            for (int i = 0; i < image_width; i++) {
                color pixel_color(0,0,0);
                for (int sample = 0; sample < samples_per_pixel; sample++) {
                    ray r = get_ray(i, j);
                    pixel_color += ray_color(r, world);
                }
                write_color(std::cout, pixel_samples_scale * pixel_color);
            }
        }

        std::clog << "\rDone.                 \n";
    }
    ...
  private:
    int    image_height;        // Rendered image height
    double pixel_samples_scale; // Color scale factor for a sum of pixel samples
    point3 center;              // Camera center
    point3 pixel00_loc;         // Location of pixel 0, 0
    vec3   pixel_delta_u;       // Offset to pixel to the right
    vec3   pixel_delta_v;       // Offset to pixel below
    ray get_ray(int i, int j) const {
        // Construct a camera ray originating from the origin and directed at randomly sampled
        // point around the pixel location i, j.

        auto offset = sample_square();
        auto pixel_sample = pixel00_loc
                          + ((i + offset.x()) * pixel_delta_u)
                          + ((j + offset.y()) * pixel_delta_v);

        auto ray_origin = center;
        auto ray_direction = pixel_sample - ray_origin;

        return ray(ray_origin, ray_direction);
    }

    vec3 sample_square() const {
        // Returns the vector to a random point in the [-.5,-.5]-[+.5,+.5] unit square.
        return vec3(random_double() - 0.5, random_double() - 0.5, 0);
    }

    color ray_color(const ray& r, const hittable& world) const {
        ...
    }
};
inline vec3 unit_vector(const vec3& v) {
    return v / v.length();
}
inline vec3 random_unit_vector() {
    while (true) {
        auto p = vec3::random(-1,1);
        auto lensq = p.length_squared();
        if (lensq <= 1)
            return p / sqrt(lensq);
    }
}
There's a catch: a point very close to the center can have a squared length that underflows to zero, and dividing by that produces a bogus (infinite or NaN) vector. So we also reject points that fall inside a tiny radius around the center:

inline vec3 random_unit_vector() {
    while (true) {
        auto p = vec3::random(-1,1);
        auto lensq = p.length_squared();
        if (1e-160 < lensq && lensq <= 1)
            return p / sqrt(lensq);
    }
}
inline vec3 random_on_hemisphere(const vec3& normal) {
    vec3 on_unit_sphere = random_unit_vector();
    if (dot(on_unit_sphere, normal) > 0.0) // In the same hemisphere as the normal
        return on_unit_sphere;
    else
        return -on_unit_sphere;
}
class camera {
  public:
    double aspect_ratio      = 1.0;  // Ratio of image width over height
    int    image_width       = 100;  // Rendered image width in pixel count
    int    samples_per_pixel = 10;   // Count of random samples for each pixel
    int    max_depth         = 10;   // Maximum number of ray bounces into scene
        for (int j = 0; j < image_height; j++) {
            std::clog << "\rScanlines remaining: " << (image_height - j) << ' ' << std::flush;
            for (int i = 0; i < image_width; i++) {
                color pixel_color(0,0,0);
                for (int sample = 0; sample < samples_per_pixel; sample++) {
                    ray r = get_ray(i, j);
                    pixel_color += ray_color(r, max_depth, world);
                }
                write_color(std::cout, pixel_samples_scale * pixel_color);
            }
        }

        std::clog << "\rDone.                 \n";
    }
    ...
  private:
    ...
    color ray_color(const ray& r, int depth, const hittable& world) const {
        // If we've exceeded the ray bounce limit, no more light is gathered.
        if (depth <= 0)
            return color(0,0,0);

        hit_record rec;

        if (world.hit(r, interval(0, infinity), rec)) {
            vec3 direction = random_on_hemisphere(rec.normal);
            return 0.5 * ray_color(ray(rec.p, direction), depth-1, world);
        }

        vec3 unit_direction = unit_vector(r.direction());
        auto a = 0.5*(unit_direction.y() + 1.0);
        return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
    }
};
class camera {
    ...
  private:
    ...
    color ray_color(const ray& r, int depth, const hittable& world) const {
        // If we've exceeded the ray bounce limit, no more light is gathered.
        if (depth <= 0)
            return color(0,0,0);

        hit_record rec;

        if (world.hit(r, interval(0.001, infinity), rec)) {
            vec3 direction = random_on_hemisphere(rec.normal);
            return 0.5 * ray_color(ray(rec.p, direction), depth-1, world);
        }

        vec3 unit_direction = unit_vector(r.direction());
        auto a = 0.5*(unit_direction.y() + 1.0);
        return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
    }
};
class camera {
    ...
    color ray_color(const ray& r, int depth, const hittable& world) const {
        // If we've exceeded the ray bounce limit, no more light is gathered.
        if (depth <= 0)
            return color(0,0,0);

        hit_record rec;

        if (world.hit(r, interval(0.001, infinity), rec)) {
            vec3 direction = rec.normal + random_unit_vector();
            return 0.1 * ray_color(ray(rec.p, direction), depth-1, world);
        }

        vec3 unit_direction = unit_vector(r.direction());
        auto a = 0.5*(unit_direction.y() + 1.0);
        return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
    }
};
inline double linear_to_gamma(double linear_component) {
    if (linear_component > 0)
        return std::sqrt(linear_component);

    return 0;
}
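As a quick worked example of why this matters: gamma-2 encoding stores the square root of the linear value, so a surface reflecting 25% of the light is written out as $\sqrt{0.25} = 0.5$, i.e. a perceptual middle gray on screen, rather than the much-too-dark result you'd get by writing the linear 0.25 directly.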
void write_color(std::ostream& out, const color& pixel_color) {
    auto r = pixel_color.x();
    auto g = pixel_color.y();
    auto b = pixel_color.z();

    // Apply a linear to gamma transform for gamma 2
    r = linear_to_gamma(r);
    g = linear_to_gamma(g);
    b = linear_to_gamma(b);

    // Translate the [0,1] component values to the byte range [0,255].
    static const interval intensity(0.000, 0.999);
    int rbyte = int(256 * intensity.clamp(r));
    int gbyte = int(256 * intensity.clamp(g));
    int bbyte = int(256 * intensity.clamp(b));

    // Write out the pixel color components.
    out << rbyte << ' ' << gbyte << ' ' << bbyte << '\n';
}
Using a hit_record avoids passing a bundle of arguments, and lets us stuff in whatever information we want; whether you prefer arguments or an encapsulating type is a matter of taste. Hittables and materials need to be able to reference each other's type in code, so there is some circularity in the references. In C++, we add the line class material; to tell the compiler that material is a class that will be defined later. Since we're only specifying a pointer to the class, the compiler doesn't need to know its details, which resolves the circular reference problem.
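Concretely, the top of hittable.h ends up looking roughly like this (a sketch; the field list matches the book's hit_record):

class material;   // Forward declaration: only used through a pointer below.

class hit_record {
  public:
    point3 p;
    vec3 normal;
    shared_ptr<material> mat;
    double t;
    bool front_face;
    ...
};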
bool near_zero() const {
    // Return true if the vector is close to zero in all dimensions.
    auto s = 1e-8;
    return (std::fabs(e[0]) < s) && (std::fabs(e[1]) < s) && (std::fabs(e[2]) < s);
}
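near_zero() exists to catch a degenerate case in the Lambertian material: if the random unit vector lands exactly opposite the normal, the two sum to zero and yield a useless scatter direction. A sketch of the scatter function that uses it (following the book's lambertian material, where albedo is the material's private color member):

bool scatter(const ray& r_in, const hit_record& rec, color& attenuation, ray& scattered)
const override {
    auto scatter_direction = rec.normal + random_unit_vector();

    // Catch degenerate scatter direction
    if (scatter_direction.near_zero())
        scatter_direction = rec.normal;

    scattered = ray(rec.p, scatter_direction);
    attenuation = albedo;
    return true;
}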
class camera {
    ...
  private:
    ...
    color ray_color(const ray& r, int depth, const hittable& world) const {
        // If we've exceeded the ray bounce limit, no more light is gathered.
        if (depth <= 0)
            return color(0,0,0);

        hit_record rec;

        if (world.hit(r, interval(0.001, infinity), rec)) {
            ray scattered;
            color attenuation;
            if (rec.mat->scatter(r, rec, attenuation, scattered))
                return attenuation * ray_color(scattered, depth-1, world);
            return color(0,0,0);
        }

        vec3 unit_direction = unit_vector(r.direction());
        auto a = 0.5*(unit_direction.y() + 1.0);
        return (1.0-a)*color(1.0, 1.0, 1.0) + a*color(0.5, 0.7, 1.0);
    }
};
We also need to update the sphere constructor to initialize the material pointer mat:
class sphere : public hittable {
  public:
    sphere(const point3& center, double radius, shared_ptr<material> mat)
      : center(center), radius(std::fmax(0,radius)), mat(mat) {}
  private:
    // Refractive index in vacuum or air, or the ratio of the material's refractive index
    // over the refractive index of the enclosing media
    double refraction_index;
};
Now we'll update the scene so that the left sphere is glass with a refractive index of 1.5:
auto material_ground = make_shared<lambertian>(color(0.8, 0.8, 0.0));
auto material_center = make_shared<lambertian>(color(0.1, 0.2, 0.5));
auto material_left   = make_shared<dielectric>(1.50);
auto material_right  = make_shared<metal>(color(0.8, 0.6, 0.2), 1.0);
        bool cannot_refract = ri * sin_theta > 1.0;
        vec3 direction;

        if (cannot_refract)
            direction = reflect(unit_direction, rec.normal);
        else
            direction = refract(unit_direction, rec.normal, ri);

        scattered = ray(rec.p, direction);
        return true;
    }

  private:
    // Refractive index in vacuum or air, or the ratio of the material's refractive index
    // over the refractive index of the enclosing media
    double refraction_index;
};
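To see when cannot_refract fires, take a ray inside glass ($\eta = 1.5$) heading out into air ($\eta' = 1.0$). Snell's law demands

$$\sin\theta' = \frac{1.5}{1.0}\sin\theta,$$

which has no solution once $\sin\theta > 1/1.5 \approx 0.667$, i.e. for incidence angles above roughly 41.8°. Those rays cannot refract and must reflect back into the glass: total internal reflection.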
        bool cannot_refract = ri * sin_theta > 1.0;
        vec3 direction;

        if (cannot_refract || reflectance(cos_theta, ri) > random_double())
            direction = reflect(unit_direction, rec.normal);
        else
            direction = refract(unit_direction, rec.normal, ri);

        scattered = ray(rec.p, direction);
        return true;
    }

  private:
    // Refractive index in vacuum or air, or the ratio of the material's refractive index
    // over the refractive index of the enclosing media
    double refraction_index;
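The reflectance() call above is Schlick's polynomial approximation for glass's angle-dependent reflectivity; it rounds out the private section of the class:

    static double reflectance(double cosine, double refraction_index) {
        // Use Schlick's approximation for reflectance.
        auto r0 = (1 - refraction_index) / (1 + refraction_index);
        r0 = r0*r0;
        return r0 + (1-r0)*std::pow((1 - cosine), 5);
    }
};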
class camera {
  public:
    double aspect_ratio      = 1.0;  // Ratio of image width over height
    int    image_width       = 100;  // Rendered image width in pixel count
    int    samples_per_pixel = 10;   // Count of random samples for each pixel
    int    max_depth         = 10;   // Maximum number of ray bounces into scene
        // Determine viewport dimensions.
        auto focal_length = 1.0;
        auto theta = degrees_to_radians(vfov);
        auto h = std::tan(theta/2);
        auto viewport_height = 2 * h * focal_length;
        auto viewport_width = viewport_height * (double(image_width)/image_height);

        // Calculate the vectors across the horizontal and down the vertical viewport edges.
        auto viewport_u = vec3(viewport_width, 0, 0);
        auto viewport_v = vec3(0, -viewport_height, 0);

        // Calculate the horizontal and vertical delta vectors from pixel to pixel.
        pixel_delta_u = viewport_u / image_width;
        pixel_delta_v = viewport_v / image_height;

        // Calculate the location of the upper left pixel.
        auto viewport_upper_left = center - vec3(0, 0, focal_length) - viewport_u/2 - viewport_v/2;
        pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);
    }
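As a sanity check: with the default vfov = 90 we get $h = \tan(45°) = 1$, so viewport_height = 2 * focal_length = 2, exactly the hard-coded viewport height we had before making the field of view adjustable.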
class camera {
  public:
    double aspect_ratio      = 1.0;  // Ratio of image width over height
    int    image_width       = 100;  // Rendered image width in pixel count
    int    samples_per_pixel = 10;   // Count of random samples for each pixel
    int    max_depth         = 10;   // Maximum number of ray bounces into scene

    double vfov     = 90;              // Vertical view angle (field of view)
    point3 lookfrom = point3(0,0,0);   // Point camera is looking from
    point3 lookat   = point3(0,0,-1);  // Point camera is looking at
    vec3   vup      = vec3(0,1,0);     // Camera-relative "up" direction

    ...

  private:
    int    image_height;        // Rendered image height
    double pixel_samples_scale; // Color scale factor for a sum of pixel samples
    point3 center;              // Camera center
    point3 pixel00_loc;         // Location of pixel 0, 0
    vec3   pixel_delta_u;       // Offset to pixel to the right
    vec3   pixel_delta_v;       // Offset to pixel below
    vec3   u, v, w;             // Camera frame basis vectors
        // Determine viewport dimensions.
        auto focal_length = (lookfrom - lookat).length();
        auto theta = degrees_to_radians(vfov);
        auto h = std::tan(theta/2);
        auto viewport_height = 2 * h * focal_length;
        auto viewport_width = viewport_height * (double(image_width)/image_height);

        // Calculate the u,v,w unit basis vectors for the camera coordinate frame.
        w = unit_vector(lookfrom - lookat);
        u = unit_vector(cross(vup, w));
        v = cross(w, u);

        // Calculate the vectors across the horizontal and down the vertical viewport edges.
        vec3 viewport_u = viewport_width * u;    // Vector across viewport horizontal edge
        vec3 viewport_v = viewport_height * -v;  // Vector down viewport vertical edge

        // Calculate the horizontal and vertical delta vectors from pixel to pixel.
        pixel_delta_u = viewport_u / image_width;
        pixel_delta_v = viewport_v / image_height;

        // Calculate the location of the upper left pixel.
        auto viewport_upper_left = center - (focal_length * w) - viewport_u/2 - viewport_v/2;
        pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);
    }
inline vec3 unit_vector(const vec3& v) {
    return v / v.length();
}
inline vec3 random_in_unit_disk() {
    while (true) {
        auto p = vec3(random_double(-1,1), random_double(-1,1), 0);
        if (p.length_squared() < 1)
            return p;
    }
}
class camera {
  public:
    double aspect_ratio      = 1.0;  // Ratio of image width over height
    int    image_width       = 100;  // Rendered image width in pixel count
    int    samples_per_pixel = 10;   // Count of random samples for each pixel
    int    max_depth         = 10;   // Maximum number of ray bounces into scene

    double vfov     = 90;              // Vertical view angle (field of view)
    point3 lookfrom = point3(0,0,0);   // Point camera is looking from
    point3 lookat   = point3(0,0,-1);  // Point camera is looking at
    vec3   vup      = vec3(0,1,0);     // Camera-relative "up" direction

    double defocus_angle = 0;   // Variation angle of rays through each pixel
    double focus_dist    = 10;  // Distance from camera lookfrom point to plane of perfect focus

    ...

  private:
    int    image_height;        // Rendered image height
    double pixel_samples_scale; // Color scale factor for a sum of pixel samples
    point3 center;              // Camera center
    point3 pixel00_loc;         // Location of pixel 0, 0
    vec3   pixel_delta_u;       // Offset to pixel to the right
    vec3   pixel_delta_v;       // Offset to pixel below
    vec3   u, v, w;             // Camera frame basis vectors
    vec3   defocus_disk_u;      // Defocus disk horizontal radius
    vec3   defocus_disk_v;      // Defocus disk vertical radius
        // Determine viewport dimensions.
        auto theta = degrees_to_radians(vfov);
        auto h = std::tan(theta/2);
        auto viewport_height = 2 * h * focus_dist;
        auto viewport_width = viewport_height * (double(image_width)/image_height);

        // Calculate the u,v,w unit basis vectors for the camera coordinate frame.
        w = unit_vector(lookfrom - lookat);
        u = unit_vector(cross(vup, w));
        v = cross(w, u);

        // Calculate the vectors across the horizontal and down the vertical viewport edges.
        vec3 viewport_u = viewport_width * u;    // Vector across viewport horizontal edge
        vec3 viewport_v = viewport_height * -v;  // Vector down viewport vertical edge

        // Calculate the horizontal and vertical delta vectors to the next pixel.
        pixel_delta_u = viewport_u / image_width;
        pixel_delta_v = viewport_v / image_height;

        // Calculate the location of the upper left pixel.
        auto viewport_upper_left = center - (focus_dist * w) - viewport_u/2 - viewport_v/2;
        pixel00_loc = viewport_upper_left + 0.5 * (pixel_delta_u + pixel_delta_v);

        // Calculate the camera defocus disk basis vectors.
        auto defocus_radius = focus_dist * std::tan(degrees_to_radians(defocus_angle / 2));
        defocus_disk_u = u * defocus_radius;
        defocus_disk_v = v * defocus_radius;
    }
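The defocus disk radius is simple trigonometry: the disk at the camera center and the plane of perfect focus at focus_dist form a cone whose apex angle is defocus_angle, so radius = focus_dist * tan(defocus_angle / 2). For example, with defocus_angle = 10 and focus_dist = 3.4 (the values used for the depth-of-field render), the radius is $3.4 \cdot \tan(5°) \approx 0.30$.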
    ray get_ray(int i, int j) const {
        // Construct a camera ray originating from the defocus disk and directed at a randomly
        // sampled point around the pixel location i, j.

        auto offset = sample_square();
        auto pixel_sample = pixel00_loc
                          + ((i + offset.x()) * pixel_delta_u)
                          + ((j + offset.y()) * pixel_delta_v);

        auto ray_origin = (defocus_angle <= 0) ? center : defocus_disk_sample();
        auto ray_direction = pixel_sample - ray_origin;

        return ray(ray_origin, ray_direction);
    }

    vec3 sample_square() const {
        ...
    }

    point3 defocus_disk_sample() const {
        // Returns a random point in the camera defocus disk.
        auto p = random_in_unit_disk();
        return center + (p[0] * defocus_disk_u) + (p[1] * defocus_disk_v);
    }

    color ray_color(const ray& r, int depth, const hittable& world) const {
        ...
    }
};
auto ground_material = make_shared<lambertian>(color(0.5, 0.5, 0.5));
world.add(make_shared<sphere>(point3(0,-1000,0), 1000, ground_material));

for (int a = -11; a < 11; a++) {
    for (int b = -11; b < 11; b++) {
        auto choose_mat = random_double();
        point3 center(a + 0.9*random_double(), 0.2, b + 0.9*random_double());