Tags: c++, graphics, camera, aspect-ratio, raytracing

(Ray tracing) Trouble converting to screen coordinates, objects being stretched


I followed along with Ray Tracing in One Weekend and managed to get the final output, but I wanted to understand more about creating a camera and "painting" the screen, since the book doesn't go over it in much depth.

When I tried a different way of creating the camera, the spheres got elongated, making them look more like ellipses. I've tried modifying the x and y assignments in screenCords, but I've only managed to create other errors (such as objects wrapping around to the opposite side).

Camera.h:

#pragma once

#include "../Matrix.h"
#include "../Defs.h"
#include "Defs.h"

template<typename O>
using Point3 = Vec3<O>;

template<typename O>
using Color = Vec3<O>;

template <typename O>
class Camera{
  O Height;
  O Width;
  Vec3<O> Forward, Right, Up;
  Point3<O> Origin;

public:
  Camera(O fov, O aspect_ratio, Point3<O> origin, Point3<O> target, Vec3<O> upguide) {
    Height = atan(degrees_to_radians(fov));
    Width = Height * aspect_ratio;
    
    Origin = origin;

    Forward = target - origin;
    Forward.normalize();
    Right = Forward.cross(upguide);
    Right.normalize();
    Up = Right.cross(Forward);

  }

  Ray<O> get_raydir(O right, O up) {
    Vec3<O> result(Forward + right * Width * Right + up * Height * Up);
    result.normalize();

    return Ray<O>(Origin, result);
  }

  void screenCords(O &x, O &y, O width, O height) {
    x = ((2.0f * x) / width) - 1.0f;
    y = ((2.0f * y) / height);
  }
};

Main.cpp:

#include <iostream>
#include <cmath>
#include "../Matrix.h"
#include "Camera.h"
#include <vector>
#include "Image.h"
#include "Shapes.h"
#include "Tracer.h"
#include "../Defs.h"

template<typename O>
using Point3 = Vec3<O>;

template<typename O>
using Color = Vec3<O>;

int main(){
  const int img_ratio = 2;
  const int img_width = 640;
  const int img_height = 480;
  const int depth = 50; float t_Max = infinity; float t_Min = 0.001;

  float inv_width = 1 / float(img_width);
  float inv_height = 1 / float(img_height);

  std::vector<Sphere<float>> shapes;

  Camera<float> cam1(20.0f, img_ratio, Point3<float>(0.0f, 0.0f, 0.0f), Point3<float>(0.0f, 0.0f, -1.0f), Vec3<float>(0.0f, 1.0f, 0.0f));

  Sphere<float> cir1(0.2f, Point3<float>(0.2f, 0.0f, -1.0f));
  Sphere<float> cir2(7.0f, Point3<float>(0.0f, -7.0f, -1.0f));
  Sphere<float> cir3(0.5f, Point3<float>(1.0f, 0.0f, -1.0f));
  shapes.push_back(cir1);
  //shapes.push_back(cir2);
  //shapes.push_back(cir3);

  Tracer<float> tracer(shapes);

  std::cout << "P3\n" << img_width << ' ' << img_height << "\n255" << std::endl;

  Ray<float> ray(Point3<float>(0.0f), Vec3<float>(0.0f));

  for (int j = 0; j < img_height; j++)
  {
    std::cerr << "\rScanlines remaining: " << j << ' ' << std::flush;
    for (int i = 0; i < img_width; i++){

        float x = i;
        float y = j;

        cam1.screenCords(x, y, img_width, img_height);

        ray = cam1.get_raydir(x, y);
        //ray = Ray<float>(Vec3<float>(x1, y1, 1), Point3<float>(0.0f, 0.0f, 0.0f));
        tracer.iterator(ray, depth, t_Max, t_Min);
    }
  }
  std::cerr << "\n done " << std::endl;
}

I suspect the error is in one of these two files, since the spheres are actually being drawn and colored based on their normals (although, unsurprisingly, the normal colors at the top and bottom of the spheres are also bugged).

Here are a few examples of the output:

[Image: output with one sphere centered at the origin]

[Image: output with more spheres]


Solution

  • You should define

    const float img_ratio = (float)img_width/img_height;
    

    For a 640x480 image this works out to roughly 1.333, rather than the 2 used in your code.

    Also, in screenCords you subtract 1.0f from x but not from y, so x ends up in [-1, 1] while y stays in [0, 2]; that offset shifts the whole framing vertically (the "tilt-shift" look in your output). A corrected sketch follows below.
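
    Putting the two fixes together, a minimal sketch (reusing your Camera members and the existing screenCords signature) could look like this:

    // aspect ratio as a float (about 1.333 for 640x480), not the integer 2
    const float img_ratio = (float)img_width / img_height;

    // map pixel coordinates to [-1, 1] on both axes
    void screenCords(O &x, O &y, O width, O height) {
      x = ((2.0f * x) / width) - 1.0f;
      y = ((2.0f * y) / height) - 1.0f;
      // depending on which way Up points and the order you write scanlines,
      // you may also want to negate y so the first row lands at the top of the image
    }

    With img_ratio = 2 the horizontal extent of the view plane was roughly 1.5 times too large for a 640x480 image (2 / 1.333), which is what distorts the spheres into ellipses; once both coordinates sit in [-1, 1] and Width = Height * img_ratio uses the true 4:3 ratio, they should come out round.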