blender/intern/cycles/kernel/kernel_camera.h
Dalai Felinto d7fbe03a8a Fisheye Camera for Cycles
For sample images see:
http://www.dalaifelinto.com/?p=399 (equisolid)
http://www.dalaifelinto.com/?p=389 (equidistant)

The 'use_panorama' option is now part of a new Camera type: 'Panorama'.
Created two other panorama cameras:

- Equisolid: most fisheye lenses on the market simulate this model (e.g. Nikon, Canon, ...),
             so it works like a real lens up to an extent. The final result also takes the
             sensor dimensions into account (both mappings are sketched after this list).
             For example, to simulate a Nikon DX2S with a 10.5mm lens use:
                 sensor: 23.7 x 15.7
                 fisheye lens: 10.5
                 fisheye fov: 180
                 render dimensions: 4288 x 2848

- Equidistant: this is not a real lens model, although the old equidistant fisheye
               simulated the same mapping. The result is always a circular fisheye that
               covers the whole frame (in other words, it doesn't take the sensor
               dimensions into account).
               This is perfect for fulldomes ;)

               For the UI the field of view ranges from 10 to 360 degrees as soft limits
               and from 10 to 3600 as hard limits (because we can).
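
As a rough sketch of the difference (illustrative helpers, not the actual kernel code),
the two mappings from radius on the image to viewing angle boil down to:

    #include <math.h>

    /* Equisolid: r = 2*f*sin(theta/2), with r measured in mm on the sensor, so both
     * the focal length and the sensor dimensions matter. */
    static float equisolid_theta(float r_mm, float lens_mm)
    {
        return 2.0f * asinf(fminf(r_mm / (2.0f * lens_mm), 1.0f));
    }

    /* Equidistant: theta grows linearly with the normalized radius and the frame
     * always spans the chosen fov, independent of the sensor. */
    static float equidistant_theta(float r_norm, float fov_rad)
    {
        return r_norm * fov_rad * 0.5f;
    }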


Reference material:
http://www.hdrlabs.com/tutorials/downloads_files/HDRI%20for%20CGI.pdf
http://www.bobatkins.com/photography/technical/field_of_view.html

Note that this is not a real simulation of the light path through the lens.
The ideal solution would be along the lines of:
https://graphics.stanford.edu/wikis/cs348b-11/Assignment3
http://www.graphics.stanford.edu/papers/camera/


Thanks to Brecht for the fix, suggestions and code review.
Kudos to the dome community for keeping me stimulated on the topic since 2009 ;)

Patch partly implemented during lab time at VisGraf, IMPA - Rio de Janeiro.
2012-05-04 16:20:51 +00:00

224 lines
6.9 KiB
C

/*
 * Copyright 2011, Blender Foundation.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

CCL_NAMESPACE_BEGIN

/* Perspective Camera */

__device float2 camera_sample_aperture(KernelGlobals *kg, float u, float v)
{
    float blades = kernel_data.cam.blades;
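    /* blades == 0 selects a circular aperture, sampled uniformly with the concentric
     * disk mapping; a non-zero count samples a regular polygon with that many blades,
     * which shapes the bokeh. */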
    if(blades == 0.0f) {
        /* sample disk */
        return concentric_sample_disk(u, v);
    }
    else {
        /* sample polygon */
        float rotation = kernel_data.cam.bladesrotation;
        return regular_polygon_sample(blades, rotation, u, v);
    }
}

__device void camera_sample_perspective(KernelGlobals *kg, float raster_x, float raster_y, float lens_u, float lens_v, Ray *ray)
{
    /* create ray from raster position */
    Transform rastertocamera = kernel_data.cam.rastertocamera;
    float3 Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));

    ray->P = make_float3(0.0f, 0.0f, 0.0f);
    ray->D = Pcamera;

    /* modify ray for depth of field */
    float aperturesize = kernel_data.cam.aperturesize;

    if(aperturesize > 0.0f) {
        /* sample point on aperture */
        float2 lensuv = camera_sample_aperture(kg, lens_u, lens_v)*aperturesize;

        /* compute point on plane of focus */
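        /* scaling the unnormalized camera-space direction by ft = focaldistance/D.z
         * gives a point with depth z = focaldistance, i.e. Pfocus lies on the plane
         * of focus; it depends only on the raster position, so every lens sample for
         * this pixel is aimed at the same point. */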
        float ft = kernel_data.cam.focaldistance/ray->D.z;
        float3 Pfocus = ray->P + ray->D*ft;

        /* update ray for effect of lens */
        ray->P = make_float3(lensuv.x, lensuv.y, 0.0f);
        ray->D = normalize(Pfocus - ray->P);
    }

    /* transform ray from camera to world */
    Transform cameratoworld = kernel_data.cam.cameratoworld;

#ifdef __MOTION__
    if(kernel_data.cam.have_motion)
        transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
#endif

    ray->P = transform_point(&cameratoworld, ray->P);
    ray->D = transform_direction(&cameratoworld, ray->D);
    ray->D = normalize(ray->D);

#ifdef __RAY_DIFFERENTIALS__
    /* ray differential */
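    /* differentials are taken for the pinhole ray (Pcamera), ignoring the depth of
     * field offset: cam.dx/dy are the precomputed camera-space offsets for a one-pixel
     * raster step, and dD is the change of the normalized direction for that step,
     * while the origin differentials stay zero. */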
    float3 Ddiff = transform_direction(&cameratoworld, Pcamera);

    ray->dP.dx = make_float3(0.0f, 0.0f, 0.0f);
    ray->dP.dy = make_float3(0.0f, 0.0f, 0.0f);

    ray->dD.dx = normalize(Ddiff + float4_to_float3(kernel_data.cam.dx)) - normalize(Ddiff);
    ray->dD.dy = normalize(Ddiff + float4_to_float3(kernel_data.cam.dy)) - normalize(Ddiff);
#endif

#ifdef __CAMERA_CLIPPING__
    /* clipping */
    ray->P += kernel_data.cam.nearclip*ray->D;
    ray->t = kernel_data.cam.cliplength;
#else
    ray->t = FLT_MAX;
#endif
}

/* Orthographic Camera */

__device void camera_sample_orthographic(KernelGlobals *kg, float raster_x, float raster_y, Ray *ray)
{
    /* create ray from raster position */
    Transform rastertocamera = kernel_data.cam.rastertocamera;
    float3 Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));

    ray->P = Pcamera;
    ray->D = make_float3(0.0f, 0.0f, 1.0f);

    /* transform ray from camera to world */
    Transform cameratoworld = kernel_data.cam.cameratoworld;

#ifdef __MOTION__
    if(kernel_data.cam.have_motion)
        transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
#endif

    ray->P = transform_point(&cameratoworld, ray->P);
    ray->D = transform_direction(&cameratoworld, ray->D);
    ray->D = normalize(ray->D);

#ifdef __RAY_DIFFERENTIALS__
    /* ray differential */
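    /* orthographic rays are parallel: a one-pixel step only offsets the ray origin
     * (by cam.dx/dy), so the direction differentials are zero. */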
    ray->dP.dx = float4_to_float3(kernel_data.cam.dx);
    ray->dP.dy = float4_to_float3(kernel_data.cam.dy);

    ray->dD.dx = make_float3(0.0f, 0.0f, 0.0f);
    ray->dD.dy = make_float3(0.0f, 0.0f, 0.0f);
#endif

#ifdef __CAMERA_CLIPPING__
    /* clipping */
    ray->t = kernel_data.cam.cliplength;
#else
    ray->t = FLT_MAX;
#endif
}

/* Panorama Camera */

__device float3 panorama_to_direction(KernelGlobals *kg, float u, float v, Ray *ray)
{
    switch (kernel_data.cam.panorama_type) {
        case PANORAMA_EQUIRECTANGULAR:
            return equirectangular_to_direction(u, v);
            break;
        case PANORAMA_FISHEYE_EQUIDISTANT:
            return fisheye_to_direction(u, v, kernel_data.cam.fisheye_fov, ray);
            break;
        case PANORAMA_FISHEYE_EQUISOLID:
        default:
            return fisheye_equisolid_to_direction(u, v, kernel_data.cam.fisheye_lens, kernel_data.cam.fisheye_fov, kernel_data.cam.sensorwidth, kernel_data.cam.sensorheight, ray);
            break;
    }
}

__device void camera_sample_panorama(KernelGlobals *kg, float raster_x, float raster_y, Ray *ray)
{
    Transform rastertocamera = kernel_data.cam.rastertocamera;
    float3 Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));

    /* create ray from raster position */
    ray->P = make_float3(0.0f, 0.0f, 0.0f);

#ifdef __CAMERA_CLIPPING__
    /* clipping */
    ray->t = kernel_data.cam.cliplength;
#else
    ray->t = FLT_MAX;
#endif

    ray->D = panorama_to_direction(kg, Pcamera.x, Pcamera.y, ray);

    /* transform ray from camera to world */
    Transform cameratoworld = kernel_data.cam.cameratoworld;

#ifdef __MOTION__
    if(kernel_data.cam.have_motion)
        transform_motion_interpolate(&cameratoworld, &kernel_data.cam.motion, ray->time);
#endif

    ray->P = transform_point(&cameratoworld, ray->P);
    ray->D = transform_direction(&cameratoworld, ray->D);
    ray->D = normalize(ray->D);

#ifdef __RAY_DIFFERENTIALS__
    /* ray differential */
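    /* the panoramic mapping is non-linear, so instead of using a precomputed raster
     * step the differentials are taken by evaluating the full mapping at the
     * neighbouring raster positions and differencing against the primary direction. */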
    ray->dP.dx = make_float3(0.0f, 0.0f, 0.0f);
    ray->dP.dy = make_float3(0.0f, 0.0f, 0.0f);

    Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x + 1.0f, raster_y, 0.0f));
    ray->dD.dx = normalize(transform_direction(&cameratoworld, panorama_to_direction(kg, Pcamera.x, Pcamera.y, ray))) - ray->D;

    Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y + 1.0f, 0.0f));
    ray->dD.dy = normalize(transform_direction(&cameratoworld, panorama_to_direction(kg, Pcamera.x, Pcamera.y, ray))) - ray->D;
#endif
}

/* Common */

__device void camera_sample(KernelGlobals *kg, int x, int y, float filter_u, float filter_v,
    float lens_u, float lens_v, float time, Ray *ray)
{
    /* pixel filter */
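    /* importance sample the pixel filter: the __filter_table lookup turns the uniform
     * filter_u/filter_v samples into a filter-distributed subpixel offset (the table
     * holds the filter's inverted CDF, built on the host). */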
    float raster_x = x + kernel_tex_interp(__filter_table, filter_u, FILTER_TABLE_SIZE);
    float raster_y = y + kernel_tex_interp(__filter_table, filter_v, FILTER_TABLE_SIZE);

#ifdef __MOTION__
    /* motion blur */
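    /* map the uniform time sample into a shutter interval of width shuttertime centered
     * on the frame time (0.5); a zero shutter time disables motion blur. */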
    if(kernel_data.cam.shuttertime == 0.0f)
        ray->time = TIME_INVALID;
    else
        ray->time = 0.5f + (time - 0.5f)*kernel_data.cam.shuttertime;
#endif

    /* sample */
    if(kernel_data.cam.type == CAMERA_PERSPECTIVE)
        camera_sample_perspective(kg, raster_x, raster_y, lens_u, lens_v, ray);
    else if(kernel_data.cam.type == CAMERA_ORTHOGRAPHIC)
        camera_sample_orthographic(kg, raster_x, raster_y, ray);
    else
        camera_sample_panorama(kg, raster_x, raster_y, ray);
}

CCL_NAMESPACE_END