fix(client_core) Fix FFE shader for PhoneVR (#2083)
* fix(client_core) Fix FFE shader for PhoneVR

* fix(client_core) Fix typo from previous commit
yoyobuae authored Apr 24, 2024
1 parent becee48 commit aadb3ea
Showing 1 changed file with 92 additions and 51 deletions.
143 changes: 92 additions & 51 deletions alvr/client_core/cpp/ffr.cpp
@@ -5,20 +5,32 @@

#include "utils.h"

#include <glm/vec2.hpp>

using namespace std;
using namespace gl_render_utils;

namespace {
const string FFR_COMMON_SHADER_FORMAT = R"glsl(#version 300 es
precision highp float;
const uvec2 TARGET_RESOLUTION = uvec2(%u, %u);
const uvec2 OPTIMIZED_RESOLUTION = uvec2(%u, %u);
const vec2 EYE_SIZE_RATIO = vec2(%f, %f);
const vec2 CENTER_SIZE = vec2(%f, %f);
const vec2 CENTER_SHIFT = vec2(%f, %f);
const vec2 EDGE_RATIO = vec2(%f, %f);
const vec2 c1 = vec2(%f, %f);
const vec2 c2 = vec2(%f, %f);
const vec2 loBound = vec2(%f, %f);
const vec2 hiBound = vec2(%f, %f);
const vec2 loBoundC = vec2(%f, %f);
const vec2 hiBoundC = vec2(%f, %f);
const vec2 aleft = vec2(%f, %f);
const vec2 bleft = vec2(%f, %f);
const vec2 aright = vec2(%f, %f);
const vec2 bright = vec2(%f, %f);
const vec2 cright = vec2(%f, %f);
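// The vec2 constants above (c1 through cright) are precomputed on the CPU in
// FFR::Initialize and substituted into this shader source via string_format.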
vec2 TextureToEyeUV(vec2 textureUV, bool isRightEye) {
// flip distortion horizontally for right eye
// left: x * 2; right: (1 - x) * 2
@@ -31,50 +43,49 @@ const string FFR_COMMON_SHADER_FORMAT = R"glsl(#version 300 es
}
)glsl";

// Fragment shader which reverses the FFE-compressed image on the client side.
// Essentially it implements this function, once for each axis (X and Y, or U and V in texture space):
// https://www.desmos.com/calculator/cmvjr7ljje
//
// The function is the same for each axis. The code refers to the "left" and "right" edges, but
// those names only describe the left/right sides of the function; the edge actually being
// processed in the image may be the top or bottom one instead. Also, for the right eye the
// UVs are mirrored, so the "left" edge is actually the right one.
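// In outline, per axis (loBound and hiBound mark the region boundaries):
//   uncompressedUV = leftEdge(eyeUV)  if eyeUV < loBound
//   uncompressedUV = center(eyeUV)    if loBound <= eyeUV <= hiBound
//   uncompressedUV = rightEdge(eyeUV) if eyeUV > hiBound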
const string DECOMPRESS_AXIS_ALIGNED_FRAGMENT_SHADER = R"glsl(
uniform sampler2D tex0;
in vec2 uv;
out vec4 color;
void main() {
// Source UV spans across both eyes, so first transform
// the UV coordinates to be per-eye.
bool isRightEye = uv.x > 0.5;
vec2 eyeUV = TextureToEyeUV(uv, isRightEye);
vec2 c0 = (1. - CENTER_SIZE) * 0.5;
vec2 c1 = (EDGE_RATIO - 1.) * c0 * (CENTER_SHIFT + 1.) / EDGE_RATIO;
vec2 c2 = (EDGE_RATIO - 1.) * CENTER_SIZE + 1.;
vec2 loBound = c0 * (CENTER_SHIFT + 1.);
vec2 hiBound = c0 * (CENTER_SHIFT - 1.) + 1.;
vec2 underBound = vec2(eyeUV.x < loBound.x, eyeUV.y < loBound.y);
vec2 inBound = vec2(loBound.x < eyeUV.x && eyeUV.x < hiBound.x,
loBound.y < eyeUV.y && eyeUV.y < hiBound.y);
vec2 overBound = vec2(eyeUV.x > hiBound.x, eyeUV.y > hiBound.y);
// Now calculate the uncompressed UVs for the various regions of the image.
// There are three regions to consider: the "left", the middle and the "right"
vec2 center = (eyeUV - c1) * EDGE_RATIO / c2;
vec2 loBoundC = c0 * (CENTER_SHIFT + 1.) / c2;
vec2 hiBoundC = c0 * (CENTER_SHIFT - 1.) / c2 + 1.;
vec2 leftEdge = (-(c1 + c2 * loBoundC) / loBoundC +
sqrt(((c1 + c2 * loBoundC) / loBoundC) * ((c1 + c2 * loBoundC) / loBoundC) +
4. * c2 * (1. - EDGE_RATIO) / (EDGE_RATIO * loBoundC) * eyeUV)) /
(2. * c2 * (1. - EDGE_RATIO)) * (EDGE_RATIO * loBoundC);
vec2 rightEdge =
(-(c2 - EDGE_RATIO * c1 - 2. * EDGE_RATIO * c2 + c2 * EDGE_RATIO * (1. - hiBoundC) +
EDGE_RATIO) /
(EDGE_RATIO * (1. - hiBoundC)) +
sqrt(((c2 - EDGE_RATIO * c1 - 2. * EDGE_RATIO * c2 + c2 * EDGE_RATIO * (1. - hiBoundC) +
EDGE_RATIO) /
(EDGE_RATIO * (1. - hiBoundC))) *
((c2 - EDGE_RATIO * c1 - 2. * EDGE_RATIO * c2 +
c2 * EDGE_RATIO * (1. - hiBoundC) + EDGE_RATIO) /
(EDGE_RATIO * (1. - hiBoundC))) -
4. * ((c2 * EDGE_RATIO - c2) * (c1 - hiBoundC + hiBoundC * c2) /
(EDGE_RATIO * (1. - hiBoundC) * (1. - hiBoundC)) -
eyeUV * (c2 * EDGE_RATIO - c2) / (EDGE_RATIO * (1. - hiBoundC))))) /
(2. * c2 * (EDGE_RATIO - 1.)) * (EDGE_RATIO * (1. - hiBoundC));
vec2 uncompressedUV = underBound * leftEdge + inBound * center + overBound * rightEdge;
vec2 leftEdge = (-bleft + sqrt(bleft * bleft + 4. * aleft * eyeUV)) /
(2. * aleft);
vec2 rightEdge = (-bright + sqrt(bright * bright - 4. * (cright - aright * eyeUV))) / (2. * aright);
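// leftEdge and rightEdge invert the edge compression in closed form (quadratic formula);
// their coefficients aleft, bleft, aright, bright and cright are precomputed in FFR::Initialize.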
// Now figure out which UV coordinates to actually output depending on which
// UV region is being processed. Each axis is handled separately to cover all nine
// possible combinations.
vec2 uncompressedUV = vec2(0., 0.);
if (eyeUV.x < loBound.x)
uncompressedUV.x = leftEdge.x;
else if (eyeUV.x > hiBound.x)
uncompressedUV.x = rightEdge.x;
else
uncompressedUV.x = center.x;
if (eyeUV.y < loBound.y)
uncompressedUV.y = leftEdge.y;
else if (eyeUV.y > hiBound.y)
uncompressedUV.y = rightEdge.y;
else
uncompressedUV.y = center.y;
color = texture(tex0, EyeToTextureUV(uncompressedUV * EYE_SIZE_RATIO, isRightEye));
}
@@ -138,19 +149,49 @@ FoveationVars CalculateFoveationVars(FFRData data) {
FFR::FFR(Texture *inputSurface) : mInputSurface(inputSurface) {}

void FFR::Initialize(FoveationVars fv) {
using glm::vec2;

// Precalculate a bunch of constants that will be used in the fragment shader
auto CENTER_SIZE = vec2(fv.centerSizeX, fv.centerSizeY); // Size of the center, non-distorted region
auto CENTER_SHIFT = vec2(fv.centerShiftX, fv.centerShiftY); // How much to shift the center region
auto EDGE_RATIO = vec2(fv.edgeRatioX, fv.edgeRatioY); // Ratio of edge region vs. center region

auto c0 = (vec2(1., 1.) - CENTER_SIZE) * vec2(0.5, 0.5);
auto c1 = (EDGE_RATIO - vec2(1., 1.)) * c0 * (CENTER_SHIFT + vec2(1., 1.)) / EDGE_RATIO;
auto c2 = (EDGE_RATIO - vec2(1., 1.)) * CENTER_SIZE + vec2(1., 1.);
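// c0 is half of the axis fraction not covered by the center region; c1 and c2 are intermediate
// constants reused by the shader's center mapping and by the edge constants computed below.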

auto loBound = c0 * (CENTER_SHIFT + vec2(1., 1.)); // Lower bound below which the "left" edge begins
auto hiBound = c0 * (CENTER_SHIFT - vec2(1., 1.)) + vec2(1., 1.); // Upper bound above which the "right" edge begins
auto loBoundC = c0 * (CENTER_SHIFT + vec2(1., 1.)) / c2; // Same as loBound but rescaled for the distorted image
auto hiBoundC = c0 * (CENTER_SHIFT - vec2(1., 1.)) / c2 + vec2(1., 1.); // Same as hiBound but rescaled for the distorted image

// Constants for function:
// leftEdge(x) = (-bleft + sqrt(bleft^2 + 4 * aleft * x)) / (2 * aleft)
auto aleft = c2 * (vec2(1., 1.) - EDGE_RATIO) / (EDGE_RATIO * loBoundC);
auto bleft = (c1 + c2 * loBoundC) / loBoundC;

// Constants for function:
// rightEdge(x) = (-bright + sqrt(bright^2 - 4 * (cright - aright * x))) / (2 * aright)
auto aright = c2 * (EDGE_RATIO - vec2(1., 1.)) / (EDGE_RATIO * (vec2(1., 1.) - hiBoundC));
auto bright = (c2 - EDGE_RATIO * c1 - vec2(2., 2.) * EDGE_RATIO * c2 + c2 * EDGE_RATIO * (vec2(1., 1.) - hiBoundC) + EDGE_RATIO) / (EDGE_RATIO * (vec2(1., 1.) - hiBoundC));
auto cright = ((c2 * EDGE_RATIO - c2) * (c1 - hiBoundC + c2 * hiBoundC)) / (EDGE_RATIO * (vec2(1., 1.) - hiBoundC) * (vec2(1., 1.) - hiBoundC));
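// The center region needs no further constants: the shader computes
// center = (eyeUV - c1) * EDGE_RATIO / c2 directly from c1, c2 and EDGE_RATIO.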

// Put all the constants into the shader
auto ffrCommonShaderStr = string_format(FFR_COMMON_SHADER_FORMAT,
fv.targetEyeWidth,
fv.targetEyeHeight,
fv.optimizedEyeWidth,
fv.optimizedEyeHeight,
fv.eyeWidthRatio,
fv.eyeHeightRatio,
fv.centerSizeX,
fv.centerSizeY,
fv.centerShiftX,
fv.centerShiftY,
fv.edgeRatioX,
fv.edgeRatioY);
fv.eyeWidthRatio, fv.eyeHeightRatio,
fv.edgeRatioX, fv.edgeRatioY,
c1.x, c1.y,
c2.x, c2.y,
loBound.x, loBound.y,
hiBound.x, hiBound.y,
loBoundC.x, loBoundC.y,
hiBoundC.x, hiBoundC.y,
aleft.x, aleft.y,
bleft.x, bleft.y,
aright.x, aright.y,
bright.x, bright.y,
cright.x, cright.y
);

mExpandedTexture.reset(new Texture(false, 0, false, fv.targetEyeWidth * 2, fv.targetEyeHeight));
mExpandedTextureState = make_unique<RenderState>(mExpandedTexture.get());