Commit 82af7f1d by andrewlewis Committed by Tofunmi Adigun-Hameed

Run `clang-format` on GLSL

PiperOrigin-RevId: 534015933
(cherry picked from commit 65c33e69709f34ef94f8831b290eb30df2a4306b)
parent 9066652b
...@@ -34,44 +34,41 @@ varying vec2 vTexSamplingCoord; ...@@ -34,44 +34,41 @@ varying vec2 vTexSamplingCoord;
const float epsilon = 1e-10; const float epsilon = 1e-10;
vec3 rgbToHcv(vec3 rgb) { vec3 rgbToHcv(vec3 rgb) {
vec4 p = (rgb.g < rgb.b) vec4 p = (rgb.g < rgb.b) ? vec4(rgb.bg, -1.0, 2.0 / 3.0)
? vec4(rgb.bg, -1.0, 2.0 / 3.0) : vec4(rgb.gb, 0.0, -1.0 / 3.0);
: vec4(rgb.gb, 0.0, -1.0 / 3.0); vec4 q = (rgb.r < p.x) ? vec4(p.xyw, rgb.r) : vec4(rgb.r, p.yzx);
vec4 q = (rgb.r < p.x) float c = q.x - min(q.w, q.y);
? vec4(p.xyw, rgb.r) float h = abs((q.w - q.y) / (6.0 * c + epsilon) + q.z);
: vec4(rgb.r, p.yzx); return vec3(h, c, q.x);
float c = q.x - min(q.w, q.y);
float h = abs((q.w - q.y) / (6.0 * c + epsilon) + q.z);
return vec3(h, c, q.x);
} }
vec3 rgbToHsl(vec3 rgb) { vec3 rgbToHsl(vec3 rgb) {
vec3 hcv = rgbToHcv(rgb); vec3 hcv = rgbToHcv(rgb);
float l = hcv.z - hcv.y * 0.5; float l = hcv.z - hcv.y * 0.5;
float s = hcv.y / (1.0 - abs(l * 2.0 - 1.0) + epsilon); float s = hcv.y / (1.0 - abs(l * 2.0 - 1.0) + epsilon);
return vec3(hcv.x, s, l); return vec3(hcv.x, s, l);
} }
vec3 hueToRgb(float hue) { vec3 hueToRgb(float hue) {
float r = abs(hue * 6.0 - 3.0) - 1.0; float r = abs(hue * 6.0 - 3.0) - 1.0;
float g = 2.0 - abs(hue * 6.0 - 2.0); float g = 2.0 - abs(hue * 6.0 - 2.0);
float b = 2.0 - abs(hue * 6.0 - 4.0); float b = 2.0 - abs(hue * 6.0 - 4.0);
return clamp(vec3(r, g, b), 0.0, 1.0); return clamp(vec3(r, g, b), 0.0, 1.0);
} }
vec3 hslToRgb(vec3 hsl) { vec3 hslToRgb(vec3 hsl) {
vec3 rgb = hueToRgb(hsl.x); vec3 rgb = hueToRgb(hsl.x);
float c = (1.0 - abs(2.0 * hsl.z - 1.0)) * hsl.y; float c = (1.0 - abs(2.0 * hsl.z - 1.0)) * hsl.y;
return (rgb - 0.5) * c + hsl.z; return (rgb - 0.5) * c + hsl.z;
} }
void main() { void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord); vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec3 hslColor = rgbToHsl(inputColor.rgb); vec3 hslColor = rgbToHsl(inputColor.rgb);
hslColor.x = mod(hslColor.x + uHueAdjustmentDegrees, 1.0); hslColor.x = mod(hslColor.x + uHueAdjustmentDegrees, 1.0);
hslColor.y = clamp(hslColor.y + uSaturationAdjustment, 0.0, 1.0); hslColor.y = clamp(hslColor.y + uSaturationAdjustment, 0.0, 1.0);
hslColor.z = clamp(hslColor.z + uLightnessAdjustment, 0.0, 1.0); hslColor.z = clamp(hslColor.z + uLightnessAdjustment, 0.0, 1.0);
gl_FragColor = vec4(hslToRgb(hslColor), inputColor.a); gl_FragColor = vec4(hslToRgb(hslColor), inputColor.a);
} }
...@@ -31,69 +31,67 @@ varying vec2 vTexSamplingCoord; ...@@ -31,69 +31,67 @@ varying vec2 vTexSamplingCoord;
// Applies the color lookup using uLut based on the input colors. // Applies the color lookup using uLut based on the input colors.
vec3 applyLookup(vec3 color) { vec3 applyLookup(vec3 color) {
// Reminder: Inside OpenGL vector.xyz is the same as vector.rgb. // Reminder: Inside OpenGL vector.xyz is the same as vector.rgb.
// Here we use mentions of x and y coordinates to refer to // Here we use mentions of x and y coordinates to refer to
// the position to sample from inside the 2D LUT plane and // the position to sample from inside the 2D LUT plane and
// rgb to create the 3D coordinates based on the input colors. // rgb to create the 3D coordinates based on the input colors.
// To sample from the 3D LUT we interpolate bilinearly twice in the 2D LUT // To sample from the 3D LUT we interpolate bilinearly twice in the 2D LUT
// to replicate the trilinear interpolation in a 3D LUT. Thus we sample // to replicate the trilinear interpolation in a 3D LUT. Thus we sample
// from the plane of position redCoordLow and on the plane above. // from the plane of position redCoordLow and on the plane above.
// redCoordLow points to the lower plane to sample from. // redCoordLow points to the lower plane to sample from.
float redCoord = color.r * (uColorLutLength - 1.0); float redCoord = color.r * (uColorLutLength - 1.0);
// Clamping to uColorLutLength - 2 is only needed if redCoord points to the // Clamping to uColorLutLength - 2 is only needed if redCoord points to the
// most upper plane. In this case there would not be any plane above // most upper plane. In this case there would not be any plane above
// available to sample from. // available to sample from.
float redCoordLow = clamp(floor(redCoord), 0.0, uColorLutLength - 2.0); float redCoordLow = clamp(floor(redCoord), 0.0, uColorLutLength - 2.0);
// lowerY is indexed in two steps. First redCoordLow defines the plane to // lowerY is indexed in two steps. First redCoordLow defines the plane to
// sample from. Next the green color component is added to index the row in // sample from. Next the green color component is added to index the row in
// the found plane. As described in the NVIDIA blog article about LUTs // the found plane. As described in the NVIDIA blog article about LUTs
// https://developer.nvidia.com/gpugems/gpugems2/part-iii-high-quality-rendering/chapter-24-using-lookup-tables-accelerate-color // https://developer.nvidia.com/gpugems/gpugems2/part-iii-high-quality-rendering/chapter-24-using-lookup-tables-accelerate-color
// (Section 24.2), we sample from color * scale + offset, where offset is // (Section 24.2), we sample from color * scale + offset, where offset is
// defined by 1 / (2 * uColorLutLength) and the scale is defined by // defined by 1 / (2 * uColorLutLength) and the scale is defined by
// (uColorLutLength - 1.0) / uColorLutLength. // (uColorLutLength - 1.0) / uColorLutLength.
// The following derives the equation of lowerY. For this let // The following derives the equation of lowerY. For this let
// N = uColorLutLength. The general formula to sample at row y // N = uColorLutLength. The general formula to sample at row y
// is defined as y = N * r + g. // is defined as y = N * r + g.
// Using the offset and scale as described in NVIDIA's blog article we get: // Using the offset and scale as described in NVIDIA's blog article we get:
// y = offset + (N * r + g) * scale // y = offset + (N * r + g) * scale
// y = 1 / (2 * N) + (N * r + g) * (N - 1) / N // y = 1 / (2 * N) + (N * r + g) * (N - 1) / N
// y = 1 / (2 * N) + N * r * (N - 1) / N + g * (N - 1) / N // y = 1 / (2 * N) + N * r * (N - 1) / N + g * (N - 1) / N
// We have defined redCoord as r * (N - 1) if we excluded the clamping for // We have defined redCoord as r * (N - 1) if we excluded the clamping for
// now, giving us: // now, giving us:
// y = 1 / (2 * N) + N * redCoord / N + g * (N - 1) / N // y = 1 / (2 * N) + N * redCoord / N + g * (N - 1) / N
// This simplifies to: // This simplifies to:
// y = 0.5 / N + (N * redCoord + g * (N - 1)) / N // y = 0.5 / N + (N * redCoord + g * (N - 1)) / N
// y = (0.5 + N * redCoord + g * (N - 1)) / N // y = (0.5 + N * redCoord + g * (N - 1)) / N
// This formula now assumes a coordinate system in the range of [0, N] but // This formula now assumes a coordinate system in the range of [0, N] but
// OpenGL uses a [0, 1] unit coordinate system internally. Thus dividing // OpenGL uses a [0, 1] unit coordinate system internally. Thus dividing
// by N gives us the final formula for y: // by N gives us the final formula for y:
// y = ((0.5 + N * redCoord + g * (N - 1)) / N) / N // y = ((0.5 + N * redCoord + g * (N - 1)) / N) / N
// y = (0.5 + redCoord * N + g * (N - 1)) / (N * N) // y = (0.5 + redCoord * N + g * (N - 1)) / (N * N)
float lowerY = float lowerY = (0.5 + redCoordLow * uColorLutLength +
(0.5 color.g * (uColorLutLength - 1.0)) /
+ redCoordLow * uColorLutLength (uColorLutLength * uColorLutLength);
+ color.g * (uColorLutLength - 1.0)) // The upperY is the same position moved up by one LUT plane.
/ (uColorLutLength * uColorLutLength); float upperY = lowerY + 1.0 / uColorLutLength;
// The upperY is the same position moved up by one LUT plane.
float upperY = lowerY + 1.0 / uColorLutLength;
// The x position is the blue color channel (x-axis in LUT[R][G][B]). // The x position is the blue color channel (x-axis in LUT[R][G][B]).
float x = (0.5 + color.b * (uColorLutLength - 1.0)) / uColorLutLength; float x = (0.5 + color.b * (uColorLutLength - 1.0)) / uColorLutLength;
vec3 lowerRgb = texture2D(uColorLut, vec2(x, lowerY)).rgb; vec3 lowerRgb = texture2D(uColorLut, vec2(x, lowerY)).rgb;
vec3 upperRgb = texture2D(uColorLut, vec2(x, upperY)).rgb; vec3 upperRgb = texture2D(uColorLut, vec2(x, upperY)).rgb;
// Linearly interpolate between lowerRgb and upperRgb based on the // Linearly interpolate between lowerRgb and upperRgb based on the
// distance of the actual position in the plane and the lower sampling position. // distance of the actual position in the plane and the lower sampling position.
return mix(lowerRgb, upperRgb, redCoord - redCoordLow); return mix(lowerRgb, upperRgb, redCoord - redCoordLow);
} }
void main() { void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord); vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
gl_FragColor.rgb = applyLookup(inputColor.rgb); gl_FragColor.rgb = applyLookup(inputColor.rgb);
gl_FragColor.a = inputColor.a; gl_FragColor.a = inputColor.a;
} }
...@@ -44,17 +44,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) { ...@@ -44,17 +44,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892; const highp float b = 0.28466892;
const highp float c = 0.55991073; const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) : return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
a * log(12.0 * linearChannel - b) + c; : a * log(12.0 * linearChannel - b) + c;
} }
// BT.2100 / BT.2020 HLG OETF. // BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) { highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3( return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.r), hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.g), hlgOetfSingleChannel(linearColor.b));
hlgOetfSingleChannel(linearColor.b)
);
} }
// BT.2100 / BT.2020, PQ / ST2084 OETF. // BT.2100 / BT.2020, PQ / ST2084 OETF.
......
...@@ -23,7 +23,7 @@ uniform mat4 uRgbMatrix; ...@@ -23,7 +23,7 @@ uniform mat4 uRgbMatrix;
varying vec2 vTexSamplingCoord; varying vec2 vTexSamplingCoord;
void main() { void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord); vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
gl_FragColor = uRgbMatrix * vec4(inputColor.rgb, 1); gl_FragColor = uRgbMatrix * vec4(inputColor.rgb, 1);
gl_FragColor.a = inputColor.a; gl_FragColor.a = inputColor.a;
} }
...@@ -66,17 +66,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) { ...@@ -66,17 +66,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) {
const highp float a = 0.17883277; const highp float a = 0.17883277;
const highp float b = 0.28466892; const highp float b = 0.28466892;
const highp float c = 0.55991073; const highp float c = 0.55991073;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0 : return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0
(b + exp((hlgChannel - c) / a)) / 12.0; : (b + exp((hlgChannel - c) / a)) / 12.0;
} }
// BT.2100 / BT.2020 HLG EOTF. // BT.2100 / BT.2020 HLG EOTF.
highp vec3 hlgEotf(highp vec3 hlgColor) { highp vec3 hlgEotf(highp vec3 hlgColor) {
return vec3( return vec3(hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.r), hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.g), hlgEotfSingleChannel(hlgColor.b));
hlgEotfSingleChannel(hlgColor.b)
);
} }
// BT.2100 / BT.2020 PQ EOTF. // BT.2100 / BT.2020 PQ EOTF.
...@@ -115,18 +113,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) { ...@@ -115,18 +113,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) {
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf // https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf
// Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint) // Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
// https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687 // https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
const mat3 RGB_TO_XYZ_BT2020 = mat3( const mat3 RGB_TO_XYZ_BT2020 =
0.63695805f, 0.26270021f, 0.00000000f, mat3(0.63695805f, 0.26270021f, 0.00000000f, 0.14461690f, 0.67799807f,
0.14461690f, 0.67799807f, 0.02807269f, 0.02807269f, 0.16888098f, 0.05930172f, 1.06098506f);
0.16888098f, 0.05930172f, 1.06098506f);
// Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint) // Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
const mat3 XYZ_TO_RGB_BT709 = mat3( const mat3 XYZ_TO_RGB_BT709 =
3.24096994f, -0.96924364f, 0.05563008f, mat3(3.24096994f, -0.96924364f, 0.05563008f, -1.53738318f, 1.87596750f,
-1.53738318f, 1.87596750f, -0.20397696f, -0.20397696f, -0.49861076f, 0.04155506f, 1.05697151f);
-0.49861076f, 0.04155506f, 1.05697151f);
// hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000); // hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000);
// nominalPeakLuminance was selected to use a 500 as a typical value, used // nominalPeakLuminance was selected to use a 500 as a typical value, used
// in https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62, // in
// https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// b/199162498#comment35, and // b/199162498#comment35, and
// https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf. // https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf.
const float hlgGamma = 1.0735674018211279; const float hlgGamma = 1.0735674018211279;
...@@ -167,17 +164,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) { ...@@ -167,17 +164,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892; const highp float b = 0.28466892;
const highp float c = 0.55991073; const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) : return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
a * log(12.0 * linearChannel - b) + c; : a * log(12.0 * linearChannel - b) + c;
} }
// BT.2100 / BT.2020 HLG OETF. // BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) { highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3( return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.r), hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.g), hlgOetfSingleChannel(linearColor.b));
hlgOetfSingleChannel(linearColor.b)
);
} }
// BT.2100 / BT.2020, PQ / ST2084 OETF. // BT.2100 / BT.2020, PQ / ST2084 OETF.
...@@ -199,17 +194,16 @@ highp vec3 pqOetf(highp vec3 linearColor) { ...@@ -199,17 +194,16 @@ highp vec3 pqOetf(highp vec3 linearColor) {
// BT.709 gamma 2.2 OETF for one channel. // BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) { float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference: // Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2 // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2)); return pow(linearChannel, (1.0 / 2.2));
} }
// BT.709 gamma 2.2 OETF. // BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) { vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3( return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.r), gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.g), gamma22OetfSingleChannel(linearColor.b));
gamma22OetfSingleChannel(linearColor.b));
} }
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
...@@ -237,9 +231,10 @@ vec3 yuvToRgb(vec3 yuv) { ...@@ -237,9 +231,10 @@ vec3 yuvToRgb(vec3 yuv) {
void main() { void main() {
vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz; vec3 srcYuv = texture(uTexSampler, vTexSamplingCoord).xyz;
vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv)); vec3 opticalColorBt2020 = applyEotf(yuvToRgb(srcYuv));
vec4 opticalColor = (uApplyHdrToSdrToneMapping == 1) vec4 opticalColor =
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0) (uApplyHdrToSdrToneMapping == 1)
: vec4(opticalColorBt2020, 1.0); ? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor; vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0); outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
} }
...@@ -58,17 +58,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) { ...@@ -58,17 +58,15 @@ highp float hlgEotfSingleChannel(highp float hlgChannel) {
const highp float a = 0.17883277; const highp float a = 0.17883277;
const highp float b = 0.28466892; const highp float b = 0.28466892;
const highp float c = 0.55991073; const highp float c = 0.55991073;
return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0 : return hlgChannel <= 0.5 ? hlgChannel * hlgChannel / 3.0
(b + exp((hlgChannel - c) / a)) / 12.0; : (b + exp((hlgChannel - c) / a)) / 12.0;
} }
// BT.2100 / BT.2020 HLG EOTF. // BT.2100 / BT.2020 HLG EOTF.
highp vec3 hlgEotf(highp vec3 hlgColor) { highp vec3 hlgEotf(highp vec3 hlgColor) {
return vec3( return vec3(hlgEotfSingleChannel(hlgColor.r),
hlgEotfSingleChannel(hlgColor.r), hlgEotfSingleChannel(hlgColor.g),
hlgEotfSingleChannel(hlgColor.g), hlgEotfSingleChannel(hlgColor.b));
hlgEotfSingleChannel(hlgColor.b)
);
} }
// BT.2100 / BT.2020 PQ EOTF. // BT.2100 / BT.2020 PQ EOTF.
...@@ -107,18 +105,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) { ...@@ -107,18 +105,17 @@ highp vec3 applyHlgBt2020ToBt709Ootf(highp vec3 linearRgbBt2020) {
// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf // https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2100-2-201807-I!!PDF-E.pdf
// Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint) // Matrix values based on computeXYZMatrix(BT2020Primaries, BT2020WhitePoint)
// https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687 // https://cs.android.com/android/platform/superproject/+/master:frameworks/base/libs/hwui/utils/HostColorSpace.cpp;l=200-232;drc=86bd214059cd6150304888a285941bf74af5b687
const mat3 RGB_TO_XYZ_BT2020 = mat3( const mat3 RGB_TO_XYZ_BT2020 =
0.63695805f, 0.26270021f, 0.00000000f, mat3(0.63695805f, 0.26270021f, 0.00000000f, 0.14461690f, 0.67799807f,
0.14461690f, 0.67799807f, 0.02807269f, 0.02807269f, 0.16888098f, 0.05930172f, 1.06098506f);
0.16888098f, 0.05930172f, 1.06098506f);
// Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint) // Matrix values based on computeXYZMatrix(BT709Primaries, BT709WhitePoint)
const mat3 XYZ_TO_RGB_BT709 = mat3( const mat3 XYZ_TO_RGB_BT709 =
3.24096994f, -0.96924364f, 0.05563008f, mat3(3.24096994f, -0.96924364f, 0.05563008f, -1.53738318f, 1.87596750f,
-1.53738318f, 1.87596750f, -0.20397696f, -0.20397696f, -0.49861076f, 0.04155506f, 1.05697151f);
-0.49861076f, 0.04155506f, 1.05697151f);
// hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000); // hlgGamma is 1.2 + 0.42 * log10(nominalPeakLuminance/1000);
// nominalPeakLuminance was selected to use a 500 as a typical value, used // nominalPeakLuminance was selected to use a 500 as a typical value, used
// in https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62, // in
// https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/tonemap/tonemap.cpp;drc=7a577450e536aa1e99f229a0cb3d3531c82e8a8d;l=62,
// b/199162498#comment35, and // b/199162498#comment35, and
// https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf. // https://www.microsoft.com/applied-sciences/uploads/projects/investigation-of-hdr-vs-tone-mapped-sdr/investigation-of-hdr-vs-tone-mapped-sdr.pdf.
const float hlgGamma = 1.0735674018211279; const float hlgGamma = 1.0735674018211279;
...@@ -159,17 +156,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) { ...@@ -159,17 +156,15 @@ highp float hlgOetfSingleChannel(highp float linearChannel) {
const highp float b = 0.28466892; const highp float b = 0.28466892;
const highp float c = 0.55991073; const highp float c = 0.55991073;
return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel) : return linearChannel <= 1.0 / 12.0 ? sqrt(3.0 * linearChannel)
a * log(12.0 * linearChannel - b) + c; : a * log(12.0 * linearChannel - b) + c;
} }
// BT.2100 / BT.2020 HLG OETF. // BT.2100 / BT.2020 HLG OETF.
highp vec3 hlgOetf(highp vec3 linearColor) { highp vec3 hlgOetf(highp vec3 linearColor) {
return vec3( return vec3(hlgOetfSingleChannel(linearColor.r),
hlgOetfSingleChannel(linearColor.r), hlgOetfSingleChannel(linearColor.g),
hlgOetfSingleChannel(linearColor.g), hlgOetfSingleChannel(linearColor.b));
hlgOetfSingleChannel(linearColor.b)
);
} }
// BT.2100 / BT.2020, PQ / ST2084 OETF. // BT.2100 / BT.2020, PQ / ST2084 OETF.
...@@ -191,17 +186,16 @@ highp vec3 pqOetf(highp vec3 linearColor) { ...@@ -191,17 +186,16 @@ highp vec3 pqOetf(highp vec3 linearColor) {
// BT.709 gamma 2.2 OETF for one channel. // BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) { float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference: // Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2 // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_GAMMA2_2
return pow(linearChannel, (1.0 / 2.2)); return pow(linearChannel, (1.0 / 2.2));
} }
// BT.709 gamma 2.2 OETF. // BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) { vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3( return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.r), gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.g), gamma22OetfSingleChannel(linearColor.b));
gamma22OetfSingleChannel(linearColor.b));
} }
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
...@@ -222,11 +216,12 @@ highp vec3 applyOetf(highp vec3 linearColor) { ...@@ -222,11 +216,12 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
void main() { void main() {
vec3 opticalColorBt2020 = applyEotf( vec3 opticalColorBt2020 =
texture(uTexSampler, vTexSamplingCoord).xyz); applyEotf(texture(uTexSampler, vTexSamplingCoord).xyz);
vec4 opticalColor = (uApplyHdrToSdrToneMapping == 1) vec4 opticalColor =
? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0) (uApplyHdrToSdrToneMapping == 1)
: vec4(opticalColorBt2020, 1.0); ? vec4(applyBt2020ToBt709Ootf(opticalColorBt2020), 1.0)
: vec4(opticalColorBt2020, 1.0);
vec4 transformedColors = uRgbMatrix * opticalColor; vec4 transformedColors = uRgbMatrix * opticalColor;
outColor = vec4(applyOetf(transformedColors.rgb), 1.0); outColor = vec4(applyOetf(transformedColors.rgb), 1.0);
} }
...@@ -13,7 +13,6 @@ ...@@ -13,7 +13,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
// ES 2 fragment shader that: // ES 2 fragment shader that:
// 1. Samples from an external texture with uTexSampler copying from this // 1. Samples from an external texture with uTexSampler copying from this
// texture to the current output. // texture to the current output.
...@@ -39,22 +38,21 @@ const float gamma = 1.0 / inverseGamma; ...@@ -39,22 +38,21 @@ const float gamma = 1.0 / inverseGamma;
const int GL_FALSE = 0; const int GL_FALSE = 0;
const int GL_TRUE = 1; const int GL_TRUE = 1;
// Transforms a single channel from electrical to optical SDR using the SMPTE // Transforms a single channel from electrical to optical SDR using the SMPTE
// 170M OETF. // 170M OETF.
float smpte170mEotfSingleChannel(float electricalChannel) { float smpte170mEotfSingleChannel(float electricalChannel) {
// Specification: // Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return electricalChannel < 0.0812 return electricalChannel < 0.0812
? electricalChannel / 4.500 ? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma); : pow((electricalChannel + 0.099) / 1.099, gamma);
} }
// Transforms electrical to optical SDR using the SMPTE 170M EOTF. // Transforms electrical to optical SDR using the SMPTE 170M EOTF.
vec3 smpte170mEotf(vec3 electricalColor) { vec3 smpte170mEotf(vec3 electricalColor) {
return vec3( return vec3(smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.r), smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.g), smpte170mEotfSingleChannel(electricalColor.b));
smpte170mEotfSingleChannel(electricalColor.b));
} }
// Transforms a single channel from optical to electrical SDR. // Transforms a single channel from optical to electrical SDR.
...@@ -62,16 +60,15 @@ float smpte170mOetfSingleChannel(float opticalChannel) { ...@@ -62,16 +60,15 @@ float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification: // Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018 return opticalChannel < 0.018
? opticalChannel * 4.500 ? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099; : 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
} }
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF. // Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) { vec3 smpte170mOetf(vec3 opticalColor) {
return vec3( return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.r), smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.g), smpte170mOetfSingleChannel(opticalColor.b));
smpte170mOetfSingleChannel(opticalColor.b));
} }
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
...@@ -80,8 +77,8 @@ highp vec3 applyOetf(highp vec3 linearColor) { ...@@ -80,8 +77,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
// LINT.IfChange(color_transfer) // LINT.IfChange(color_transfer)
const int COLOR_TRANSFER_LINEAR = 1; const int COLOR_TRANSFER_LINEAR = 1;
const int COLOR_TRANSFER_SDR_VIDEO = 3; const int COLOR_TRANSFER_SDR_VIDEO = 3;
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR ||
|| uEnableColorTransfer == GL_FALSE) { uEnableColorTransfer == GL_FALSE) {
return linearColor; return linearColor;
} else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) { } else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mOetf(linearColor); return smpte170mOetf(linearColor);
...@@ -91,8 +88,8 @@ highp vec3 applyOetf(highp vec3 linearColor) { ...@@ -91,8 +88,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
} }
vec3 applyEotf(vec3 electricalColor){ vec3 applyEotf(vec3 electricalColor) {
if (uEnableColorTransfer == GL_TRUE){ if (uEnableColorTransfer == GL_TRUE) {
return smpte170mEotf(electricalColor); return smpte170mEotf(electricalColor);
} else if (uEnableColorTransfer == GL_FALSE) { } else if (uEnableColorTransfer == GL_FALSE) {
return electricalColor; return electricalColor;
......
...@@ -13,7 +13,6 @@ ...@@ -13,7 +13,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
// ES 2 fragment shader that: // ES 2 fragment shader that:
// 1. Samples from an input texture created from an internal texture (e.g. a // 1. Samples from an input texture created from an internal texture (e.g. a
// texture created from a bitmap), with uTexSampler copying from this texture // texture created from a bitmap), with uTexSampler copying from this texture
...@@ -50,17 +49,15 @@ float srgbEotfSingleChannel(float electricalChannel) { ...@@ -50,17 +49,15 @@ float srgbEotfSingleChannel(float electricalChannel) {
// Specification: // Specification:
// https://developer.android.com/ndk/reference/group/a-data-space#group___a_data_space_1gga2759ad19cae46646cc5f7002758c4a1cac1bef6aa3a72abbf4a651a0bfb117f96 // https://developer.android.com/ndk/reference/group/a-data-space#group___a_data_space_1gga2759ad19cae46646cc5f7002758c4a1cac1bef6aa3a72abbf4a651a0bfb117f96
return electricalChannel <= 0.04045 return electricalChannel <= 0.04045
? electricalChannel / 12.92 ? electricalChannel / 12.92
: pow((electricalChannel + 0.055) / 1.055, 2.4); : pow((electricalChannel + 0.055) / 1.055, 2.4);
} }
// Transforms electrical to optical SDR using the sRGB EOTF. // Transforms electrical to optical SDR using the sRGB EOTF.
vec3 srgbEotf(const vec3 electricalColor) { vec3 srgbEotf(const vec3 electricalColor) {
return vec3( return vec3(srgbEotfSingleChannel(electricalColor.r),
srgbEotfSingleChannel(electricalColor.r), srgbEotfSingleChannel(electricalColor.g),
srgbEotfSingleChannel(electricalColor.g), srgbEotfSingleChannel(electricalColor.b));
srgbEotfSingleChannel(electricalColor.b)
);
} }
// Transforms a single channel from electrical to optical SDR using the SMPTE // Transforms a single channel from electrical to optical SDR using the SMPTE
...@@ -69,16 +66,15 @@ float smpte170mEotfSingleChannel(float electricalChannel) { ...@@ -69,16 +66,15 @@ float smpte170mEotfSingleChannel(float electricalChannel) {
// Specification: // Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return electricalChannel < 0.0812 return electricalChannel < 0.0812
? electricalChannel / 4.500 ? electricalChannel / 4.500
: pow((electricalChannel + 0.099) / 1.099, gamma); : pow((electricalChannel + 0.099) / 1.099, gamma);
} }
// Transforms electrical to optical SDR using the SMPTE 170M EOTF. // Transforms electrical to optical SDR using the SMPTE 170M EOTF.
vec3 smpte170mEotf(vec3 electricalColor) { vec3 smpte170mEotf(vec3 electricalColor) {
return vec3( return vec3(smpte170mEotfSingleChannel(electricalColor.r),
smpte170mEotfSingleChannel(electricalColor.r), smpte170mEotfSingleChannel(electricalColor.g),
smpte170mEotfSingleChannel(electricalColor.g), smpte170mEotfSingleChannel(electricalColor.b));
smpte170mEotfSingleChannel(electricalColor.b));
} }
// Transforms a single channel from optical to electrical SDR. // Transforms a single channel from optical to electrical SDR.
...@@ -86,23 +82,22 @@ float smpte170mOetfSingleChannel(float opticalChannel) { ...@@ -86,23 +82,22 @@ float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification: // Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018 return opticalChannel < 0.018
? opticalChannel * 4.500 ? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099; : 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
} }
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF. // Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) { vec3 smpte170mOetf(vec3 opticalColor) {
return vec3( return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.r), smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.g), smpte170mOetfSingleChannel(opticalColor.b));
smpte170mOetfSingleChannel(opticalColor.b));
} }
// Applies the appropriate EOTF to convert nonlinear electrical signals to linear // Applies the appropriate EOTF to convert nonlinear electrical signals to
// optical signals. Input and output are both normalized to [0, 1]. // linear optical signals. Input and output are both normalized to [0, 1].
vec3 applyEotf(vec3 electricalColor){ vec3 applyEotf(vec3 electricalColor) {
if (uEnableColorTransfer == GL_TRUE){ if (uEnableColorTransfer == GL_TRUE) {
if (uInputColorTransfer == COLOR_TRANSFER_SRGB){ if (uInputColorTransfer == COLOR_TRANSFER_SRGB) {
return srgbEotf(electricalColor) ; return srgbEotf(electricalColor);
} else if (uInputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) { } else if (uInputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mEotf(electricalColor); return smpte170mEotf(electricalColor);
} else { } else {
...@@ -120,8 +115,8 @@ vec3 applyEotf(vec3 electricalColor){ ...@@ -120,8 +115,8 @@ vec3 applyEotf(vec3 electricalColor){
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
// electrical signals. Input and output are both normalized to [0, 1]. // electrical signals. Input and output are both normalized to [0, 1].
highp vec3 applyOetf(highp vec3 linearColor) { highp vec3 applyOetf(highp vec3 linearColor) {
if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR if (uOutputColorTransfer == COLOR_TRANSFER_LINEAR ||
|| uEnableColorTransfer == GL_FALSE) { uEnableColorTransfer == GL_FALSE) {
return linearColor; return linearColor;
} else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) { } else if (uOutputColorTransfer == COLOR_TRANSFER_SDR_VIDEO) {
return smpte170mOetf(linearColor); return smpte170mOetf(linearColor);
...@@ -131,8 +126,8 @@ highp vec3 applyOetf(highp vec3 linearColor) { ...@@ -131,8 +126,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
} }
vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord){ vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord) {
if (uInputColorTransfer == COLOR_TRANSFER_SRGB){ if (uInputColorTransfer == COLOR_TRANSFER_SRGB) {
// Whereas the Android system uses the top-left corner as (0,0) of the // Whereas the Android system uses the top-left corner as (0,0) of the
// coordinate system, OpenGL uses the bottom-left corner as (0,0), so the // coordinate system, OpenGL uses the bottom-left corner as (0,0), so the
// texture gets flipped. We flip the texture vertically to ensure the // texture gets flipped. We flip the texture vertically to ensure the
...@@ -144,8 +139,8 @@ vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord){ ...@@ -144,8 +139,8 @@ vec2 getAdjustedTexSamplingCoord(vec2 originalTexSamplingCoord){
} }
void main() { void main() {
vec4 inputColor = texture2D( vec4 inputColor =
uTexSampler, getAdjustedTexSamplingCoord(vTexSamplingCoord)); texture2D(uTexSampler, getAdjustedTexSamplingCoord(vTexSamplingCoord));
vec3 linearInputColor = applyEotf(inputColor.rgb); vec3 linearInputColor = applyEotf(inputColor.rgb);
vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1); vec4 transformedColors = uRgbMatrix * vec4(linearInputColor, 1);
......
...@@ -30,37 +30,35 @@ uniform int uOutputColorTransfer; ...@@ -30,37 +30,35 @@ uniform int uOutputColorTransfer;
const float inverseGamma = 0.4500; const float inverseGamma = 0.4500;
// Transforms a single channel from optical to electrical SDR using the SMPTE // Transforms a single channel from optical to electrical SDR using the SMPTE
// 170M OETF. // 170M OETF.
float smpte170mOetfSingleChannel(float opticalChannel) { float smpte170mOetfSingleChannel(float opticalChannel) {
// Specification: // Specification:
// https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en // https://www.itu.int/rec/R-REC-BT.1700-0-200502-I/en
return opticalChannel < 0.018 return opticalChannel < 0.018
? opticalChannel * 4.500 ? opticalChannel * 4.500
: 1.099 * pow(opticalChannel, inverseGamma) - 0.099; : 1.099 * pow(opticalChannel, inverseGamma) - 0.099;
} }
// Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF. // Transforms optical SDR colors to electrical SDR using the SMPTE 170M OETF.
vec3 smpte170mOetf(vec3 opticalColor) { vec3 smpte170mOetf(vec3 opticalColor) {
return vec3( return vec3(smpte170mOetfSingleChannel(opticalColor.r),
smpte170mOetfSingleChannel(opticalColor.r), smpte170mOetfSingleChannel(opticalColor.g),
smpte170mOetfSingleChannel(opticalColor.g), smpte170mOetfSingleChannel(opticalColor.b));
smpte170mOetfSingleChannel(opticalColor.b));
} }
// BT.709 gamma 2.2 OETF for one channel. // BT.709 gamma 2.2 OETF for one channel.
float gamma22OetfSingleChannel(highp float linearChannel) { float gamma22OetfSingleChannel(highp float linearChannel) {
// Reference: // Reference:
// https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_gamma22 // https://developer.android.com/reference/android/hardware/DataSpace#TRANSFER_gamma22
return pow(linearChannel, (1.0 / 2.2)); return pow(linearChannel, (1.0 / 2.2));
} }
// BT.709 gamma 2.2 OETF. // BT.709 gamma 2.2 OETF.
vec3 gamma22Oetf(highp vec3 linearColor) { vec3 gamma22Oetf(highp vec3 linearColor) {
return vec3( return vec3(gamma22OetfSingleChannel(linearColor.r),
gamma22OetfSingleChannel(linearColor.r), gamma22OetfSingleChannel(linearColor.g),
gamma22OetfSingleChannel(linearColor.g), gamma22OetfSingleChannel(linearColor.b));
gamma22OetfSingleChannel(linearColor.b));
} }
// Applies the appropriate OETF to convert linear optical signals to nonlinear // Applies the appropriate OETF to convert linear optical signals to nonlinear
...@@ -80,8 +78,8 @@ highp vec3 applyOetf(highp vec3 linearColor) { ...@@ -80,8 +78,8 @@ highp vec3 applyOetf(highp vec3 linearColor) {
} }
void main() { void main() {
vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord); vec4 inputColor = texture2D(uTexSampler, vTexSamplingCoord);
vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1); vec4 transformedColors = uRgbMatrix * vec4(inputColor.rgb, 1);
gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a); gl_FragColor = vec4(applyOetf(transformedColors.rgb), inputColor.a);
} }
...@@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix; ...@@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix;
varying vec2 vTexSamplingCoord; varying vec2 vTexSamplingCoord;
void main() { void main() {
gl_Position = uTransformationMatrix * aFramePosition; gl_Position = uTransformationMatrix * aFramePosition;
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5, aFramePosition.y * 0.5 + 0.5, 0.0, 1.0); vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5,
aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy; vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy;
} }
...@@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix; ...@@ -22,6 +22,7 @@ uniform mat4 uTexTransformationMatrix;
out vec2 vTexSamplingCoord; out vec2 vTexSamplingCoord;
void main() { void main() {
gl_Position = uTransformationMatrix * aFramePosition; gl_Position = uTransformationMatrix * aFramePosition;
vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5, aFramePosition.y * 0.5 + 0.5, 0.0, 1.0); vec4 texturePosition = vec4(aFramePosition.x * 0.5 + 0.5,
aFramePosition.y * 0.5 + 0.5, 0.0, 1.0);
vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy; vTexSamplingCoord = (uTexTransformationMatrix * texturePosition).xy;
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment