Browse Source

fx

lizzie/astc-mp4-improv
lizzie 1 month ago
committed by crueter
parent
commit
7ec09305d0
  1. 41
      src/video_core/host_shaders/astc_decoder.comp

41
src/video_core/host_shaders/astc_decoder.comp

@@ -402,23 +402,7 @@ void DecodeIntegerSequence(uint max_range, uint num_values) {
}
}
void DecodeColorValues(uvec4 modes, uint num_partitions, uint color_data_bits, out uint color_values[32]) {
// TODO: modes[] zero on invalid, so less ops
const uvec4 num_values_tmp = (((modes >> 2) + 1) << 1) & ((uvec4(0, 1, 2, 3) - num_partitions) >> 8);
uint num_values = num_values_tmp.x + num_values_tmp.y + num_values_tmp.z + num_values_tmp.w;
// Find the largest encoding that's within color_data_bits
// TODO(ameerj): profile with binary search
int range = 0;
while (++range < encoding_values.length()) {
const uint bit_length = GetBitLength(num_values, range);
if (bit_length > color_data_bits) {
break;
}
}
const uint upper_bound = num_values;
DecodeIntegerSequence(range - 1, num_values);
for (int i = 0; i < upper_bound; ++i) {
const EncodingData val = GetEncodingFromVector(i);
uint DecodeSingleColorValue(EncodingData val) {
const uint encoding = Encoding(val);
const uint bitlen = NumBits(val);
const uint bitval = BitValue(val);
@@ -503,9 +487,26 @@ void DecodeColorValues(uvec4 modes, uint num_partitions, uint color_data_bits, out uint color_values[32]) {
uint unq = D * C + B;
unq = unq ^ A;
unq = (A & 0x80) | (unq >> 2);
color_values[i + 1] = encoding == JUST_BITS
? FastReplicateTo8(bitval, bitlen)
: unq;
return encoding == JUST_BITS ? FastReplicateTo8(bitval, bitlen) : unq;
}
// Decodes all endpoint color values for the current block into color_values,
// filling entries starting at index 1 (index 0 is left untouched, matching callers).
void DecodeColorValues(uvec4 modes, uint num_partitions, uint color_data_bits, out uint color_values[32]) {
    // TODO: modes[] zero on invalid, so less ops
    // Lane k of the mask is all-ones when k < num_partitions (k - num_partitions
    // wraps to a large uint, so >> 8 keeps high bits set) and zero otherwise;
    // each active partition contributes 2 * ((mode >> 2) + 1) color values.
    const uvec4 values_per_partition =
        (((modes >> 2) + 1) << 1) & ((uvec4(0, 1, 2, 3) - num_partitions) >> 8);
    const uint num_values = values_per_partition.x + values_per_partition.y +
                            values_per_partition.z + values_per_partition.w;

    // Find the largest encoding that's within color_data_bits
    // TODO(ameerj): profile with binary search
    uint range = 1;
    for (; range < encoding_values.length(); ++range) {
        if (GetBitLength(num_values, int(range)) > color_data_bits) {
            break;
        }
    }
    // range now points one past the last encoding that fits, so decode with range - 1.
    DecodeIntegerSequence(range - 1, num_values);

    for (int i = 0; i < int(num_values); ++i) {
        color_values[i + 1] = DecodeSingleColorValue(GetEncodingFromVector(i));
    }
}

Loading…
Cancel
Save