Browse Source

Initial Implementation for Android Native Hardware Decode

pull/2956/head
CamilleLaVey 3 months ago
parent
commit
a0242d05d0
  1. 162
      src/android/app/src/main/java/org/yuzu/yuzu_emu/media/NativeMediaCodec.java
  2. 7
      src/common/android/id_cache.cpp
  3. 6
      src/video_core/CMakeLists.txt
  4. 5
      src/video_core/host1x/codecs/decoder.cpp
  5. 6
      src/video_core/host1x/codecs/decoder.h
  6. 16
      src/video_core/host1x/codecs/h264.cpp
  7. 3
      src/video_core/host1x/codecs/h264.h
  8. 9
      src/video_core/host1x/codecs/vp8.cpp
  9. 3
      src/video_core/host1x/codecs/vp8.h
  10. 9
      src/video_core/host1x/codecs/vp9.cpp
  11. 3
      src/video_core/host1x/codecs/vp9.h
  12. 132
      src/video_core/host1x/ffmpeg/ffmpeg.cpp
  13. 10
      src/video_core/host1x/ffmpeg/ffmpeg.h
  14. 22
      src/video_core/host1x/ffmpeg/mediacodec_bridge.h
  15. 114
      src/video_core/host1x/ffmpeg/mediacodec_bridge_android.cpp
  16. 19
      src/video_core/vulkan_common/vulkan_device.cpp
  17. 51
      src/video_core/vulkan_common/vulkan_device.h

162
src/android/app/src/main/java/org/yuzu/yuzu_emu/media/NativeMediaCodec.java

@ -0,0 +1,162 @@
// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
package org.yuzu.yuzu_emu.media;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Static JNI-facing wrapper around Android's {@link MediaCodec} used for hardware
 * video decoding. Native code creates a decoder with {@link #createDecoder},
 * feeds encoded packets through {@link #decode}, and receives decoded NV12
 * frames via the {@link #onFrameDecoded} native callback.
 *
 * <p>The codec runs in asynchronous mode ({@code setCallback}). In that mode
 * {@code dequeueInputBuffer} is illegal and throws {@code IllegalStateException},
 * so free input-buffer indices reported by the callback are parked in a
 * per-decoder queue that {@link #decode} consumes.
 */
public class NativeMediaCodec {
    private static final String TAG = "NativeMediaCodec";

    // Live decoders, keyed by the id handed back to native code.
    private static final ConcurrentHashMap<Integer, MediaCodec> decoders = new ConcurrentHashMap<>();

    // Free input buffer indices delivered by the async callback, keyed by decoder id.
    private static final ConcurrentHashMap<Integer, LinkedBlockingQueue<Integer>> inputQueues =
            new ConcurrentHashMap<>();

    private static final AtomicInteger nextId = new AtomicInteger(1);

    /**
     * Called from native code to create a decoder for the given mime
     * (e.g. "video/avc"). Returns a decoder id (&gt;0) on success, or 0 on failure.
     */
    public static int createDecoder(String mime, int width, int height) {
        MediaCodec codec = null;
        final int id = nextId.getAndIncrement();
        try {
            codec = MediaCodec.createDecoderByType(mime);
            MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                // Request flexible YUV 4:2:0 output so getOutputImage() yields an Image.
                format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            }
            // Register the input queue before the callback can fire.
            inputQueues.put(id, new LinkedBlockingQueue<>());
            // NOTE(review): below M no callback is installed, so decode() will never
            // see an input buffer and always reports failure -- the native side then
            // falls back to FFmpeg. Confirm the app's minSdk makes this unreachable.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                codec.setCallback(new MediaCodec.Callback() {
                    private final int decoderId = id;

                    @Override
                    public void onInputBufferAvailable(MediaCodec mc, int index) {
                        // Hand the free buffer index to decode(); in async mode
                        // dequeueInputBuffer() would throw IllegalStateException.
                        LinkedBlockingQueue<Integer> queue = inputQueues.get(decoderId);
                        if (queue != null) {
                            queue.offer(index);
                        }
                    }

                    @Override
                    public void onOutputBufferAvailable(MediaCodec mc, int index,
                                                        MediaCodec.BufferInfo info) {
                        try {
                            Image image = mc.getOutputImage(index);
                            if (image != null) {
                                byte[] data = imageToNv12(image);
                                onFrameDecoded(decoderId, data, image.getWidth(),
                                        image.getHeight(), info.presentationTimeUs);
                                image.close();
                            }
                        } catch (Throwable t) {
                            Log.w(TAG, "onOutputBufferAvailable failed: " + t);
                        } finally {
                            try { mc.releaseOutputBuffer(index, false); } catch (Throwable ignored) {}
                        }
                    }

                    @Override
                    public void onError(MediaCodec mc, MediaCodec.CodecException e) {
                        Log.w(TAG, "MediaCodec error: " + e);
                    }

                    @Override
                    public void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
                        Log.i(TAG, "Output format changed: " + format);
                    }
                });
            }
            codec.configure(format, null, null, 0);
            codec.start();
            // Publish the codec only once it is fully started.
            decoders.put(id, codec);
            return id;
        } catch (Exception e) {
            Log.w(TAG, "createDecoder failed: " + e);
            // Roll back partial registration so the id does not leak.
            inputQueues.remove(id);
            if (codec != null) {
                try { codec.release(); } catch (Throwable ignored) {}
            }
            return 0;
        }
    }

    /**
     * Convert a flexible YUV_420_888 {@link Image} to tightly packed NV12
     * (full Y plane followed by interleaved UV rows).
     */
    private static byte[] imageToNv12(Image image) {
        final Image.Plane[] planes = image.getPlanes();
        int w = image.getWidth();
        int h = image.getHeight();
        int ySize = w * h;
        // Chroma dimensions round up for odd video sizes.
        int chromaWidth = (w + 1) / 2;
        int chromaHeight = (h + 1) / 2;
        int uvRowStrideOut = chromaWidth * 2;
        int uvSize = uvRowStrideOut * chromaHeight;
        byte[] out = new byte[ySize + uvSize];

        // Copy luma honoring the source row/pixel strides.
        Image.Plane yPlane = planes[0];
        ByteBuffer yBuffer = yPlane.getBuffer().duplicate();
        int yRowStride = yPlane.getRowStride();
        int yPixelStride = yPlane.getPixelStride();
        for (int row = 0; row < h; row++) {
            int srcRow = row * yRowStride;
            int dstRow = row * w;
            for (int col = 0; col < w; col++) {
                out[dstRow + col] = yBuffer.get(srcRow + col * yPixelStride);
            }
        }

        // Interleave U then V per NV12 layout.
        Image.Plane uPlane = planes[1];
        Image.Plane vPlane = planes[2];
        ByteBuffer uBuffer = uPlane.getBuffer().duplicate();
        ByteBuffer vBuffer = vPlane.getBuffer().duplicate();
        int uRowStride = uPlane.getRowStride();
        int vRowStride = vPlane.getRowStride();
        int uPixelStride = uPlane.getPixelStride();
        int vPixelStride = vPlane.getPixelStride();
        int uvOffset = ySize;
        for (int row = 0; row < chromaHeight; row++) {
            int uRow = row * uRowStride;
            int vRow = row * vRowStride;
            int dstRow = uvOffset + row * uvRowStrideOut;
            for (int col = 0; col < chromaWidth; col++) {
                int dst = dstRow + col * 2;
                out[dst] = uBuffer.get(uRow + col * uPixelStride);
                out[dst + 1] = vBuffer.get(vRow + col * vPixelStride);
            }
        }
        return out;
    }

    // Native callback to deliver decoded NV12 frames to native code.
    private static native void onFrameDecoded(int decoderId, byte[] data, int width, int height, long pts);

    /**
     * Called from native code to feed packet data to the decoder.
     * Returns true only when the packet was actually queued; false lets the
     * caller retry or fall back instead of silently dropping the packet.
     */
    public static boolean decode(int decoderId, byte[] packet, long pts) {
        MediaCodec codec = decoders.get(decoderId);
        LinkedBlockingQueue<Integer> queue = inputQueues.get(decoderId);
        if (codec == null || queue == null || packet == null) {
            return false;
        }
        try {
            // Wait briefly (matches the original 10000 us timeout) for the async
            // callback to report a free input buffer.
            Integer inputIndex = queue.poll(10, TimeUnit.MILLISECONDS);
            if (inputIndex == null) {
                return false;
            }
            ByteBuffer inputBuf = codec.getInputBuffer(inputIndex);
            if (inputBuf == null) {
                Log.w(TAG, "decode input buffer null");
                // Return the buffer empty so the codec does not stall.
                codec.queueInputBuffer(inputIndex, 0, 0, pts, 0);
                return false;
            }
            inputBuf.clear();
            inputBuf.put(packet);
            codec.queueInputBuffer(inputIndex, 0, packet.length, pts, 0);
            return true;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        } catch (Exception e) {
            Log.w(TAG, "decode error: " + e);
            return false;
        }
    }

    /** Called from native code to stop and release a decoder and its queue. */
    public static void releaseDecoder(int decoderId) {
        MediaCodec codec = decoders.remove(decoderId);
        inputQueues.remove(decoderId);
        if (codec != null) {
            try { codec.stop(); } catch (Throwable ignored) {}
            try { codec.release(); } catch (Throwable ignored) {}
        }
    }
}

7
src/common/android/id_cache.cpp

@ -524,6 +524,13 @@ namespace Common::Android {
s_patch_title_id_field = env->GetFieldID(patch_class, "titleId", "Ljava/lang/String;");
env->DeleteLocalRef(patch_class);
// Prefer hardware decoding on Android by default, forcing this setting will
// make the native side attempt GPU decoding first. If the platform lacks a usable
// FFmpeg HW device, FFmpeg will fall back to CPU automatically.
Settings::values.nvdec_emulation.SetValue(Settings::NvdecEmulation::Gpu);
LOG_INFO(HW_GPU, "Android JNI_OnLoad: forced nvdec_emulation = GPU");
const jclass double_class = env->FindClass("java/lang/Double");
s_double_class = reinterpret_cast<jclass>(env->NewGlobalRef(double_class));
s_double_constructor = env->GetMethodID(double_class, "<init>", "(D)V");

6
src/video_core/CMakeLists.txt

@ -329,6 +329,12 @@ target_include_directories(video_core PRIVATE ${FFmpeg_INCLUDE_DIR})
target_link_libraries(video_core PRIVATE ${FFmpeg_LIBRARIES})
target_link_options(video_core PRIVATE ${FFmpeg_LDFLAGS})
# Android-specific Java/JNI bridge for MediaCodec
if(ANDROID)
target_sources(video_core PRIVATE host1x/ffmpeg/mediacodec_bridge_android.cpp)
endif()
add_dependencies(video_core host_shaders)
target_include_directories(video_core PRIVATE ${HOST_SHADERS_INCLUDE})

5
src/video_core/host1x/codecs/decoder.cpp

@ -25,6 +25,11 @@ void Decoder::Decode() {
}
const auto packet_data = ComposeFrame();
#ifdef __ANDROID__
if (const auto frame_dims = CurrentFrameDimensions()) {
decode_api.EnsureMediaCodecDecoder(frame_dims->first, frame_dims->second);
}
#endif
// Send assembled bitstream to decoder.
if (!decode_api.SendPacket(packet_data)) {
return;

6
src/video_core/host1x/codecs/decoder.h

@ -6,6 +6,7 @@
#include <memory>
#include <mutex>
#include <optional>
#include <utility>
#include <string_view>
#include <unordered_map>
#include <queue>
@ -40,6 +41,11 @@ public:
/// Return name of the current codec
[[nodiscard]] virtual std::string_view GetCurrentCodecName() const = 0;
    /// Returns the coded frame dimensions (width, height) in pixels once the codec
    /// has parsed enough of the stream to know them. The base implementation
    /// reports nothing, so codecs that lack this information need not override it.
    [[nodiscard]] virtual std::optional<std::pair<int, int>> CurrentFrameDimensions() const {
        return std::nullopt;
    }
protected:
explicit Decoder(Host1x::Host1x& host1x, s32 id,
const Host1x::NvdecCommon::NvdecRegisters& regs,

16
src/video_core/host1x/codecs/h264.cpp

@ -318,4 +318,20 @@ void H264BitWriter::Flush() {
buffer = 0;
buffer_pos = 0;
}
std::optional<std::pair<int, int>> H264::CurrentFrameDimensions() const {
const u32 width_mbs = current_context.h264_parameter_set.pic_width_in_mbs;
const u32 height_mbs = current_context.h264_parameter_set.frame_height_in_mbs;
if (width_mbs == 0 || height_mbs == 0) {
return std::nullopt;
}
const bool frame_mbs_only = current_context.h264_parameter_set.frame_mbs_only_flag != 0;
const u32 pic_height_mbs = height_mbs / (frame_mbs_only ? 1u : 2u);
const int width = static_cast<int>(width_mbs) * 16;
const int height = static_cast<int>(pic_height_mbs) * 16;
if (width <= 0 || height <= 0) {
return std::nullopt;
}
return std::pair{width, height};
}
} // namespace Tegra::Decoders

3
src/video_core/host1x/codecs/h264.h

@ -3,6 +3,7 @@
#pragma once
#include <optional>
#include <span>
#include <vector>
@ -262,6 +263,8 @@ public:
return "H264";
}
[[nodiscard]] std::optional<std::pair<int, int>> CurrentFrameDimensions() const override;
private:
bool is_first_frame{true};
Common::ScratchBuffer<u8> frame_scratch;

9
src/video_core/host1x/codecs/vp8.cpp

@ -73,4 +73,13 @@ std::span<const u8> VP8::ComposeFrame() {
return frame_scratch;
}
std::optional<std::pair<int, int>> VP8::CurrentFrameDimensions() const {
    // Report the dimensions parsed from the current VP8 frame header, or
    // nothing when no valid frame has been seen yet.
    const auto w = static_cast<int>(current_context.frame_width);
    const auto h = static_cast<int>(current_context.frame_height);
    if (w > 0 && h > 0) {
        return std::pair{w, h};
    }
    return std::nullopt;
}
} // namespace Tegra::Decoders

3
src/video_core/host1x/codecs/vp8.h

@ -4,6 +4,7 @@
#pragma once
#include <array>
#include <optional>
#include <span>
#include "common/common_funcs.h"
@ -51,6 +52,8 @@ public:
return "VP8";
}
[[nodiscard]] std::optional<std::pair<int, int>> CurrentFrameDimensions() const override;
private:
Common::ScratchBuffer<u8> frame_scratch;

9
src/video_core/host1x/codecs/vp9.cpp

@ -489,6 +489,15 @@ Vp9FrameContainer VP9::GetCurrentFrame() {
return current_frame;
}
std::optional<std::pair<int, int>> VP9::CurrentFrameDimensions() const {
    // Report the frame size recorded in the current VP9 frame info, or
    // nothing when it has not been populated yet.
    const auto w = static_cast<int>(current_frame_info.frame_size.width);
    const auto h = static_cast<int>(current_frame_info.frame_size.height);
    if (w > 0 && h > 0) {
        return std::pair{w, h};
    }
    return std::nullopt;
}
std::vector<u8> VP9::ComposeCompressedHeader() {
VpxRangeEncoder writer{};
const bool update_probs = !current_frame_info.is_key_frame && current_frame_info.show_frame;

3
src/video_core/host1x/codecs/vp9.h

@ -4,6 +4,7 @@
#pragma once
#include <array>
#include <optional>
#include <span>
#include <vector>
@ -136,6 +137,8 @@ public:
return "VP9";
}
[[nodiscard]] std::optional<std::pair<int, int>> CurrentFrameDimensions() const override;
private:
/// Returns true if the most recent frame was a hidden frame.
[[nodiscard]] bool WasFrameHidden() const {

132
src/video_core/host1x/ffmpeg/ffmpeg.cpp

@ -4,12 +4,18 @@
// SPDX-FileCopyrightText: Copyright 2023 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
#include <algorithm>
#include <cstring>
#include "common/assert.h"
#include "common/logging/log.h"
#include "common/scope_exit.h"
#include "common/settings.h"
#include "core/memory.h"
#include "video_core/host1x/ffmpeg/ffmpeg.h"
#ifdef __ANDROID__
#include "video_core/host1x/ffmpeg/mediacodec_bridge.h"
#endif
#include "video_core/memory_manager.h"
extern "C" {
@ -270,6 +276,15 @@ std::shared_ptr<Frame> DecoderContext::ReceiveFrame() {
}
void DecodeApi::Reset() {
#ifdef __ANDROID__
if (m_mediacodec_decoder_id != 0) {
FFmpeg::MediaCodecBridge::DestroyDecoder(m_mediacodec_decoder_id);
m_mediacodec_decoder_id = 0;
}
m_mediacodec_mime = nullptr;
m_mediacodec_width = 0;
m_mediacodec_height = 0;
#endif
m_hardware_context.reset();
m_decoder_context.reset();
m_decoder.reset();
@ -282,8 +297,34 @@ bool DecodeApi::Initialize(Tegra::Host1x::NvdecCommon::VideoCodec codec) {
// Enable GPU decoding if requested.
if (Settings::values.nvdec_emulation.GetValue() == Settings::NvdecEmulation::Gpu) {
#ifdef __ANDROID__
if (FFmpeg::MediaCodecBridge::IsAvailable()) {
// Register mime type for deferred MediaCodec creation.
switch (codec) {
case Tegra::Host1x::NvdecCommon::VideoCodec::H264:
m_mediacodec_mime = "video/avc";
break;
case Tegra::Host1x::NvdecCommon::VideoCodec::VP8:
m_mediacodec_mime = "video/x-vnd.on2.vp8";
break;
case Tegra::Host1x::NvdecCommon::VideoCodec::VP9:
m_mediacodec_mime = "video/x-vnd.on2.vp9";
break;
default:
m_mediacodec_mime = nullptr;
break;
}
}
#endif
#ifdef __ANDROID__
if (m_mediacodec_mime == nullptr) {
m_hardware_context.emplace();
m_hardware_context->InitializeForDecoder(*m_decoder_context, *m_decoder);
}
#else
m_hardware_context.emplace();
m_hardware_context->InitializeForDecoder(*m_decoder_context, *m_decoder);
#endif
}
// Open the decoder context.
@ -295,12 +336,103 @@ bool DecodeApi::Initialize(Tegra::Host1x::NvdecCommon::VideoCodec codec) {
return true;
}
#ifdef __ANDROID__
// Lazily (re)creates the Android MediaCodec decoder once the frame dimensions
// are known, tearing down and rebuilding it whenever the dimensions change.
void DecodeApi::EnsureMediaCodecDecoder(int width, int height) {
    // Nothing to do without a registered mime type, valid dimensions, or a usable bridge.
    if (m_mediacodec_mime == nullptr || width <= 0 || height <= 0 ||
        !FFmpeg::MediaCodecBridge::IsAvailable()) {
        return;
    }
    // An existing decoder with matching dimensions is kept as-is.
    const bool dimensions_match = m_mediacodec_decoder_id > 0 &&
                                  m_mediacodec_width == width && m_mediacodec_height == height;
    if (dimensions_match) {
        return;
    }
    // Tear down any stale decoder before creating one for the new dimensions.
    if (m_mediacodec_decoder_id != 0) {
        FFmpeg::MediaCodecBridge::DestroyDecoder(m_mediacodec_decoder_id);
        m_mediacodec_decoder_id = 0;
        m_mediacodec_width = 0;
        m_mediacodec_height = 0;
    }
    if (const int id = FFmpeg::MediaCodecBridge::CreateDecoder(m_mediacodec_mime, width, height);
        id > 0) {
        m_mediacodec_decoder_id = id;
        m_mediacodec_width = width;
        m_mediacodec_height = height;
        LOG_INFO(HW_GPU, "MediaCodec bridge created decoder id={} ({}x{})", id, width, height);
    } else {
        LOG_DEBUG(HW_GPU, "MediaCodec bridge failed to create decoder for {} ({}x{})", m_mediacodec_mime,
                  width, height);
        // Clearing the mime disables further attempts; decoding falls back to FFmpeg.
        m_mediacodec_mime = nullptr;
        m_mediacodec_width = 0;
        m_mediacodec_height = 0;
    }
}
#endif
// Submits one assembled bitstream packet to the active decoder. On Android the
// MediaCodec bridge is preferred when a bridge decoder exists; otherwise (or
// when the bridge cannot queue the packet) the packet goes to FFmpeg.
bool DecodeApi::SendPacket(std::span<const u8> packet_data) {
#ifdef __ANDROID__
    if (m_mediacodec_decoder_id > 0) {
        if (FFmpeg::MediaCodecBridge::SendPacket(m_mediacodec_decoder_id, packet_data.data(), packet_data.size(), 0)) {
            return true;
        }
        // NOTE(review): falling back to FFmpeg for a single packet while the
        // MediaCodec decoder stays active feeds two decoders from the middle of
        // one stream -- confirm this recovery path is intended.
        LOG_DEBUG(HW_GPU, "MediaCodec bridge failed to queue packet, falling back to FFmpeg");
    }
#endif
    FFmpeg::Packet packet(packet_data);
    return m_decoder_context->SendPacket(packet);
}
// Returns the next decoded frame. On Android, a frame pending in the MediaCodec
// bridge is preferred: its tightly row-packed NV12 payload is re-packed into an
// FFmpeg AVFrame (whose planes may use larger, aligned linesizes). If no bridge
// frame is available (or re-packing fails), the FFmpeg decoder is polled.
std::shared_ptr<Frame> DecodeApi::ReceiveFrame() {
#ifdef __ANDROID__
    if (m_mediacodec_decoder_id > 0) {
        int width = 0;
        int height = 0;
        int64_t pts = 0;
        if (auto frame_data = FFmpeg::MediaCodecBridge::PopDecodedFrame(m_mediacodec_decoder_id, width, height, pts)) {
            if (width > 0 && height > 0 && !frame_data->empty()) {
                auto frame = std::make_shared<Frame>();
                AVFrame* av_frame = frame->GetFrame();
                av_frame->format = AV_PIX_FMT_NV12;
                av_frame->width = width;
                av_frame->height = height;
                av_frame->pts = pts;
                // Allocate the NV12 planes with 32-byte alignment.
                if (const int ret = av_frame_get_buffer(av_frame, 32); ret < 0) {
                    LOG_ERROR(HW_GPU, "av_frame_get_buffer failed: {}", AVError(ret));
                } else {
                    // Source luma rows are packed at exactly `width` bytes each.
                    const size_t y_stride = static_cast<size_t>(width);
                    const size_t y_plane_size = y_stride * static_cast<size_t>(height);
                    if (frame_data->size() < y_plane_size) {
                        LOG_WARNING(HW_GPU, "MediaCodec frame too small: {} < {}", frame_data->size(), y_plane_size);
                    } else {
                        // Copy the luma plane row by row to honor the AVFrame linesize.
                        const u8* src_y = frame_data->data();
                        u8* dst_y = av_frame->data[0];
                        for (int row = 0; row < height; ++row) {
                            std::memcpy(dst_y + static_cast<size_t>(row) * av_frame->linesize[0],
                                        src_y + static_cast<size_t>(row) * y_stride, y_stride);
                        }
                        // Everything after the luma plane is interleaved UV. Infer the
                        // source chroma stride from the leftover size; floor division
                        // keeps per-row reads inside the buffer even when the
                        // misalignment warning below fires.
                        const int chroma_height = (height + 1) / 2;
                        const size_t chroma_plane_size = frame_data->size() - y_plane_size;
                        const size_t chroma_stride = chroma_height > 0
                            ? chroma_plane_size / static_cast<size_t>(chroma_height)
                            : 0;
                        if (chroma_height > 0 && chroma_stride * static_cast<size_t>(chroma_height) != chroma_plane_size) {
                            LOG_WARNING(HW_GPU, "MediaCodec chroma plane misaligned: stride {} * height {} != {}",
                                        chroma_stride, chroma_height, chroma_plane_size);
                        }
                        const u8* src_uv = frame_data->data() + y_plane_size;
                        u8* dst_uv = av_frame->data[1];
                        // Clamp each row copy to the destination linesize as well.
                        const size_t copy_stride = std::min(chroma_stride, static_cast<size_t>(av_frame->linesize[1]));
                        for (int row = 0; row < chroma_height; ++row) {
                            std::memcpy(dst_uv + static_cast<size_t>(row) * av_frame->linesize[1],
                                        src_uv + static_cast<size_t>(row) * chroma_stride, copy_stride);
                        }
                        return frame;
                    }
                }
            }
        }
    }
#endif
    // No MediaCodec frame pending (or not Android): receive raw frame from decoder.
    return m_decoder_context->ReceiveFrame();
}

10
src/video_core/host1x/ffmpeg/ffmpeg.h

@ -216,10 +216,20 @@ public:
bool SendPacket(std::span<const u8> packet_data);
std::shared_ptr<Frame> ReceiveFrame();
#ifdef __ANDROID__
void EnsureMediaCodecDecoder(int width, int height);
#endif
private:
std::optional<FFmpeg::Decoder> m_decoder;
std::optional<FFmpeg::DecoderContext> m_decoder_context;
std::optional<FFmpeg::HardwareContext> m_hardware_context;
#ifdef __ANDROID__
int m_mediacodec_decoder_id = 0;
const char* m_mediacodec_mime = nullptr;
int m_mediacodec_width = 0;
int m_mediacodec_height = 0;
#endif
};
} // namespace FFmpeg

22
src/video_core/host1x/ffmpeg/mediacodec_bridge.h

@ -0,0 +1,22 @@
// SPDX-FileCopyrightText: Copyright 2025 Eden Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later

#pragma once

#include <optional>
#include <vector>
#include <cstdint>

// Minimal C++ interface to the Android MediaCodec hardware decoder. On Android
// these functions are implemented with JNI calls into
// org.yuzu.yuzu_emu.media.NativeMediaCodec; decoded frames arrive
// asynchronously and are buffered until PopDecodedFrame() collects them.
namespace FFmpeg::MediaCodecBridge {

// Returns true when the Java-side bridge class could be located and bound.
bool IsAvailable();

// Create a platform decoder for the given mime type ("video/avc", "video/x-vnd.on2.vp9", ...)
// Returns decoder id (>0) on success, or 0 on failure.
int CreateDecoder(const char* mime, int width, int height);

// Release the platform decoder and drop any buffered frame state for `id`.
void DestroyDecoder(int id);

// Feed an encoded packet to the decoder. Returns true if accepted.
bool SendPacket(int id, const uint8_t* data, size_t size, int64_t pts);

// Pop a decoded NV12 frame. Returns std::nullopt if none available. On success, fills width,height,pts
std::optional<std::vector<uint8_t>> PopDecodedFrame(int id, int& width, int& height, int64_t& pts);

} // namespace FFmpeg::MediaCodecBridge

114
src/video_core/host1x/ffmpeg/mediacodec_bridge_android.cpp

@ -0,0 +1,114 @@
// Android-specific JNI bridge implementation
#ifdef __ANDROID__
#include "mediacodec_bridge.h"
#include <jni.h>
#include <vector>
#include <map>
#include <memory>
#include <mutex>
#include <condition_variable>
#include <optional>
#include <cstdint>
#include "common/android/id_cache.h"
#include "common/logging/log.h"
namespace FFmpeg::MediaCodecBridge {
// Cached JNI handles for org.yuzu.yuzu_emu.media.NativeMediaCodec, resolved
// lazily by IsAvailable() and kept for the lifetime of the process.
static jclass g_native_media_codec_class = nullptr;
static jmethodID g_create_decoder = nullptr;
static jmethodID g_release_decoder = nullptr;
static jmethodID g_decode_method = nullptr;

// Per-decoder state shared between the JNI frame callback (producer) and
// PopDecodedFrame (consumer). Holds at most one pending frame; a newer frame
// overwrites an uncollected older one.
struct DecoderState {
    int id;
    std::mutex mtx;              // NOTE(review): currently unused; all access is guarded by s_global_mtx
    std::vector<uint8_t> frame;  // NV12 payload of the most recently decoded frame
    int width = 0;
    int height = 0;
    int64_t pts = 0;
    bool has_frame = false;      // true while `frame` holds an unconsumed frame
};

// Guards s_decoders and every DecoderState it owns.
static std::mutex s_global_mtx;
static std::map<int, std::shared_ptr<DecoderState>> s_decoders;
// JNI entry point invoked by NativeMediaCodec.onFrameDecoded() on the codec's
// callback thread. Copies the NV12 payload into the decoder's pending-frame
// slot, overwriting any frame that has not been collected yet.
extern "C" JNIEXPORT void JNICALL Java_org_yuzu_yuzu_1emu_media_NativeMediaCodec_onFrameDecoded(
    JNIEnv* env, jclass, jint decoderId, jbyteArray data, jint width, jint height, jlong pts) {
    // Reject a null/empty payload up front: GetArrayLength(nullptr) aborts the VM.
    if (data == nullptr || width <= 0 || height <= 0) {
        return;
    }
    std::lock_guard lock(s_global_mtx);
    auto it = s_decoders.find(decoderId);
    if (it == s_decoders.end()) {
        return; // Decoder already destroyed; drop the late frame.
    }
    auto& st = it->second;
    const jsize len = env->GetArrayLength(data);
    if (len <= 0) {
        return;
    }
    st->frame.resize(static_cast<size_t>(len));
    env->GetByteArrayRegion(data, 0, len, reinterpret_cast<jbyte*>(st->frame.data()));
    st->width = width;
    st->height = height;
    st->pts = pts;
    st->has_frame = true;
}
bool IsAvailable() {
// We assume the bridge is available if the Java class can be found.
auto env = Common::Android::GetEnvForThread();
if (!env) return false;
if (!g_native_media_codec_class) {
jclass cls = env->FindClass("org/yuzu/yuzu_emu/media/NativeMediaCodec");
if (!cls) return false;
g_native_media_codec_class = reinterpret_cast<jclass>(env->NewGlobalRef(cls));
g_create_decoder = env->GetStaticMethodID(g_native_media_codec_class, "createDecoder", "(Ljava/lang/String;II)I");
g_release_decoder = env->GetStaticMethodID(g_native_media_codec_class, "releaseDecoder", "(I)V");
g_decode_method = env->GetStaticMethodID(g_native_media_codec_class, "decode", "(I[BJ)Z");
}
return g_native_media_codec_class != nullptr;
}
int CreateDecoder(const char* mime, int width, int height) {
auto env = Common::Android::GetEnvForThread();
if (!env) return 0;
jstring jmime = env->NewStringUTF(mime);
const int id = env->CallStaticIntMethod(g_native_media_codec_class, g_create_decoder, jmime, width, height);
env->DeleteLocalRef(jmime);
if (id <= 0) return 0;
std::lock_guard lock(s_global_mtx);
auto st = std::make_shared<DecoderState>();
st->id = id;
s_decoders[id] = st;
return id;
}
// Releases the Java-side decoder and drops its native frame state. Native state
// is erased first (and unconditionally) so no late frame callback can resurrect
// it, and so it is not leaked when no JNI env is attached to this thread --
// the original returned early in that case and leaked the map entry.
void DestroyDecoder(int id) {
    {
        std::lock_guard lock(s_global_mtx);
        s_decoders.erase(id);
    }
    auto env = Common::Android::GetEnvForThread();
    if (!env || !g_release_decoder) {
        return;
    }
    env->CallStaticVoidMethod(g_native_media_codec_class, g_release_decoder, id);
    if (env->ExceptionCheck()) {
        env->ExceptionClear(); // Best-effort release; swallow Java-side failure.
    }
}
bool SendPacket(int id, const uint8_t* data, size_t size, int64_t pts) {
auto env = Common::Android::GetEnvForThread();
if (!env) return false;
std::lock_guard lock(s_global_mtx);
auto it = s_decoders.find(id);
if (it == s_decoders.end()) return false;
jbyteArray arr = env->NewByteArray(static_cast<jsize>(size));
env->SetByteArrayRegion(arr, 0, static_cast<jsize>(size), reinterpret_cast<const jbyte*>(data));
jboolean ok = env->CallStaticBooleanMethod(g_native_media_codec_class, g_decode_method, id, arr, static_cast<jlong>(pts));
env->DeleteLocalRef(arr);
return ok;
}
// Collects the pending decoded NV12 frame for `id`, if any, filling width,
// height and pts. The payload is moved out rather than copied (the original
// copied the whole frame buffer); the emptied vector is refilled by the next
// onFrameDecoded callback.
std::optional<std::vector<uint8_t>> PopDecodedFrame(int id, int& width, int& height, int64_t& pts) {
    std::lock_guard lock(s_global_mtx);
    auto it = s_decoders.find(id);
    if (it == s_decoders.end()) {
        return std::nullopt;
    }
    auto& st = it->second;
    if (!st->has_frame) {
        return std::nullopt;
    }
    st->has_frame = false;
    width = st->width;
    height = st->height;
    pts = st->pts;
    return std::move(st->frame);
}
#endif // __ANDROID__

19
src/video_core/vulkan_common/vulkan_device.cpp

@ -1035,6 +1035,25 @@ bool Device::GetSuitability(bool requires_swapchain) {
FOR_EACH_VK_FEATURE_EXT(FEATURE_EXTENSION);
FOR_EACH_VK_EXTENSION(EXTENSION);
// Maintenance extensions may not have corresponding macros in older Vulkan
// headers. Detect them by name and enable them if present.
if (supported_extensions.contains("VK_KHR_maintenance1")) {
loaded_extensions.insert("VK_KHR_maintenance1");
extensions.maintenance1 = true;
}
if (supported_extensions.contains("VK_KHR_maintenance2")) {
loaded_extensions.insert("VK_KHR_maintenance2");
extensions.maintenance2 = true;
}
if (supported_extensions.contains("VK_KHR_maintenance3")) {
loaded_extensions.insert("VK_KHR_maintenance3");
extensions.maintenance3 = true;
}
if (supported_extensions.contains("VK_KHR_maintenance4")) {
loaded_extensions.insert("VK_KHR_maintenance4");
extensions.maintenance4 = true;
}
#undef FEATURE_EXTENSION
#undef EXTENSION

51
src/video_core/vulkan_common/vulkan_device.h

@ -445,6 +445,51 @@ public:
return extensions.shader_float_controls;
}
    /// Returns true if VK_KHR_maintenance1 is enabled.
    bool IsKhrMaintenance1Supported() const {
        return extensions.maintenance1;
    }

    /// Returns true if VK_KHR_maintenance2 is enabled.
    bool IsKhrMaintenance2Supported() const {
        return extensions.maintenance2;
    }

    /// Returns true if VK_KHR_maintenance3 is enabled.
    bool IsKhrMaintenance3Supported() const {
        return extensions.maintenance3;
    }

    /// Returns true if VK_KHR_maintenance4 is enabled.
    bool IsKhrMaintenance4Supported() const {
        return extensions.maintenance4;
    }

    // NOTE(review): maintenance1..4 flags are the explicitly-declared members added
    // for older Vulkan headers; maintenance5..9 below are presumed to come from the
    // FOR_EACH_VK_EXTENSION-generated members -- confirm they exist on all
    // supported header versions.

    /// Returns true if VK_KHR_maintenance5 is enabled.
    bool IsKhrMaintenance5Supported() const {
        return extensions.maintenance5;
    }

    /// Returns true if VK_KHR_maintenance6 is enabled.
    bool IsKhrMaintenance6Supported() const {
        return extensions.maintenance6;
    }

    /// Returns true if VK_KHR_maintenance7 is enabled.
    bool IsKhrMaintenance7Supported() const {
        return extensions.maintenance7;
    }

    /// Returns true if VK_KHR_maintenance8 is enabled.
    bool IsKhrMaintenance8Supported() const {
        return extensions.maintenance8;
    }

    /// Returns true if VK_KHR_maintenance9 is enabled.
    bool IsKhrMaintenance9Supported() const {
        return extensions.maintenance9;
    }
/// Returns true if VK_KHR_sampler_mirror_clamp_to_edge is enabled.
bool IsKhrSamplerMirrorClampToEdgeSupported() const {
return extensions.sampler_mirror_clamp_to_edge;
@ -794,6 +839,12 @@ private:
FOR_EACH_VK_FEATURE_EXT(FEATURE);
FOR_EACH_VK_EXTENSION(EXTENSION);
// Maintenance extensions (may not be present in older Vulkan headers).
bool maintenance1{};
bool maintenance2{};
bool maintenance3{};
bool maintenance4{};
#undef EXTENSION
#undef FEATURE
};

Loading…
Cancel
Save