Remove BackendTexture, BackendRenderTarget and BackendSemaphore from tgfx. (#135)

* Remove the hasAlpha property from OESTexture.

* Expose GLTexture and GLRenderTarget classes as Public APIs.

* Remove BackendTexture, BackendRenderTarget and BackendSemaphore from tgfx.

* Fix build errors on iOS, macOS, Android, and Qt.
domchen authored Feb 24, 2022
1 parent 316bb94 commit 66e558d
Showing 67 changed files with 858 additions and 1,013 deletions.
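
With the tgfx backend wrappers gone, a pag-level BackendTexture is no longer reinterpret_cast into a tgfx type; it is converted explicitly through the new GL-level helpers in TGFXCast (first file below). A minimal caller-side sketch of that round trip, assuming BackendTexture's default constructor and width()/height() accessors from the public pag/gpu.h header:

```cpp
#include "base/utils/TGFXCast.h"

// Round-trips a pag::BackendTexture through the GL-level tgfx::GLSampler that
// tgfx now works with internally. Returns an empty BackendTexture when the
// input is not GL-backed.
pag::BackendTexture RoundTripThroughGL(const pag::BackendTexture& texture) {
  tgfx::GLSampler sampler = {};
  if (!pag::GetGLSampler(texture, &sampler)) {
    return {};
  }
  return pag::ToBackendTexture(sampler, texture.width(), texture.height());
}
```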
34 changes: 34 additions & 0 deletions src/base/utils/TGFXCast.cpp
@@ -141,4 +141,38 @@ ColorType ToPAG(tgfx::ColorType colorType) {
return ColorType::Unknown;
}
}

tgfx::GLSemaphore ToTGFX(const BackendSemaphore& semaphore) {
tgfx::GLSemaphore glSemaphore = {};
glSemaphore.glSync = semaphore.glSync();
return glSemaphore;
}

bool GetGLSampler(const BackendTexture& texture, tgfx::GLSampler* sampler) {
GLTextureInfo glInfo = {};
if (!texture.getGLTextureInfo(&glInfo)) {
return false;
}
sampler->id = glInfo.id;
sampler->target = glInfo.target;
sampler->format = tgfx::PixelFormat::RGBA_8888;
return true;
}

bool GetGLFrameBuffer(const BackendRenderTarget& renderTarget, tgfx::GLFrameBuffer* frameBuffer) {
GLFrameBufferInfo glInfo = {};
if (!renderTarget.getGLFramebufferInfo(&glInfo)) {
return false;
}
frameBuffer->id = glInfo.id;
frameBuffer->format = tgfx::PixelFormat::RGBA_8888;
return true;
}

BackendTexture ToBackendTexture(const tgfx::GLSampler& sampler, int width, int height) {
GLTextureInfo glInfo = {};
glInfo.id = sampler.id;
glInfo.target = sampler.target;
return BackendTexture(glInfo, width, height);
}
} // namespace pag
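
Render targets and semaphores follow the same pattern as textures. A sketch under the assumption that the caller already owns a GL-backed BackendRenderTarget and an initialized BackendSemaphore:

```cpp
#include "base/utils/TGFXCast.h"

// Pulls the GL framebuffer and GL sync object out of the pag backend wrappers
// so they can be handed to tgfx's GL classes directly.
bool ExtractGLObjects(const pag::BackendRenderTarget& renderTarget,
                      const pag::BackendSemaphore& semaphore,
                      tgfx::GLFrameBuffer* frameBuffer, tgfx::GLSemaphore* glSemaphore) {
  if (!pag::GetGLFrameBuffer(renderTarget, frameBuffer)) {
    return false;  // not a GL-backed render target
  }
  *glSemaphore = pag::ToTGFX(semaphore);  // carries the GLsync handle across
  return true;
}
```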
27 changes: 7 additions & 20 deletions src/base/utils/TGFXCast.h
@@ -25,6 +25,9 @@
#include "core/Matrix.h"
#include "core/Stroke.h"
#include "gpu/Backend.h"
#include "gpu/opengl/GLFrameBuffer.h"
#include "gpu/opengl/GLSampler.h"
#include "gpu/opengl/GLSemaphore.h"
#include "pag/file.h"
#include "pag/gpu.h"

@@ -52,29 +55,13 @@ tgfx::ColorType ToTGFX(ColorType colorType);

ColorType ToPAG(tgfx::ColorType colorType);

static inline tgfx::BackendTexture ToTGFX(const BackendTexture& texture) {
return *reinterpret_cast<const tgfx::BackendTexture*>(&texture);
}

static inline BackendTexture ToPAG(const tgfx::BackendTexture& texture) {
return *reinterpret_cast<const BackendTexture*>(&texture);
}

static inline tgfx::BackendRenderTarget ToTGFX(const BackendRenderTarget& renderTarget) {
return *reinterpret_cast<const tgfx::BackendRenderTarget*>(&renderTarget);
}
bool GetGLSampler(const BackendTexture& texture, tgfx::GLSampler* sampler);

static inline BackendRenderTarget ToPAG(const tgfx::BackendRenderTarget& renderTarget) {
return *reinterpret_cast<const BackendRenderTarget*>(&renderTarget);
}
bool GetGLFrameBuffer(const BackendRenderTarget& renderTarget, tgfx::GLFrameBuffer* frameBuffer);

static inline tgfx::BackendSemaphore* ToTGFX(BackendSemaphore* semaphore) {
return reinterpret_cast<tgfx::BackendSemaphore*>(semaphore);
}
BackendTexture ToBackendTexture(const tgfx::GLSampler& sampler, int width, int height);

static inline tgfx::BackendSemaphore ToTGFX(const BackendSemaphore& semaphore) {
return *reinterpret_cast<const tgfx::BackendSemaphore*>(&semaphore);
}
tgfx::GLSemaphore ToTGFX(const BackendSemaphore& semaphore);

static inline const tgfx::Matrix* ToTGFX(const Matrix* matrix) {
return reinterpret_cast<const tgfx::Matrix*>(matrix);
5 changes: 3 additions & 2 deletions src/platform/android/GPUDecoder.cpp
@@ -66,7 +66,7 @@ GPUDecoder::GPUDecoder(const VideoConfig& config) {
if (env == nullptr) {
return;
}
videoSurface = VideoSurface::Make(config.width, config.height, config.hasAlpha);
videoSurface = VideoSurface::Make(config.width, config.height);
if (videoSurface == nullptr) {
return;
}
@@ -192,11 +192,12 @@ std::shared_ptr<VideoBuffer> GPUDecoder::onRenderFrame() {
if (env == nullptr) {
return nullptr;
}
videoSurface->updateTexImage();
videoSurface->clearPendingTexImage();
auto result = env->CallBooleanMethod(videoDecoder.get(), GPUDecoder_onRenderFrame);
if (!result) {
return nullptr;
}
videoSurface->markPendingTexImage();
return VideoImage::MakeFrom(videoSurface, videoWidth, videoHeight);
}
} // namespace pag
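
The decoding thread no longer calls updateTexImage() itself; it only flags a pending frame, and the actual SurfaceTexture update happens later on the GPU thread inside VideoSurface::makeTexture(). A sketch of that handshake, with the surrounding decoder loop and the decoderProducedFrame flag assumed:

```cpp
#include <memory>
#include "VideoImage.h"
#include "VideoSurface.h"

// Decoding thread: drop any frame that was flagged but never consumed, render
// the next frame, then flag the new one for the GPU thread to pick up.
std::shared_ptr<pag::VideoBuffer> RenderNextFrame(
    std::shared_ptr<pag::VideoSurface> videoSurface, bool decoderProducedFrame,
    int videoWidth, int videoHeight) {
  videoSurface->clearPendingTexImage();
  if (!decoderProducedFrame) {
    return nullptr;
  }
  videoSurface->markPendingTexImage();
  return pag::VideoImage::MakeFrom(videoSurface, videoWidth, videoHeight);
}
```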
7 changes: 5 additions & 2 deletions src/platform/android/JPAGSurface.cpp
@@ -17,6 +17,9 @@
/////////////////////////////////////////////////////////////////////////////////////////////////

#include "JPAGSurface.h"
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <GLES3/gl3.h>
#include <android/native_window_jni.h>
#include "GPUDecoder.h"
#include "GPUDrawable.h"
@@ -130,9 +133,9 @@ JNIEXPORT jlong Java_org_libpag_PAGSurface_SetupFromTexture(JNIEnv*, jclass, jin
jint width, jint height, jboolean flipY,
jboolean forAsyncThread) {
GLTextureInfo glInfo = {};
glInfo.target = GL::TEXTURE_2D;
glInfo.target = GL_TEXTURE_2D;
glInfo.id = static_cast<unsigned>(textureID);
glInfo.format = GL::RGBA8;
glInfo.format = GL_RGBA8;
BackendTexture glTexture(glInfo, width, height);
auto origin = flipY ? ImageOrigin::BottomLeft : ImageOrigin::TopLeft;

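
The JNI entry point above now fills GLTextureInfo with standard GLES enums (GL_TEXTURE_2D, GL_RGBA8) instead of tgfx's GL constants. For comparison, a purely native-side sketch of the same setup; PAGSurface::MakeFrom taking a BackendTexture and ImageOrigin is assumed from the public pag headers and is not part of this diff:

```cpp
#include <GLES3/gl3.h>
#include "pag/pag.h"

// Wraps an existing, caller-owned GL texture as a PAGSurface render target.
std::shared_ptr<pag::PAGSurface> MakeSurfaceFromTexture(unsigned textureID, int width,
                                                        int height, bool flipY) {
  pag::GLTextureInfo glInfo = {};
  glInfo.id = textureID;
  glInfo.target = GL_TEXTURE_2D;
  glInfo.format = GL_RGBA8;
  pag::BackendTexture backendTexture(glInfo, width, height);
  auto origin = flipY ? pag::ImageOrigin::BottomLeft : pag::ImageOrigin::TopLeft;
  return pag::PAGSurface::MakeFrom(backendTexture, origin);  // assumed factory
}
```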
9 changes: 1 addition & 8 deletions src/platform/android/VideoImage.cpp
@@ -31,17 +31,10 @@ std::shared_ptr<VideoImage> VideoImage::MakeFrom(std::shared_ptr<VideoSurface> v

VideoImage::VideoImage(std::shared_ptr<VideoSurface> videoSurface, int width, int height)
: VideoBuffer(width, height), videoSurface(std::move(videoSurface)) {
this->videoSurface->markHasNewTextureImage();
}

std::shared_ptr<tgfx::Texture> VideoImage::makeTexture(tgfx::Context* context) const {
std::lock_guard<std::mutex> autoLock(locker);
if (!videoSurface->attachToContext(context)) {
return nullptr;
}
if (!videoSurface->updateTexImage()) {
return nullptr;
}
return videoSurface->getTexture();
return videoSurface->makeTexture(context);
}
} // namespace pag
117 changes: 59 additions & 58 deletions src/platform/android/VideoSurface.cpp
@@ -18,6 +18,7 @@

#include "VideoSurface.h"
#include "android/native_window_jni.h"
#include "gpu/opengl/GLContext.h"
#include "gpu/opengl/GLTexture.h"

namespace pag {
@@ -44,10 +45,9 @@ void VideoSurface::InitJNI(JNIEnv* env, const std::string& className) {
VideoSurface_onRelease = env->GetMethodID(VideoSurfaceClass.get(), "onRelease", "()V");
}

OESTexture::OESTexture(tgfx::GLTextureInfo info, int width, int height, bool hasAlpha)
: GLTexture(width, height, tgfx::ImageOrigin::TopLeft), hasAlpha(hasAlpha) {
sampler.glInfo = info;
sampler.config = tgfx::PixelConfig::RGBA_8888;
OESTexture::OESTexture(const tgfx::GLSampler& glSampler, int width, int height)
: GLTexture(width, height, tgfx::ImageOrigin::TopLeft) {
sampler = glSampler;
}

void OESTexture::setTextureSize(int width, int height) {
@@ -57,7 +57,7 @@ void OESTexture::setTextureSize(int width, int height) {
}

void OESTexture::computeTransform() {
if (textureWidth == 0 || textureHeight == 0 || hasAlpha) {
if (textureWidth == 0 || textureHeight == 0) {
return;
}
// https://cs.android.com/android/platform/superproject/+/master:frameworks/native/libs/nativedisplay/surfacetexture/SurfaceTexture.cpp;l=275;drc=master;bpv=0;bpt=1
@@ -81,21 +81,17 @@
}

tgfx::Point OESTexture::getTextureCoord(float x, float y) const {
if (hasAlpha) {
// If there is an alpha channel, there is no need to shrink the texels.
return {x / static_cast<float>(textureWidth), y / static_cast<float>(textureHeight)};
}
return {x / static_cast<float>(width()) * sx + tx, y / static_cast<float>(height()) * sy + ty};
}

void OESTexture::onRelease(tgfx::Context* context) {
if (sampler.glInfo.id > 0) {
if (sampler.id > 0) {
auto gl = tgfx::GLContext::Unwrap(context);
gl->deleteTextures(1, &sampler.glInfo.id);
gl->deleteTextures(1, &sampler.id);
}
}

std::shared_ptr<VideoSurface> VideoSurface::Make(int width, int height, bool hasAlpha) {
std::shared_ptr<VideoSurface> VideoSurface::Make(int width, int height) {
auto env = JNIEnvironment::Current();
if (env == nullptr) {
return nullptr;
@@ -105,12 +101,11 @@ std::shared_ptr<VideoSurface> VideoSurface::Make(int width, int height, bool has
if (surface.empty()) {
return nullptr;
}
return std::shared_ptr<VideoSurface>(
new VideoSurface(env, surface.get(), width, height, hasAlpha));
return std::shared_ptr<VideoSurface>(new VideoSurface(env, surface.get(), width, height));
}

VideoSurface::VideoSurface(JNIEnv* env, jobject surface, int width, int height, bool hasAlpha)
: width(width), height(height), hasAlpha(hasAlpha) {
VideoSurface::VideoSurface(JNIEnv* env, jobject surface, int width, int height)
: width(width), height(height) {
videoSurface.reset(env, surface);
}

@@ -126,68 +121,74 @@ jobject VideoSurface::getOutputSurface(JNIEnv* env) const {
return env->CallObjectMethod(videoSurface.get(), VideoSurface_getOutputSurface);
}

bool VideoSurface::attachToContext(tgfx::Context* context) {
if (oesTexture) {
void VideoSurface::markPendingTexImage() {
hasPendingTextureImage = true;
}

void VideoSurface::clearPendingTexImage() {
auto env = JNIEnvironment::Current();
if (env == nullptr) {
return;
}
updateTexImage(env);
}

std::shared_ptr<tgfx::Texture> VideoSurface::makeTexture(tgfx::Context* context) {
auto env = JNIEnvironment::Current();
if (env == nullptr) {
return nullptr;
}
if (!attachToContext(env, context)) {
return nullptr;
}
if (!updateTexImage(env)) {
return nullptr;
}
if (oesTexture == nullptr) {
auto textureWidth = env->CallIntMethod(videoSurface.get(), VideoSurface_videoWidth);
auto textureHeight = env->CallIntMethod(videoSurface.get(), VideoSurface_videoHeight);
oesTexture = tgfx::Resource::Wrap(context, new OESTexture(glInfo, width, height));
oesTexture->setTextureSize(textureWidth, textureHeight);
oesTexture->attachedSurface.reset(env, videoSurface.get());
}
return oesTexture;
}

bool VideoSurface::attachToContext(JNIEnv* env, tgfx::Context* context) {
if (glInfo.id > 0) {
if (deviceID != context->device()->uniqueID()) {
LOGE("VideoSurface::attachToGLContext(): VideoSurface has already attached to a Context!");
return false;
}
return true;
}
auto gl = tgfx::GLContext::Unwrap(context);
tgfx::GLTextureInfo glInfo = {};
glInfo.target = GL::TEXTURE_EXTERNAL_OES;
glInfo.format = GL::RGBA8;
gl->genTextures(1, &glInfo.id);
oesTexture = tgfx::Resource::Wrap(context, new OESTexture(glInfo, width, height, hasAlpha));
auto env = JNIEnvironment::Current();
if (env == nullptr) {
tgfx::GLSampler sampler = {};
sampler.target = GL::TEXTURE_EXTERNAL_OES;
sampler.format = tgfx::PixelFormat::RGBA_8888;
gl->genTextures(1, &sampler.id);
if (sampler.id == 0) {
return false;
}
auto result = env->CallBooleanMethod(videoSurface.get(), VideoSurface_attachToGLContext,
oesTexture->getGLInfo().id);
deviceID = context->device()->uniqueID();
auto result =
env->CallBooleanMethod(videoSurface.get(), VideoSurface_attachToGLContext, sampler.id);
if (!result) {
gl->deleteTextures(1, &sampler.id);
LOGE("VideoSurface::attachToGLContext(): failed to attached to a Surface!");
oesTexture = nullptr;
deviceID = 0;
return false;
}
oesTexture->attachedSurface.reset(env, videoSurface.get());
glInfo = sampler;
deviceID = context->device()->uniqueID();
return true;
}

void VideoSurface::markHasNewTextureImage() {
hasPendingTextureImage = true;
}

bool VideoSurface::updateTexImage() {
bool VideoSurface::updateTexImage(JNIEnv* env) {
if (!hasPendingTextureImage) {
return false;
}
auto env = JNIEnvironment::Current();
if (env == nullptr) {
return false;
}
bool status = env->CallBooleanMethod(videoSurface.get(), VideoSurface_updateTexImage);
auto result = env->CallBooleanMethod(videoSurface.get(), VideoSurface_updateTexImage);
hasPendingTextureImage = false;
return status;
return result;
}

std::shared_ptr<OESTexture> VideoSurface::getTexture() {
if (oesTexture == nullptr) {
return nullptr;
}
if (oesTexture->textureWidth > 0) {
return oesTexture;
}
auto env = JNIEnvironment::Current();
if (env == nullptr) {
return nullptr;
}
auto textureWidth = env->CallIntMethod(videoSurface.get(), VideoSurface_videoWidth);
auto textureHeight = env->CallIntMethod(videoSurface.get(), VideoSurface_videoHeight);
oesTexture->setTextureSize(textureWidth, textureHeight);
return oesTexture;
}
} // namespace pag
19 changes: 9 additions & 10 deletions src/platform/android/VideoSurface.h
@@ -24,7 +24,7 @@
namespace pag {
class OESTexture : public tgfx::GLTexture {
public:
OESTexture(tgfx::GLTextureInfo info, int width, int height, bool hasAlpha);
OESTexture(const tgfx::GLSampler& sampler, int width, int height);

tgfx::Point getTextureCoord(float x, float y) const override;

@@ -42,7 +42,6 @@ class OESTexture : public tgfx::GLTexture {

int textureWidth = 0;
int textureHeight = 0;
bool hasAlpha = false;
// Holds the Java Surface so it can still be used even if the GPUDecoder is released early.
Global<jobject> attachedSurface;
float sx = 1.0f;
@@ -57,29 +56,29 @@ class VideoSurface {
public:
static void InitJNI(JNIEnv* env, const std::string& className);

static std::shared_ptr<VideoSurface> Make(int width, int height, bool hasAlpha = false);
static std::shared_ptr<VideoSurface> Make(int width, int height);

~VideoSurface();

jobject getOutputSurface(JNIEnv* env) const;

bool attachToContext(tgfx::Context* context);
void markPendingTexImage();

bool updateTexImage();
void clearPendingTexImage();

std::shared_ptr<OESTexture> getTexture();

void markHasNewTextureImage();
std::shared_ptr<tgfx::Texture> makeTexture(tgfx::Context* context);

private:
Global<jobject> videoSurface;
int width = 0;
int height = 0;
bool hasAlpha = false;
uint32_t deviceID = 0;
tgfx::GLSampler glInfo = {};
std::shared_ptr<OESTexture> oesTexture = nullptr;
mutable std::atomic_bool hasPendingTextureImage = {false};

VideoSurface(JNIEnv* env, jobject surface, int width, int height, bool hasAlpha);
VideoSurface(JNIEnv* env, jobject surface, int width, int height);
bool attachToContext(JNIEnv* env, tgfx::Context* context);
bool updateTexImage(JNIEnv* env);
};
} // namespace pag