2025-08-26 13:32:53 +09:00
|
|
|
#include <android/native_window.h>
|
2025-08-26 18:24:06 +09:00
|
|
|
#include <android/native_window_jni.h>
|
2025-08-26 13:32:53 +09:00
|
|
|
#include <android/log.h>
|
|
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
#include <jni.h>
|
|
|
|
|
#include <mutex>
|
|
|
|
|
#include <vector>
|
|
|
|
|
#include <string>
|
|
|
|
|
#include <chrono>
|
|
|
|
|
#include <thread>
|
|
|
|
|
#include <algorithm>
|
|
|
|
|
|
|
|
|
|
extern "C" {
|
|
|
|
|
#include <libavformat/avformat.h>
|
|
|
|
|
#include <libavcodec/avcodec.h>
|
|
|
|
|
#include <libswscale/swscale.h>
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#define STB_IMAGE_IMPLEMENTATION
|
|
|
|
|
#include "stb_image.h"
|
|
|
|
|
|
2025-08-26 13:32:53 +09:00
|
|
|
#define LOG_TAG "NativeRenderer"

// Thin wrappers over the Android logcat API.
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)

// Render target obtained from the Java Surface in nativeInit; owned here and
// released in nativeInit (on re-init) and nativeDestroy.
static ANativeWindow* window = nullptr;

// Video state (current media slot)
static AVFormatContext* fmt_ctx = nullptr;

static AVCodecContext* codec_ctx = nullptr;

static AVFrame* frame = nullptr;

static AVPacket* pkt = nullptr;

static SwsContext* sws_ctx = nullptr;

// Index of the selected video stream inside fmt_ctx; -1 when none / image mode.
static int video_stream_idx = -1;

static int videoWidth = 0;

static int videoHeight = 0;

// Scratch buffer holding the current decoded frame converted to RGBA.
static std::vector<uint8_t> rgbBuffer;

// Image state (current media slot; buffer allocated by stb_image)
static uint8_t* imageData = nullptr;

static int imageWidth = 0;

static int imageHeight = 0;

static int imageChannels = 0;

// True when the current media slot holds a still image rather than a video.
static bool isImage = false;

// Preloaded "next" media slot (mirrors the current slot above)
static AVFormatContext* next_fmt_ctx = nullptr;

static AVCodecContext* next_codec_ctx = nullptr;

static AVFrame* next_frame = nullptr;

static AVPacket* next_pkt = nullptr;

static SwsContext* next_sws_ctx = nullptr;

static int next_video_stream_idx = -1;

static int next_videoWidth = 0;

static int next_videoHeight = 0;

static std::vector<uint8_t> next_rgbBuffer;

static uint8_t* next_imageData = nullptr;

static int next_imageWidth = 0;

static int next_imageHeight = 0;

static int next_imageChannels = 0;

static bool nextIsImage = false;

// Set by the background preload thread once the next slot is loaded.
static bool nextMediaReady = false;

// Guards ALL of the state above; held by every JNI entry point and the
// preload thread.
static std::mutex renderMutex;

// Nominal per-frame step used by the pan animation (~60 fps).
static constexpr float frameDurationMs = 16.0f;

// How long each media item is shown before switching.
static constexpr long long displayDurationMs = 20000;

// Cross-fade length; the fade starts this long before displayDurationMs ends.
static constexpr long long fadeDurationMs = 3000;

// Playlist supplied from Java via nativeSetMediaList.
static std::vector<std::string> mediaPaths;

static int currentMediaIndex = 0;

static int nextMediaIndex = 1;

// Pan-animation state for the current media (Ken-Burns style drift).
static float offsetX = 0.f;

static float offsetY = 0.f;

static bool movingForwardLocX = true;

static bool movingDownLocY = true;

// Fade / media-switch timing state.
static std::chrono::steady_clock::time_point mediaStartTime;

static std::chrono::steady_clock::time_point fadeStartTime;

static bool isFading = false;

// Fade alpha values (outgoing and incoming layer).
static float fadeOutAlpha = 1.f;

static float fadeInAlpha = 0.f;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// ==================== 메모리 해제: 이미지 ====================
|
|
|
|
|
static void releaseImageData(uint8_t** data) {
|
|
|
|
|
if (*data) {
|
|
|
|
|
stbi_image_free(*data);
|
|
|
|
|
*data = nullptr;
|
2025-08-26 13:32:53 +09:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
// ==================== 메모리 해제: FFmpeg 컨텍스트 ====================
|
|
|
|
|
// Releases every FFmpeg object owned by one media slot and resets the
// caller's pointers to nullptr; also empties the RGBA scratch buffer.
// All arguments except `buffer` must be valid pointer-to-pointer slots.
static void releaseFFmpegContext(
        AVFormatContext** fctx, AVCodecContext** cctx,
        AVFrame** frm, AVPacket** pck,
        SwsContext** sws, std::vector<uint8_t>* buffer) {
    // Codec context first, then the demuxer it was created from.
    if (*cctx) {
        avcodec_free_context(cctx);
    }
    if (*fctx) {
        avformat_close_input(fctx);
    }
    if (*frm) {
        av_frame_free(frm);
    }
    if (*pck) {
        av_packet_free(pck);
    }
    if (*sws) {
        sws_freeContext(*sws);
    }
    if (buffer) {
        buffer->clear();
    }
    // Null everything out so the slot reads as empty afterwards (the av_*
    // free helpers already do this for their own arguments; sws does not).
    *cctx = nullptr;
    *fctx = nullptr;
    *frm = nullptr;
    *pck = nullptr;
    *sws = nullptr;
}
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
// ==================== 미디어 데이터 해제 ====================
|
|
|
|
|
static void releaseMediaData(bool loadIsImage,
|
|
|
|
|
uint8_t** imgData,
|
|
|
|
|
AVFormatContext** fmtCtx, AVCodecContext** codecCtx,
|
|
|
|
|
AVFrame** frm, AVPacket** pck, SwsContext** sws,
|
|
|
|
|
std::vector<uint8_t>* rgbBuf) {
|
|
|
|
|
if (loadIsImage) {
|
|
|
|
|
releaseImageData(imgData);
|
|
|
|
|
} else {
|
|
|
|
|
releaseFFmpegContext(fmtCtx, codecCtx, frm, pck, sws, rgbBuf);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ==================== 스케일 계산 구조체 및 함수 ====================
|
|
|
|
|
// Result of fitting a media rectangle onto the output buffer ("cover" fit).
struct ScaleResult {
    float scale;      // uniform scale factor applied to the media
    float scaledW;    // media width after scaling
    float scaledH;    // media height after scaling
    float overflowX;  // horizontal excess beyond the buffer (>= 0)
    float overflowY;  // vertical excess beyond the buffer (>= 0)
};

// Computes a "cover" scale: the media is scaled uniformly so it fills the
// whole buffer, overflowing on at most one axis when aspect ratios differ.
// BUG FIX: non-positive dimensions previously produced NaN/inf via division
// by zero; such degenerate inputs now return an all-zero result, which makes
// downstream drawing a no-op instead of feeding garbage offsets.
static ScaleResult calculateScale(float mediaW, float mediaH, float bufW, float bufH) {
    ScaleResult res{};
    if (mediaW <= 0.f || mediaH <= 0.f || bufW <= 0.f || bufH <= 0.f) {
        return res;  // nothing sensible to compute
    }
    if ((mediaW / mediaH) > (bufW / bufH)) {
        // Media is wider than the buffer: match heights, overflow horizontally.
        res.scale = bufH / mediaH;
        res.scaledW = mediaW * res.scale;
        res.scaledH = bufH;
    } else {
        // Media is taller (or same aspect): match widths, overflow vertically.
        res.scale = bufW / mediaW;
        res.scaledW = bufW;
        res.scaledH = mediaH * res.scale;
    }
    res.overflowX = std::max(0.f, res.scaledW - bufW);
    res.overflowY = std::max(0.f, res.scaledH - bufH);
    return res;
}
|
|
|
|
|
|
|
|
|
|
// ==================== 오프셋 애니메이션 업데이트 ====================
|
|
|
|
|
static void updateOffset(float& offsetX, float& offsetY,
|
|
|
|
|
bool& movingX, bool& movingY,
|
|
|
|
|
float overflowX, float overflowY) {
|
|
|
|
|
if (overflowX > 0) {
|
|
|
|
|
float speedX = overflowX / displayDurationMs;
|
|
|
|
|
float deltaX = speedX * frameDurationMs;
|
|
|
|
|
if (movingX) {
|
|
|
|
|
offsetX += deltaX;
|
|
|
|
|
if (offsetX >= overflowX) {
|
|
|
|
|
offsetX = overflowX;
|
|
|
|
|
movingX = false;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
offsetX -= deltaX;
|
|
|
|
|
if (offsetX <= 0) {
|
|
|
|
|
offsetX = 0.f;
|
|
|
|
|
movingX = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
offsetX = 0.f;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (overflowY > 0) {
|
|
|
|
|
float speedY = overflowY / displayDurationMs;
|
|
|
|
|
float deltaY = speedY * frameDurationMs;
|
|
|
|
|
if (movingY) {
|
|
|
|
|
offsetY += deltaY;
|
|
|
|
|
if (offsetY >= overflowY) {
|
|
|
|
|
offsetY = overflowY;
|
|
|
|
|
movingY = false;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
offsetY -= deltaY;
|
|
|
|
|
if (offsetY <= 0) {
|
|
|
|
|
offsetY = 0.f;
|
|
|
|
|
movingY = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
offsetY = 0.f;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ==================== 버퍼 클리어 함수 ====================
|
|
|
|
|
// Fills the window buffer with 0x00000000 (transparent/black) when requested.
// The buffer's stride may exceed its visible width, so each row is cleared
// individually rather than as one flat memset over the whole allocation.
static void clearBufferIfNeeded(ANativeWindow_Buffer& buffer, bool shouldClear) {
    if (!shouldClear) return;

    uint32_t* dstPixels = (uint32_t*)buffer.bits;
    int dstStride = buffer.stride;
    for (int y = 0; y < buffer.height; ++y) {
        uint32_t* dstRow = dstPixels + y * dstStride;
        // Bulk row clear instead of the original per-pixel loop (same value).
        std::fill(dstRow, dstRow + buffer.width, 0u);
    }
}
|
|
|
|
|
|
|
|
|
|
// ==================== 픽셀 그리기: 이미지 및 비디오 프레임 ====================
|
|
|
|
|
// Blits a 4-channel (RGBA byte order from stb/sws) source into the 32-bit
// window buffer with uniform scaling, panning offsets and a global alpha
// multiplier. Sampling is nearest-neighbour; alpha is premultiplied into each
// channel. Pixels outside the source are skipped, leaving the buffer content.
static void drawToBuffer(ANativeWindow_Buffer& buffer,
        uint8_t* pixelData, int imgW, int imgH,
        float scale, float offsetX, float offsetY, float alpha) {
    if (alpha <= 0.f) return;
    // BUG FIX: previously a null pixelData (failed load) or a zero scale
    // (degenerate media dimensions) reached the loop below, causing a null
    // dereference / division by zero.
    if (!pixelData || imgW <= 0 || imgH <= 0 || scale <= 0.f) return;
    alpha = std::clamp(alpha, 0.f, 1.f);

    uint32_t* dstPixels = (uint32_t*)buffer.bits;
    int dstStride = buffer.stride;

    for (int y = 0; y < buffer.height; ++y) {
        int srcY = (int)((y + offsetY) / scale);
        if (srcY < 0 || srcY >= imgH) continue;

        uint32_t* dstRow = dstPixels + y * dstStride;
        for (int x = 0; x < buffer.width; ++x) {
            int srcX = (int)((x + offsetX) / scale);
            if (srcX < 0 || srcX >= imgW) continue;

            uint8_t* px = &pixelData[(srcY * imgW + srcX) * 4];
            uint8_t r = (uint8_t)(px[0] * alpha);
            uint8_t g = (uint8_t)(px[1] * alpha);
            uint8_t b = (uint8_t)(px[2] * alpha);
            uint8_t a = (uint8_t)(px[3] * alpha);

            // Pack the four channels into one 32-bit destination pixel.
            dstRow[x] = (a << 24) | (r << 16) | (g << 8) | b;
        }
    }
}
|
|
|
|
|
|
|
|
|
|
// ==================== 미디어 로딩 함수(이미지/비디오) ====================
|
|
|
|
|
static bool loadMedia(const std::string& path, bool loadIsImage,
|
|
|
|
|
uint8_t** imgData, int* imgW, int* imgH, int* imgCh,
|
|
|
|
|
AVFormatContext** fmtCtx, AVCodecContext** codecCtx, AVFrame** frm, AVPacket** pck,
|
|
|
|
|
SwsContext** sws, int* videoIdx, int* vidW, int* vidH,
|
|
|
|
|
std::vector<uint8_t>* rgbBuf) {
|
|
|
|
|
try {
|
|
|
|
|
if (!loadIsImage) {
|
|
|
|
|
*fmtCtx = avformat_alloc_context();
|
|
|
|
|
if (avformat_open_input(fmtCtx, path.c_str(), nullptr, nullptr) != 0) {
|
|
|
|
|
LOGE("Failed to open video: %s", path.c_str());
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
if (avformat_find_stream_info(*fmtCtx, nullptr) < 0) {
|
|
|
|
|
LOGE("Failed to get stream info: %s", path.c_str());
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
*videoIdx = -1;
|
|
|
|
|
for (unsigned int i = 0; i < (*fmtCtx)->nb_streams; ++i) {
|
|
|
|
|
if ((*fmtCtx)->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
|
|
|
|
*videoIdx = i;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (*videoIdx == -1) {
|
|
|
|
|
LOGE("No video stream found: %s", path.c_str());
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
AVCodecParameters* codecpar = (*fmtCtx)->streams[*videoIdx]->codecpar;
|
|
|
|
|
const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
|
|
|
|
|
if (!codec) {
|
|
|
|
|
LOGE("Decoder not found");
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
*codecCtx = avcodec_alloc_context3(codec);
|
|
|
|
|
if (!*codecCtx) {
|
|
|
|
|
LOGE("Failed to alloc codec context");
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
if (avcodec_parameters_to_context(*codecCtx, codecpar) < 0) {
|
|
|
|
|
LOGE("Failed to copy codec params");
|
|
|
|
|
avcodec_free_context(codecCtx);
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
if (avcodec_open2(*codecCtx, codec, nullptr) < 0) {
|
|
|
|
|
LOGE("Failed to open codec");
|
|
|
|
|
avcodec_free_context(codecCtx);
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
*vidW = (*codecCtx)->width;
|
|
|
|
|
*vidH = (*codecCtx)->height;
|
|
|
|
|
|
|
|
|
|
*frm = av_frame_alloc();
|
|
|
|
|
*pck = av_packet_alloc();
|
|
|
|
|
*sws = sws_getContext(*vidW, *vidH, (*codecCtx)->pix_fmt, *vidW, *vidH,
|
|
|
|
|
AV_PIX_FMT_RGBA, SWS_BILINEAR, nullptr, nullptr, nullptr);
|
|
|
|
|
|
|
|
|
|
if (!*sws) {
|
|
|
|
|
LOGE("Failed to create sws context");
|
|
|
|
|
av_frame_free(frm);
|
|
|
|
|
av_packet_free(pck);
|
|
|
|
|
avcodec_free_context(codecCtx);
|
|
|
|
|
avformat_close_input(fmtCtx);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
rgbBuf->resize((*vidW) * (*vidH) * 4);
|
|
|
|
|
*imgData = nullptr;
|
|
|
|
|
*imgW = 0;
|
|
|
|
|
*imgH = 0;
|
|
|
|
|
*imgCh = 0;
|
|
|
|
|
} else {
|
|
|
|
|
*imgData = stbi_load(path.c_str(), imgW, imgH, imgCh, 4);
|
|
|
|
|
if (!*imgData) {
|
|
|
|
|
LOGE("Failed to load image: %s", path.c_str());
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
*fmtCtx = nullptr;
|
|
|
|
|
*codecCtx = nullptr;
|
|
|
|
|
*frm = nullptr;
|
|
|
|
|
*pck = nullptr;
|
|
|
|
|
*sws = nullptr;
|
|
|
|
|
*videoIdx = -1;
|
|
|
|
|
*vidW = 0;
|
|
|
|
|
*vidH = 0;
|
|
|
|
|
rgbBuf->clear();
|
|
|
|
|
}
|
|
|
|
|
LOGI("Successfully loaded media: %s", path.c_str());
|
|
|
|
|
return true;
|
|
|
|
|
} catch (...) {
|
|
|
|
|
LOGE("Exception occurred during media loading: %s", path.c_str());
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ==================== 다음 미디어 비동기 로드 ====================
|
|
|
|
|
static bool loadNextMedia() {
|
|
|
|
|
LOGI("loadNextMedia: Trying to load media index %d", nextMediaIndex);
|
|
|
|
|
|
|
|
|
|
releaseMediaData(nextIsImage, &next_imageData, &next_fmt_ctx, &next_codec_ctx,
|
|
|
|
|
&next_frame, &next_pkt, &next_sws_ctx, &next_rgbBuffer);
|
|
|
|
|
|
|
|
|
|
if (mediaPaths.empty()) {
|
|
|
|
|
LOGE("loadNextMedia: mediaPaths is empty");
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const std::string& nextPath = mediaPaths[nextMediaIndex];
|
|
|
|
|
LOGI("loadNextMedia: nextPath=%s", nextPath.c_str());
|
|
|
|
|
|
|
|
|
|
nextIsImage = (nextPath.find(".mp4") == std::string::npos &&
|
|
|
|
|
nextPath.find(".mkv") == std::string::npos);
|
|
|
|
|
|
|
|
|
|
bool ok = loadMedia(nextPath, nextIsImage,
|
|
|
|
|
&next_imageData, &next_imageWidth, &next_imageHeight, &next_imageChannels,
|
|
|
|
|
&next_fmt_ctx, &next_codec_ctx, &next_frame, &next_pkt, &next_sws_ctx,
|
|
|
|
|
&next_video_stream_idx, &next_videoWidth, &next_videoHeight, &next_rgbBuffer);
|
|
|
|
|
if (!ok) {
|
|
|
|
|
LOGE("loadNextMedia: Failed to load media %s", nextPath.c_str());
|
|
|
|
|
} else {
|
|
|
|
|
LOGI("loadNextMedia: Successfully loaded media");
|
|
|
|
|
}
|
|
|
|
|
return ok;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ==================== 비디오/이미지 렌더링 ====================
|
|
|
|
|
// Renders one media item into the window buffer.
// Image path: a straight blit via drawToBuffer.
// Video path: pulls packets until one frame of the selected stream decodes,
// converts it to RGBA into rgbBuf, then blits. When the stream is exhausted
// it seeks back to the start (looping playback) and draws nothing this call.
static void renderMedia(ANativeWindow_Buffer& buffer,
        uint8_t* imgData, int imgW, int imgH, int imgCh,
        AVFormatContext* fctx, AVCodecContext* cctx,
        AVFrame* frm, AVPacket* pck, SwsContext* sws,
        int vidStreamIdx, int vidW, int vidH,
        std::vector<uint8_t>& rgbBuf,
        bool isImageLocal,
        float scale,
        float offsetXLocal,
        float offsetYLocal,
        float alpha) {

    if (isImageLocal) {
        drawToBuffer(buffer, imgData, imgW, imgH, scale, offsetXLocal, offsetYLocal, alpha);
        return;
    }

    // BUG FIX: only fctx/cctx were guarded; a null frame/packet/sws (partial
    // load failure) would have been dereferenced below.
    if (!fctx || !cctx || !frm || !pck || !sws) return;

    // Decode exactly one video frame per render call.
    int ret = av_read_frame(fctx, pck);
    bool gotFrame = false;
    while (ret >= 0) {
        if (pck->stream_index == vidStreamIdx) {
            ret = avcodec_send_packet(cctx, pck);
            if (ret < 0) break;
            ret = avcodec_receive_frame(cctx, frm);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                // Decoder needs more input; feed it the next packet.
                av_packet_unref(pck);
                ret = av_read_frame(fctx, pck);
                continue;
            } else if (ret < 0) break;

            // Convert the decoded frame to tightly-packed RGBA.
            uint8_t* dst[4] = { rgbBuf.data(), nullptr, nullptr, nullptr };
            int dstStride_arr[4] = { vidW * 4, 0, 0, 0 };
            sws_scale(sws, frm->data, frm->linesize, 0, vidH, dst, dstStride_arr);

            gotFrame = true;
            break;
        }
        av_packet_unref(pck);
        ret = av_read_frame(fctx, pck);
    }
    av_packet_unref(pck);

    if (!gotFrame) {
        // End of stream: rewind so the next call restarts from the beginning.
        av_seek_frame(fctx, vidStreamIdx, 0, AVSEEK_FLAG_BACKWARD);
        // BUG FIX: the decoder's internal buffers were not flushed after the
        // seek, leaving stale reference frames / delayed output that can
        // corrupt or stall decoding on the next loop iteration.
        avcodec_flush_buffers(cctx);
        return;
    }

    drawToBuffer(buffer, rgbBuf.data(), vidW, vidH, scale, offsetXLocal, offsetYLocal, alpha);
}
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
// ==================== 페이드 인/아웃 크로스렌더링 ====================
|
|
|
|
|
static void renderWithFade(ANativeWindow_Buffer& buffer,
|
|
|
|
|
float bufW, float bufH,
|
|
|
|
|
uint8_t* curImgData, int curImgW, int curImgH, int curImgCh,
|
|
|
|
|
AVFormatContext* curFmtCtx, AVCodecContext* curCodecCtx,
|
|
|
|
|
AVFrame* curFrame, AVPacket* curPkt, SwsContext* curSwsCtx,
|
|
|
|
|
int curVidStreamIdx, int curVidW, int curVidH,
|
|
|
|
|
std::vector<uint8_t>& curRgbBuf,
|
|
|
|
|
bool curIsImage,
|
|
|
|
|
uint8_t* nextImgData, int nextImgW, int nextImgH, int nextImgCh,
|
|
|
|
|
AVFormatContext* nextFmtCtx, AVCodecContext* nextCodecCtx,
|
|
|
|
|
AVFrame* nextFrame, AVPacket* nextPkt, SwsContext* nextSwsCtx,
|
|
|
|
|
int nextVidStreamIdx, int nextVidW, int nextVidH,
|
|
|
|
|
std::vector<uint8_t>& nextRgbBuf,
|
|
|
|
|
bool nextIsImage,
|
|
|
|
|
float fadeOutAlpha, float fadeInAlpha,
|
|
|
|
|
float& curOffsetX, float& curOffsetY,
|
|
|
|
|
bool& curMovingX, bool& curMovingY,
|
|
|
|
|
float& nextOffsetX, float& nextOffsetY,
|
|
|
|
|
bool& nextMovingX, bool& nextMovingY) {
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
auto curScaleRes = calculateScale(
|
|
|
|
|
curIsImage ? (float)curImgW : (float)curVidW,
|
|
|
|
|
curIsImage ? (float)curImgH : (float)curVidH,
|
|
|
|
|
bufW, bufH);
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
auto nextScaleRes = calculateScale(
|
|
|
|
|
nextIsImage ? (float)nextImgW : (float)nextVidW,
|
|
|
|
|
nextIsImage ? (float)nextImgH : (float)nextVidH,
|
|
|
|
|
bufW, bufH);
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
updateOffset(curOffsetX, curOffsetY, curMovingX, curMovingY, curScaleRes.overflowX, curScaleRes.overflowY);
|
|
|
|
|
updateOffset(nextOffsetX, nextOffsetY, nextMovingX, nextMovingY, nextScaleRes.overflowX, nextScaleRes.overflowY);
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
if (curIsImage) {
|
|
|
|
|
drawToBuffer(buffer, curImgData, curImgW, curImgH,
|
|
|
|
|
curScaleRes.scale, curOffsetX, curOffsetY, fadeOutAlpha);
|
|
|
|
|
} else {
|
|
|
|
|
renderMedia(buffer, curImgData, curImgW, curImgH, curImgCh,
|
|
|
|
|
curFmtCtx, curCodecCtx, curFrame, curPkt, curSwsCtx,
|
|
|
|
|
curVidStreamIdx, curVidW, curVidH, curRgbBuf,
|
|
|
|
|
false,
|
|
|
|
|
curScaleRes.scale, curOffsetX, curOffsetY,
|
|
|
|
|
fadeOutAlpha);
|
|
|
|
|
}
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
if (nextIsImage) {
|
|
|
|
|
drawToBuffer(buffer, nextImgData, nextImgW, nextImgH,
|
|
|
|
|
nextScaleRes.scale, nextOffsetX, nextOffsetY, fadeInAlpha);
|
|
|
|
|
} else {
|
|
|
|
|
renderMedia(buffer, nextImgData, nextImgW, nextImgH, nextImgCh,
|
|
|
|
|
nextFmtCtx, nextCodecCtx, nextFrame, nextPkt, nextSwsCtx,
|
|
|
|
|
nextVidStreamIdx, nextVidW, nextVidH, nextRgbBuf,
|
|
|
|
|
false,
|
|
|
|
|
nextScaleRes.scale, nextOffsetX, nextOffsetY,
|
|
|
|
|
fadeInAlpha);
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-08-26 13:32:53 +09:00
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
// ==================== JNI 함수: 미디어 리스트 세팅 ====================
|
|
|
|
|
extern "C" {
|
|
|
|
|
|
|
|
|
|
// JNI: replaces the playlist with `paths`, releases the current media,
// synchronously loads the first item, and kicks off a background preload of
// the second item. Holds renderMutex for the whole call.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_wall_NativeRenderer_nativeSetMediaList(JNIEnv* env, jobject, jobjectArray paths) {
    std::lock_guard<std::mutex> lock(renderMutex);
    mediaPaths.clear();
    // Copy every Java string into mediaPaths, releasing JNI refs as we go.
    jsize len = env->GetArrayLength(paths);
    for (jsize i = 0; i < len; ++i) {
        jstring pathStr = (jstring) env->GetObjectArrayElement(paths, i);
        const char* pathCStr = env->GetStringUTFChars(pathStr, nullptr);
        mediaPaths.push_back(std::string(pathCStr));
        env->ReleaseStringUTFChars(pathStr, pathCStr);
        env->DeleteLocalRef(pathStr);
    }
    currentMediaIndex = 0;
    // With a single item the "next" item is the same one (index 0).
    nextMediaIndex = (len > 1) ? 1 : 0;

    // Drop whatever was playing before loading the new first item.
    releaseMediaData(isImage, &imageData, &fmt_ctx, &codec_ctx, &frame, &pkt, &sws_ctx, &rgbBuffer);
    isImage = false;

    if (!mediaPaths.empty()) {
        // Extension-based type detection: only .mp4/.mkv are videos.
        const bool loadIsImage = (mediaPaths[0].find(".mp4") == std::string::npos &&
                mediaPaths[0].find(".mkv") == std::string::npos);
        isImage = loadIsImage;

        if (!loadMedia(mediaPaths[0], loadIsImage,
                &imageData, &imageWidth, &imageHeight, &imageChannels,
                &fmt_ctx, &codec_ctx, &frame, &pkt, &sws_ctx,
                &video_stream_idx, &videoWidth, &videoHeight, &rgbBuffer)) {
            LOGE("Failed to load the first media");
            return;
        }

        // Reset animation / fade state for the fresh playlist.
        offsetX = 0.f; offsetY = 0.f;
        movingForwardLocX = true; movingDownLocY = true;
        mediaStartTime = std::chrono::steady_clock::now();
        isFading = false;
        nextMediaReady = false;

        // NOTE(review): assumes the next_* pointers never alias the current
        // slot at this point — verify that the fade switch clears them after
        // promoting next_* to current.
        releaseMediaData(nextIsImage, &next_imageData, &next_fmt_ctx, &next_codec_ctx,
                &next_frame, &next_pkt, &next_sws_ctx, &next_rgbBuffer);

        // Preload off-thread; it serializes on renderMutex, so it starts only
        // after this function returns and releases the lock.
        std::thread([](){
            std::lock_guard<std::mutex> preloadLock(renderMutex);
            nextMediaReady = loadNextMedia();
            if (nextMediaReady) {
                LOGI("Preloaded next media ready");
            } else {
                LOGE("Preload failed");
            }
        }).detach();
    }
}
|
|
|
|
|
|
|
|
|
|
// JNI: renders one frame. Normally draws the current media with its pan
// animation; near the end of the display period it cross-fades to the
// preloaded next media, then promotes that slot to current and schedules the
// following preload. Holds renderMutex for the whole call.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_wall_NativeRenderer_nativeRender(JNIEnv* env, jobject) {
    std::lock_guard<std::mutex> lock(renderMutex);
    if (!window || mediaPaths.empty()) {
        LOGI("nativeRender: no window or empty media");
        return;
    }

    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, nullptr) < 0) {
        LOGE("nativeRender: Failed to lock window");
        return;
    }

    // During a fade both layers are drawn over the old content, so skip the
    // clear to avoid a black flash between them.
    clearBufferIfNeeded(buffer, !isFading);

    auto now = std::chrono::steady_clock::now();
    auto elapsedMs = std::chrono::duration_cast<std::chrono::milliseconds>(now - mediaStartTime).count();
    elapsedMs = std::max(elapsedMs, 0LL);

    // Start fading shortly before the display period ends, but only once the
    // background preload has completed.
    if (!isFading && elapsedMs > (displayDurationMs - fadeDurationMs - 10) && nextMediaReady) {
        isFading = true;
        fadeStartTime = now;
        LOGI("Fade started");
    }

    if (isFading) {
        auto fadeElapsed = std::chrono::duration_cast<std::chrono::milliseconds>(now - fadeStartTime).count();
        fadeOutAlpha = std::clamp(1.f - (float)fadeElapsed / fadeDurationMs, 0.f, 1.f);
        fadeInAlpha = std::clamp((float)fadeElapsed / fadeDurationMs, 0.f, 1.f);

        // Pan state of the incoming layer, persisted across frames of a fade
        // (function-local statics, reset when the switch completes below).
        static float nextOffsetX = 0.f;
        static float nextOffsetY = 0.f;
        static bool nextMovingForwardX = true;
        static bool nextMovingDownY = true;

        renderWithFade(buffer,
                (float)buffer.width, (float)buffer.height,
                imageData, imageWidth, imageHeight, imageChannels,
                fmt_ctx, codec_ctx, frame, pkt, sws_ctx,
                video_stream_idx, videoWidth, videoHeight,
                rgbBuffer,
                isImage,
                next_imageData, next_imageWidth, next_imageHeight, next_imageChannels,
                next_fmt_ctx, next_codec_ctx, next_frame, next_pkt, next_sws_ctx,
                next_video_stream_idx, next_videoWidth, next_videoHeight,
                next_rgbBuffer,
                nextIsImage,
                fadeOutAlpha, fadeInAlpha,
                offsetX, offsetY,
                movingForwardLocX, movingDownLocY,
                nextOffsetX, nextOffsetY,
                nextMovingForwardX, nextMovingDownY);

        if (fadeElapsed >= fadeDurationMs) {
            LOGI("Fade ended, switching media");

            releaseMediaData(isImage, &imageData, &fmt_ctx, &codec_ctx, &frame, &pkt, &sws_ctx, &rgbBuffer);

            // Promote the preloaded slot to "current".
            imageData = next_imageData;
            imageWidth = next_imageWidth;
            imageHeight = next_imageHeight;
            imageChannels = next_imageChannels;

            fmt_ctx = next_fmt_ctx;
            codec_ctx = next_codec_ctx;
            frame = next_frame;
            pkt = next_pkt;
            sws_ctx = next_sws_ctx;

            video_stream_idx = next_video_stream_idx;
            videoWidth = next_videoWidth;
            videoHeight = next_videoHeight;
            rgbBuffer = std::move(next_rgbBuffer);

            isImage = nextIsImage;

            // BUG FIX: ownership has moved to the current slot, so the next_*
            // pointers must be cleared here. Previously they kept aliasing the
            // promoted resources, and the preload thread's releaseMediaData()
            // below would free the media still being displayed
            // (use-after-free / double-free).
            next_imageData = nullptr;
            next_fmt_ctx = nullptr;
            next_codec_ctx = nullptr;
            next_frame = nullptr;
            next_pkt = nullptr;
            next_sws_ctx = nullptr;
            next_video_stream_idx = -1;

            currentMediaIndex = nextMediaIndex;
            nextMediaIndex = (nextMediaIndex + 1) % mediaPaths.size();

            // Reset pan state for both the promoted media and the next fade.
            offsetX = 0.f; offsetY = 0.f;
            movingForwardLocX = true; movingDownLocY = true;

            nextOffsetX = 0.f; nextOffsetY = 0.f;
            nextMovingForwardX = true; nextMovingDownY = true;

            mediaStartTime = std::chrono::steady_clock::now();

            isFading = false;
            nextMediaReady = false;

            // Preload the following media off-thread; it serializes on
            // renderMutex, so it starts once this frame finishes.
            std::thread([](){
                std::lock_guard<std::mutex> preloadLock(renderMutex);
                nextMediaReady = loadNextMedia();
                if (nextMediaReady) {
                    LOGI("Preloaded next media ready");
                } else {
                    LOGE("Preload failed");
                }
            }).detach();
        }
    } else {
        auto curScaleRes = calculateScale(
                isImage ? (float)imageWidth : (float)videoWidth,
                isImage ? (float)imageHeight : (float)videoHeight,
                (float)buffer.width, (float)buffer.height);

        updateOffset(offsetX, offsetY,
                movingForwardLocX, movingDownLocY,
                curScaleRes.overflowX, curScaleRes.overflowY);

        if (isImage) {
            drawToBuffer(buffer, imageData, imageWidth, imageHeight,
                    curScaleRes.scale, offsetX, offsetY, 1.f);
        } else {
            renderMedia(buffer, imageData, imageWidth, imageHeight, imageChannels,
                    fmt_ctx, codec_ctx, frame, pkt, sws_ctx,
                    video_stream_idx, videoWidth, videoHeight, rgbBuffer,
                    false,
                    curScaleRes.scale, offsetX, offsetY,
                    1.f);
        }
    }

    ANativeWindow_unlockAndPost(window);
}
|
|
|
|
|
|
2025-08-26 18:24:06 +09:00
|
|
|
// JNI: binds the renderer to a new Android Surface, releasing any previously
// held window first. Returns true on success, false when the surface could
// not be wrapped (previously success was reported unconditionally).
JNIEXPORT jboolean JNICALL
Java_bums_lunatic_launcher_wall_NativeRenderer_nativeInit(JNIEnv* env, jobject, jobject surface) {
    std::lock_guard<std::mutex> lock(renderMutex);
    if (window) {
        ANativeWindow_release(window);
        window = nullptr;
    }
    window = ANativeWindow_fromSurface(env, surface);
    // BUG FIX: ANativeWindow_fromSurface can return null (invalid/released
    // surface); report failure instead of claiming success.
    if (!window) {
        LOGE("Failed to acquire native window from surface");
        return false;
    }
    LOGI("Native window initialized");
    return true;
}
|
2025-08-26 18:24:06 +09:00
|
|
|
|
|
|
|
|
// JNI: tears down all renderer state — both media slots and the native
// window. Safe to call when nothing is loaded.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_wall_NativeRenderer_nativeDestroy(JNIEnv* env, jobject) {
    std::lock_guard<std::mutex> lock(renderMutex);

    // Release the current slot first, then the preloaded "next" slot.
    releaseMediaData(isImage, &imageData, &fmt_ctx, &codec_ctx, &frame, &pkt, &sws_ctx, &rgbBuffer);
    releaseMediaData(nextIsImage, &next_imageData, &next_fmt_ctx, &next_codec_ctx, &next_frame, &next_pkt, &next_sws_ctx, &next_rgbBuffer);

    if (window != nullptr) {
        ANativeWindow_release(window);
        window = nullptr;
    }
    LOGI("Native window released");
}
|
|
|
|
|
|
|
|
|
|
} // extern "C"
|