This commit is contained in:
lunaticbum 2026-04-09 17:16:23 +09:00
parent 5e7c121cf8
commit 1da8d5bb62
11 changed files with 800 additions and 24 deletions

View File

@ -106,6 +106,15 @@
</intent-filter> </intent-filter>
</activity> </activity>
<activity
android:name=".player.PlayerActivity"
android:launchMode="singleInstance"
android:configChanges="orientation|screenSize|screenLayout|smallestScreenSize"
android:screenOrientation="sensor"
android:exported="false">
</activity>
<activity <activity
android:name=".LauncherActivity" android:name=".LauncherActivity"
android:theme="@style/Theme.LunarLauncher.Starting" android:theme="@style/Theme.LunarLauncher.Starting"

View File

@ -39,6 +39,8 @@ add_library(native_renderer SHARED
Renderer.cpp Renderer.cpp
Preloader.cpp Preloader.cpp
MediaAsset.cpp MediaAsset.cpp
native_player.cpp
PlayerEngine.cpp
) )
@ -55,6 +57,8 @@ target_link_libraries(native_renderer
swscale swscale
swresample swresample
jnigraphics jnigraphics
swresample # 💡 !
aaudio # 💡 !
# #
${log-lib} ${log-lib}
${android-lib} ${android-lib}

View File

@ -0,0 +1,273 @@
#include "PlayerEngine.h"
#include <android/log.h>
#include <chrono>
#include <unistd.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <errno.h>
#define LOG_TAG "NativePlayerEngine"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// [1. Custom I/O callbacks]
// AVIO read callback: pulls up to buf_size bytes from the POSIX file
// descriptor stored in `opaque`, translating the result into FFmpeg's
// error convention (AVERROR_EOF at end of stream, AVERROR(errno) on error).
static int custom_read_packet(void *opaque, uint8_t *buf, int buf_size) {
    const int fd = (int)(intptr_t)opaque;
    const int n = read(fd, buf, buf_size);
    if (n > 0) return n;
    return (n == 0) ? AVERROR_EOF : AVERROR(errno);
}
// AVIO seek callback. Handles FFmpeg's AVSEEK_SIZE query by reporting the
// file size via fstat(); all other requests are forwarded to lseek() after
// masking off the AVSEEK_FORCE hint bit, which lseek does not understand.
static int64_t custom_seek_packet(void *opaque, int64_t offset, int whence) {
    const int fd = (int)(intptr_t)opaque;

    if (whence == AVSEEK_SIZE) {
        struct stat st;
        return (fstat(fd, &st) == 0) ? (int64_t)st.st_size : AVERROR(errno);
    }

    const int64_t pos = lseek(fd, offset, whence & ~AVSEEK_FORCE);
    return (pos < 0) ? AVERROR(errno) : pos;
}
// [2. Constructor / initialization]
// Caches the JavaVM, takes a global ref on the Kotlin listener object so it
// outlives this JNI call, and resolves the callback method IDs once up front.
PlayerEngine::PlayerEngine(JavaVM* vm, jobject listenerObj) : jvm_(vm) {
    JNIEnv* env;
    // NOTE(review): assumes construction happens on a JVM-attached thread;
    // GetEnv's return value is not checked here — confirm the call site.
    jvm_->GetEnv((void**)&env, JNI_VERSION_1_6);
    listenerObj_ = env->NewGlobalRef(listenerObj);
    jclass clazz = env->GetObjectClass(listenerObj_);
    // Resolve a method ID, clearing any pending NoSuchMethodError so a
    // missing callback degrades to a null ID instead of a later crash.
    auto getMethod = [&](const char* name, const char* sig) -> jmethodID {
        jmethodID id = env->GetMethodID(clazz, name, sig);
        if (env->ExceptionCheck()) {
            env->ExceptionClear();
            LOGE("❌ Failed to find method: %s %s", name, sig);
            return nullptr;
        }
        return id;
    };
    subtitleMethodId_ = getMethod("onSubtitleTextDecoded", "(Ljava/lang/String;)V");
    videoSizeMethodId_ = getMethod("onVideoSizeChanged", "(II)V");
}
// Destructor: stops/joins the render thread and drops the global ref on the
// Kotlin listener.
// FIX: the original called GetEnv without checking its result — if the
// destructor runs on a thread not attached to the JVM, `env` was used
// uninitialized. Now attaches on demand, mirroring sendSubtitleToKotlin().
PlayerEngine::~PlayerEngine() {
    stop();
    if (listenerObj_) {
        JNIEnv* env = nullptr;
        bool attached = false;
        if (jvm_->GetEnv((void**)&env, JNI_VERSION_1_6) != JNI_OK) {
            if (jvm_->AttachCurrentThread(&env, nullptr) != JNI_OK) return;
            attached = true;
        }
        env->DeleteGlobalRef(listenerObj_);
        listenerObj_ = nullptr;
        if (attached) jvm_->DetachCurrentThread();
    }
}
// Records the (already opened) video file descriptor and the optional
// subtitle file path for the next play(). Does not touch the fd itself.
void PlayerEngine::setDataSource(int videoFd, const std::string& subtitlePath) {
    subtitlePath_ = subtitlePath;
    videoFd_ = videoFd;
}
// Starts playback on the given window, or — if the engine is already
// running — simply clears the pause flag to resume.
void PlayerEngine::play(ANativeWindow* window) {
    if (isPlaying_) {
        isPaused_ = false; // already playing: just un-pause
        return;
    }
    window_ = window;
    ANativeWindow_acquire(window_); // hold our own ref; released in stop()
    isPlaying_ = true;
    renderThread_ = std::thread(&PlayerEngine::renderLoop, this);
}
// Stops playback: signals the render loop to exit, joins the thread, and
// releases the engine's reference on the native window. Safe to call when
// not playing (no-op).
void PlayerEngine::stop() {
    if (!isPlaying_) return;
    isPlaying_ = false;
    if (renderThread_.joinable()) renderThread_.join();
    if (window_) { ANativeWindow_release(window_); window_ = nullptr; }
}
// Requests a relative seek of `seconds` (negative = backwards). The render
// loop consumes the request on its next iteration; the offset is published
// before the flag so the loop never sees a stale target.
void PlayerEngine::seekBy(double seconds) {
    seekTargetOffset_.store(seconds);
    seekReq_.store(true);
}
// Sets the playback speed multiplier; non-positive values are rejected and
// reset to normal (1x) speed.
void PlayerEngine::setSpeed(float speed) {
    if (speed <= 0.0f) speed = 1.0f;
    playbackSpeed_ = speed;
}
// Delivers one decoded subtitle line to the Kotlin listener via JNI.
// Safe to call from the render thread: attaches to the JVM on demand and
// detaches again only if this call did the attaching.
// NOTE(review): NewStringUTF expects modified UTF-8; assumes the decoder
// output is UTF-8-compatible — confirm for exotic subtitle encodings.
void PlayerEngine::sendSubtitleToKotlin(const char* text) {
    if (!jvm_ || !listenerObj_ || !subtitleMethodId_ || !text) return;
    JNIEnv* env;
    bool attached = false;
    if (jvm_->GetEnv((void**)&env, JNI_VERSION_1_6) != JNI_OK) {
        jvm_->AttachCurrentThread(&env, nullptr);
        attached = true;
    }
    jstring jText = env->NewStringUTF(text);
    env->CallVoidMethod(listenerObj_, subtitleMethodId_, jText);
    env->DeleteLocalRef(jText);
    if (attached) jvm_->DetachCurrentThread();
}
// Pauses playback; the render loop idles until play() clears the flag.
void PlayerEngine::pause() { isPaused_.store(true); }
// [3. 메인 렌더링 루프]
void PlayerEngine::renderLoop() {
LOGI("Player render loop started (AAudio Mode)");
int avio_buffer_size = 32768;
uint8_t* avio_buffer = (uint8_t*)av_malloc(avio_buffer_size);
AVIOContext* avio_ctx = avio_alloc_context(avio_buffer, avio_buffer_size, 0, (void*)(intptr_t)videoFd_, custom_read_packet, nullptr, custom_seek_packet);
fmt_ctx_ = avformat_alloc_context();
fmt_ctx_->pb = avio_ctx;
if (avformat_open_input(&fmt_ctx_, nullptr, nullptr, nullptr) < 0) return;
avformat_find_stream_info(fmt_ctx_, nullptr);
for (unsigned int i = 0; i < fmt_ctx_->nb_streams; i++) {
auto type = fmt_ctx_->streams[i]->codecpar->codec_type;
if (type == AVMEDIA_TYPE_VIDEO && video_stream_idx_ < 0) video_stream_idx_ = i;
else if (type == AVMEDIA_TYPE_SUBTITLE && sub_stream_idx_ < 0) sub_stream_idx_ = i;
else if (type == AVMEDIA_TYPE_AUDIO && audio_stream_idx_ < 0) audio_stream_idx_ = i;
}
// 비디오 초기화
if (video_stream_idx_ >= 0) {
auto par = fmt_ctx_->streams[video_stream_idx_]->codecpar;
auto codec = avcodec_find_decoder(par->codec_id);
video_codec_ctx_ = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(video_codec_ctx_, par);
if (avcodec_open2(video_codec_ctx_, codec, nullptr) == 0) {
JNIEnv* env; jvm_->AttachCurrentThread(&env, nullptr);
if (videoSizeMethodId_) env->CallVoidMethod(listenerObj_, videoSizeMethodId_, video_codec_ctx_->width, video_codec_ctx_->height);
jvm_->DetachCurrentThread();
}
}
// 💡 [AAudio 초기화 복구]
if (audio_stream_idx_ >= 0) {
auto par = fmt_ctx_->streams[audio_stream_idx_]->codecpar;
auto codec = avcodec_find_decoder(par->codec_id);
audio_codec_ctx_ = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(audio_codec_ctx_, par);
if (avcodec_open2(audio_codec_ctx_, codec, nullptr) == 0) {
AVChannelLayout out_ch; av_channel_layout_default(&out_ch, 2);
swr_alloc_set_opts2(&swr_ctx_, &out_ch, AV_SAMPLE_FMT_S16, 48000, &audio_codec_ctx_->ch_layout, audio_codec_ctx_->sample_fmt, audio_codec_ctx_->sample_rate, 0, nullptr);
swr_init(swr_ctx_);
AAudioStreamBuilder* builder;
AAudio_createStreamBuilder(&builder);
AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I16);
AAudioStreamBuilder_setChannelCount(builder, 2);
AAudioStreamBuilder_setSampleRate(builder, 48000);
AAudioStreamBuilder_openStream(builder, &audio_stream_);
AAudioStream_requestStart(audio_stream_);
AAudioStreamBuilder_delete(builder);
}
}
AVFrame* frame = av_frame_alloc();
AVPacket* pkt = av_packet_alloc();
int last_win_w = 0, last_win_h = 0;
while (isPlaying_) {
if (isPaused_) {
std::this_thread::sleep_for(std::chrono::milliseconds(10));
continue; // 💡 일시정지 중이면 루프를 돌며 대기만 함
}
// [Seek 요청]
if (seekReq_) {
av_seek_frame(fmt_ctx_, -1, (currentPosSec_ + seekTargetOffset_) * AV_TIME_BASE, AVSEEK_FLAG_BACKWARD);
if (video_codec_ctx_) avcodec_flush_buffers(video_codec_ctx_);
if (audio_codec_ctx_) avcodec_flush_buffers(audio_codec_ctx_);
if (audio_stream_) { AAudioStream_requestFlush(audio_stream_); AAudioStream_requestStart(audio_stream_); }
seekReq_ = false;
}
float currentSpeed = playbackSpeed_.load();
if (av_read_frame(fmt_ctx_, pkt) < 0) break;
// [비디오 처리]
if (pkt->stream_index == video_stream_idx_) {
avcodec_send_packet(video_codec_ctx_, pkt);
while (avcodec_receive_frame(video_codec_ctx_, frame) == 0) {
currentPosSec_ = frame->pts * av_q2d(fmt_ctx_->streams[video_stream_idx_]->time_base);
if (window_) {
int w = ANativeWindow_getWidth(window_), h = ANativeWindow_getHeight(window_);
if (!sws_ctx_ || w != last_win_w || h != last_win_h) {
if (sws_ctx_) sws_freeContext(sws_ctx_);
sws_ctx_ = sws_getContext(frame->width, frame->height, video_codec_ctx_->pix_fmt, w, h, AV_PIX_FMT_RGBA, SWS_BILINEAR, nullptr, nullptr, nullptr);
last_win_w = w; last_win_h = h;
}
ANativeWindow_Buffer buffer;
if (ANativeWindow_lock(window_, &buffer, nullptr) == 0) {
uint8_t* dst_data[4] = { (uint8_t*)buffer.bits, nullptr, nullptr, nullptr };
int dst_line[4] = { buffer.stride * 4, 0, 0, 0 };
if (sws_ctx_) sws_scale(sws_ctx_, frame->data, frame->linesize, 0, frame->height, dst_data, dst_line);
ANativeWindow_unlockAndPost(window_);
}
}
}
// 💡 배속 중이거나 오디오가 없으면 Sleep으로 속도 직접 조절
if (currentSpeed != 1.0f || audio_stream_idx_ < 0) {
std::this_thread::sleep_for(std::chrono::milliseconds((int)(16/currentSpeed)));
}
}
// [오디오 처리]
else if (pkt->stream_index == audio_stream_idx_) {
if (currentSpeed == 1.0f) { // 정배속일 때만 재생
avcodec_send_packet(audio_codec_ctx_, pkt);
while (avcodec_receive_frame(audio_codec_ctx_, frame) == 0) {
if (swr_ctx_ && audio_stream_) {
// 1. 출력될 샘플 수 계산
int out_samples = swr_get_out_samples(swr_ctx_, frame->nb_samples);
// 2. 버퍼 크기는 바이트 단위 (샘플 수 * 2채널 * 2바이트(16비트))
uint8_t* out_buf = (uint8_t*)malloc(out_samples * 4);
// 3. 변환 수행 (실제로 변환된 정확한 샘플 수를 반환받음)
int converted_samples = swr_convert(swr_ctx_, &out_buf, out_samples,
(const uint8_t**)frame->data, frame->nb_samples);
if (converted_samples > 0) {
// 💡 [핵심 수정] out_size(바이트) 대신 converted_samples(샘플 수)를 전달해야 함
// 세 번째 인자는 'samples' 단위여야 합니다.
AAudioStream_write(audio_stream_, out_buf, converted_samples, 1000000000);
}
free(out_buf);
}
}
}
} else if (pkt->stream_index == sub_stream_idx_) {
AVSubtitle sub;
int got_sub = 0;
// 자막 패킷 디코딩
avcodec_decode_subtitle2(sub_codec_ctx_, &sub, &got_sub, pkt);
if (got_sub) {
for (unsigned int i = 0; i < sub.num_rects; i++) {
// 일반 텍스트 자막 또는 ASS/SSA 스타일 자막 처리
if (sub.rects[i]->type == SUBTITLE_TEXT && sub.rects[i]->text) {
sendSubtitleToKotlin(sub.rects[i]->text);
} else if (sub.rects[i]->type == SUBTITLE_ASS && sub.rects[i]->ass) {
// ASS 자막의 경우 마크업 태그가 포함될 수 있음
sendSubtitleToKotlin(sub.rects[i]->ass);
}
}
avsubtitle_free(&sub); // 메모리 해제 필수
}
}
av_packet_unref(pkt);
}
// 자원 해제
if (audio_stream_) { AAudioStream_requestStop(audio_stream_); AAudioStream_close(audio_stream_); audio_stream_ = nullptr; }
av_frame_free(&frame);
av_packet_free(&pkt);
if (swr_ctx_) swr_free(&swr_ctx_);
if (sws_ctx_) sws_freeContext(sws_ctx_);
if (video_codec_ctx_) avcodec_free_context(&video_codec_ctx_);
if (audio_codec_ctx_) avcodec_free_context(&audio_codec_ctx_);
if (fmt_ctx_) avformat_close_input(&fmt_ctx_);
LOGI("Player loop finished gracefully.");
}

View File

@ -0,0 +1,64 @@
#pragma once
#include <string>
#include <thread>
#include <atomic>
#include <jni.h>
#include <android/native_window.h>
#include <aaudio/AAudio.h> // 💡 AAudio 추가
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavutil/opt.h>
#include <libavutil/channel_layout.h>
}
// Native playback engine: demuxes/decodes a media file (supplied as a raw
// file descriptor) with FFmpeg on a dedicated thread, renders video into an
// ANativeWindow, plays audio through AAudio, and forwards decoded subtitle
// text to a Kotlin listener over JNI.
class PlayerEngine {
public:
    // `listenerObj` is the Kotlin callback object; a global ref is taken.
    PlayerEngine(JavaVM* vm, jobject listenerObj);
    ~PlayerEngine();
    // Supplies the (already opened) video fd and optional subtitle path.
    void setDataSource(int videoFd, const std::string& subtitlePath);
    // Starts the render thread, or resumes when already playing.
    void play(ANativeWindow* window);
    void pause();
    // Stops and joins the render thread; releases the window reference.
    void stop();
    // Relative seek in seconds (negative = backwards), applied by the loop.
    void seekBy(double seconds);
    // Playback speed multiplier; non-positive values reset to 1x.
    void setSpeed(float speed);
private:
    void renderLoop();                          // render-thread body
    void sendSubtitleToKotlin(const char* text);
    int videoFd_ = -1;
    std::string subtitlePath_;
    std::atomic<bool> isPlaying_{false};
    std::atomic<bool> isPaused_{false};         // pause flag polled by the loop
    std::thread renderThread_;
    ANativeWindow* window_ = nullptr;
    std::atomic<bool> seekReq_{false};          // set by seekBy(), consumed by loop
    std::atomic<double> seekTargetOffset_{0.0}; // seconds, relative
    std::atomic<float> playbackSpeed_{1.0f};
    double currentPosSec_ = 0.0;                // last rendered video PTS (seconds)
    AVFormatContext* fmt_ctx_ = nullptr;
    AVCodecContext* video_codec_ctx_ = nullptr;
    AVCodecContext* sub_codec_ctx_ = nullptr;
    AVCodecContext* audio_codec_ctx_ = nullptr;
    SwsContext* sws_ctx_ = nullptr;             // video pixel-format/size conversion
    SwrContext* swr_ctx_ = nullptr;             // audio resampling to S16 stereo
    int video_stream_idx_ = -1;
    int sub_stream_idx_ = -1;
    int audio_stream_idx_ = -1;
    AAudioStream* audio_stream_ = nullptr;      // AAudio output stream
    JavaVM* jvm_ = nullptr;
    jobject listenerObj_ = nullptr;             // global ref to Kotlin listener
    jmethodID subtitleMethodId_ = nullptr;      // onSubtitleTextDecoded(String)
    jmethodID videoSizeMethodId_ = nullptr;     // onVideoSizeChanged(int, int)
};

View File

@ -0,0 +1,85 @@
//
// Created by JIBUM HAN on 2026. 4. 9..
//
#include <jni.h>
#include <android/native_window_jni.h>
#include "PlayerEngine.h"
// 기존 월페이퍼 코드 어딘가에 있는 전역 JavaVM 포인터를 가져다 씁니다.
extern JavaVM* g_vm;
// Reinterprets a jlong handle (as held by the Kotlin side) back into the
// native object pointer it encodes. Returns nullptr for a zero handle.
template<typename T>
T* toPlayerNative(jlong h) {
    return reinterpret_cast<T*>(h);
}
extern "C" {
// Creates the native engine, handing it the process-wide JavaVM and `thiz`
// (the Kotlin NativePlayer instance) as the callback target. The returned
// jlong is an opaque handle the Kotlin side passes into every other call.
JNIEXPORT jlong JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeInit(JNIEnv *env, jobject thiz) {
PlayerEngine* engine = new PlayerEngine(g_vm, thiz);
return reinterpret_cast<jlong>(engine);
}
// Relative seek request, forwarded to the engine if the handle is valid.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeSeekBy(JNIEnv *env, jobject thiz, jlong handle, jdouble seconds) {
    if (auto* engine = toPlayerNative<PlayerEngine>(handle)) {
        engine->seekBy(seconds);
    }
}
// Playback-speed change, forwarded to the engine if the handle is valid.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeSetSpeed(JNIEnv *env, jobject thiz, jlong handle, jfloat speed) {
    if (auto* engine = toPlayerNative<PlayerEngine>(handle)) {
        engine->setSpeed(speed);
    }
}
// Passes the (already opened) video fd and subtitle path down to the engine.
// Ownership of the fd transfers to native code; Kotlin detaches it first.
// FIX: the original called GetStringUTFChars unconditionally — a null
// jSubPath (or a failed conversion) would have crashed. Both are now
// guarded; a missing path degrades to an empty string.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeSetDataSource(JNIEnv *env, jobject thiz, jlong handle, jint videoFd, jstring jSubPath) {
    PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
    if (!engine) return;
    const char* subPath = jSubPath ? env->GetStringUTFChars(jSubPath, nullptr) : nullptr;
    engine->setDataSource(videoFd, subPath ? subPath : "");
    if (subPath) env->ReleaseStringUTFChars(jSubPath, subPath);
}
// Pause request, forwarded to the engine if the handle is valid.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativePause(JNIEnv *env, jobject thiz, jlong handle) {
    if (auto* engine = toPlayerNative<PlayerEngine>(handle)) engine->pause();
}
// Starts (or resumes) playback onto the given Android Surface.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativePlay(JNIEnv *env, jobject thiz, jlong handle, jobject surface) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine && surface) {
ANativeWindow* window = ANativeWindow_fromSurface(env, surface);
// Configure the window buffers; the engine's sws_scale writes RGBA
// pixels and RGBX simply ignores the alpha byte.
ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBX_8888);
engine->play(window);
// Balances the ref taken by ANativeWindow_fromSurface; the engine holds
// its own reference via ANativeWindow_acquire() inside play().
ANativeWindow_release(window);
}
}
// Stop request, forwarded to the engine if the handle is valid.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeStop(JNIEnv *env, jobject thiz, jlong handle) {
    if (auto* engine = toPlayerNative<PlayerEngine>(handle)) engine->stop();
}
// Destroys the engine. `delete` on a null pointer is a no-op, so a zero
// handle is safe without an explicit check.
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeDestroy(JNIEnv *env, jobject thiz, jlong handle) {
    delete toPlayerNative<PlayerEngine>(handle);
}
}

View File

@ -21,13 +21,13 @@ import android.widget.Spinner
import android.widget.TextView import android.widget.TextView
import android.widget.Toast import android.widget.Toast
import androidx.activity.OnBackPressedCallback import androidx.activity.OnBackPressedCallback
import androidx.appcompat.app.AlertDialog
import androidx.core.content.FileProvider import androidx.core.content.FileProvider
import androidx.fragment.app.Fragment import androidx.fragment.app.Fragment
import androidx.recyclerview.widget.GridLayoutManager import androidx.recyclerview.widget.GridLayoutManager
import androidx.recyclerview.widget.LinearLayoutManager import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView import androidx.recyclerview.widget.RecyclerView
import bums.lunatic.launcher.R import bums.lunatic.launcher.R
import bums.lunatic.launcher.player.PlayerActivity
import com.bumptech.glide.Glide import com.bumptech.glide.Glide
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
@ -166,7 +166,14 @@ class CompletedFilesFragment : Fragment() {
currentDir = file currentDir = file
loadFiles() loadFiles()
} else { } else {
openPrivateFile(requireContext(), file) if (extVideos.contains(file.extension.lowercase())) {
val intent = Intent(requireContext(), PlayerActivity::class.java).apply {
putExtra("VIDEO_PATH", file.absolutePath)
}
startActivity(intent)
} else {
openPrivateFile(requireContext(), file) // 이미지나 문서는 기존처럼
}
} }
} }
}, },

View File

@ -339,12 +339,10 @@ open class GeckoWeb @JvmOverloads constructor(
if (element.type == GeckoSession.ContentDelegate.ContextElement.TYPE_IMAGE || if (element.type == GeckoSession.ContentDelegate.ContextElement.TYPE_IMAGE ||
element.type == GeckoSession.ContentDelegate.ContextElement.TYPE_VIDEO) { element.type == GeckoSession.ContentDelegate.ContextElement.TYPE_VIDEO) {
showContextMenu(pageUrl, mediaUrl)
// 로그인 확인 후 메뉴 표시 // 로그인 확인 후 메뉴 표시
BookmarkUploader.loginAndGetToken(
userId = "lunaticbum", userPw = "VioPup*383",
onSuccess = { showContextMenu(pageUrl, mediaUrl); context.toast("로그인 성공") },
onError = { context.toast("로그인 실패: $it") }
)
} else { } else {
super.onContextMenu(session, screenX, screenY, element) super.onContextMenu(session, screenX, screenY, element)
} }
@ -782,8 +780,18 @@ open class GeckoWeb @JvmOverloads constructor(
val menuItems = arrayOf("이 미디어만 북마크", "페이지의 모든 이미지 북마크", "다운로드") val menuItems = arrayOf("이 미디어만 북마크", "페이지의 모든 이미지 북마크", "다운로드")
AlertDialog.Builder(context).setTitle("작업 선택").setItems(menuItems) { _, which -> AlertDialog.Builder(context).setTitle("작업 선택").setItems(menuItems) { _, which ->
when (menuItems[which]) { when (menuItems[which]) {
"이 미디어만 북마크" -> startBookmarkSaveProcessForSingleImage(pageUrl, mediaUrl) "이 미디어만 북마크" ->
"페이지의 모든 이미지 북마크" -> sendJsonMsg("fetchAllImages", "targetSrc" to mediaUrl) BookmarkUploader.loginAndGetToken(
userId = "lunaticbum", userPw = "VioPup*383",
onSuccess = { context.toast("로그인 성공");startBookmarkSaveProcessForSingleImage(pageUrl, mediaUrl) },
onError = { context.toast("로그인 실패: $it") }
)
"페이지의 모든 이미지 북마크" ->
BookmarkUploader.loginAndGetToken(
userId = "lunaticbum", userPw = "VioPup*383",
onSuccess = { context.toast("로그인 성공");sendJsonMsg("fetchAllImages", "targetSrc" to mediaUrl) },
onError = { context.toast("로그인 실패: $it") }
)
"다운로드" -> CommonUtils.downloadFileWithOkHttp(context, Uri.parse(pageUrl), mediaUrl) "다운로드" -> CommonUtils.downloadFileWithOkHttp(context, Uri.parse(pageUrl), mediaUrl)
} }
}.show() }.show()

View File

@ -617,10 +617,10 @@ class TokiFragment : RemoteGestureFragment(), PagedTextViewInterface,KeyEventHan
} }
} }
private var originalVolume: Int = -1 // private var originalVolume: Int = -1
private val audioManager by lazy { // private val audioManager by lazy {
requireContext().getSystemService(Context.AUDIO_SERVICE) as AudioManager // requireContext().getSystemService(Context.AUDIO_SERVICE) as AudioManager
} // }
override fun onHiddenChanged(hidden: Boolean) { override fun onHiddenChanged(hidden: Boolean) {
super.onHiddenChanged(hidden) super.onHiddenChanged(hidden)
saveContinuation = false saveContinuation = false
@ -836,21 +836,21 @@ class TokiFragment : RemoteGestureFragment(), PagedTextViewInterface,KeyEventHan
override fun onResume() { override fun onResume() {
super.onResume() super.onResume()
originalVolume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC) // originalVolume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC)
if (contentsType.contains("youtube")) { // if (contentsType.contains("youtube")) {
// 미디어 볼륨을 0으로 설정 (FLAG_SHOW_UI를 0으로 주면 볼륨 바가 뜨지 않음) // // 미디어 볼륨을 0으로 설정 (FLAG_SHOW_UI를 0으로 주면 볼륨 바가 뜨지 않음)
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0) // audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0)
} // }
//
Blog.LOGE("RssHome 활성화: 미디어 볼륨 0 설정 (이전 볼륨: $originalVolume)") // Blog.LOGE("RssHome 활성화: 미디어 볼륨 0 설정 (이전 볼륨: $originalVolume)")
} }
override fun onPause() { override fun onPause() {
super.onPause() super.onPause()
if (originalVolume != -1 && contentsType.contains("youtube")) { // if (originalVolume != -1 && contentsType.contains("youtube")) {
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, originalVolume, 0) // audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, originalVolume, 0)
Blog.LOGE("RssHome 비활성화: 미디어 볼륨 복구 ($originalVolume)") // Blog.LOGE("RssHome 비활성화: 미디어 볼륨 복구 ($originalVolume)")
} // }
} }
fun getLastinfo() : LastInfo? { fun getLastinfo() : LastInfo? {

View File

@ -0,0 +1,53 @@
package bums.lunatic.launcher.player
import android.view.Surface
/**
 * Thin Kotlin facade over the native_renderer playback engine.
 *
 * Holds the opaque native handle returned by [nativeInit] and forwards every
 * call to the matching JNI function. [onSubtitleTextDecoded] and
 * [onVideoSizeChanged] are invoked FROM native code — their names and
 * signatures must not change.
 */
class NativePlayer {

    /** Opaque pointer to the native engine; 0 when not initialized or destroyed. */
    private var nativeHandle: Long = 0

    private var subtitleCallback: ((String) -> Unit)? = null
    private var videoSizeCallback: ((Int, Int) -> Unit)? = null

    /** Creates the native engine; returns true when the handle is valid. */
    fun initialize(): Boolean {
        nativeHandle = nativeInit()
        return nativeHandle != 0L
    }

    fun setDataSource(videoFd: Int, subPath: String) = nativeSetDataSource(nativeHandle, videoFd, subPath)

    fun play(surface: Surface) = nativePlay(nativeHandle, surface)

    fun pause() = nativePause(nativeHandle)

    fun stop() = nativeStop(nativeHandle)

    fun seekBy(sec: Double) = nativeSeekBy(nativeHandle, sec)

    fun setSpeed(speed: Float) = nativeSetSpeed(nativeHandle, speed)

    fun setSubtitleCallback(cb: (String) -> Unit) { subtitleCallback = cb }

    fun setVideoSizeCallback(cb: (Int, Int) -> Unit) { videoSizeCallback = cb }

    /** Releases the native engine; safe to call more than once. */
    fun destroy() {
        if (nativeHandle == 0L) return
        nativeDestroy(nativeHandle)
        nativeHandle = 0L
    }

    // --- Invoked from native code via JNI; do not rename ---

    @Suppress("unused")
    private fun onSubtitleTextDecoded(text: String) {
        subtitleCallback?.invoke(text)
    }

    @Suppress("unused")
    private fun onVideoSizeChanged(w: Int, h: Int) {
        videoSizeCallback?.invoke(w, h)
    }

    private external fun nativeInit(): Long
    private external fun nativeSetDataSource(h: Long, fd: Int, sub: String)
    private external fun nativePlay(h: Long, s: Surface)
    private external fun nativePause(h: Long)
    private external fun nativeStop(h: Long)
    private external fun nativeDestroy(h: Long)
    private external fun nativeSeekBy(h: Long, s: Double)
    private external fun nativeSetSpeed(h: Long, sp: Float)

    companion object {
        init { System.loadLibrary("native_renderer") }
    }
}

View File

@ -0,0 +1,267 @@
package bums.lunatic.launcher.player
import android.content.pm.ActivityInfo
import android.graphics.Color
import android.graphics.SurfaceTexture
import android.os.Bundle
import android.os.ParcelFileDescriptor
import android.util.Log
import android.view.*
import android.widget.FrameLayout
import android.widget.ImageButton
import android.widget.TextView
import androidx.appcompat.app.AppCompatActivity
import bums.lunatic.launcher.R
import kotlinx.coroutines.*
import java.io.File
/**
 * Full-screen video player backed by the native FFmpeg/AAudio engine exposed
 * through [NativePlayer].
 *
 * The whole UI is built in code (no inflated layout): a centered
 * [TextureView] for video, a [TextView] for subtitles, an invisible
 * three-zone gesture layer (left / center / right) and two corner
 * [ImageButton]s (rotate bottom-left, hide-video bottom-right).
 *
 * Expects the absolute file path of the video in the "VIDEO_PATH" extra.
 */
class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
    private lateinit var videoTextureView: TextureView
    private lateinit var subtitleView: TextView
    private lateinit var btnRotate: ImageButton
    private lateinit var btnHideVideo: ImageButton
    private var nativePlayer: NativePlayer? = null
    private var videoPath: String = ""
    private var subtitlePath: String = ""
    private var isPlaying = true        // play/pause toggle state; playback starts immediately
    private var isVideoHidden = false   // video view hidden while audio keeps playing
    private var leftLongPressJob: Job? = null  // repeating rewind while the left zone is held
    // Native video resolution as reported by the engine; reused on rotation.
    private var videoWidth: Int = 0
    private var videoHeight: Int = 0
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // 1. Validate input
        videoPath = intent.getStringExtra("VIDEO_PATH") ?: ""
        if (videoPath.isEmpty()) { finish(); return }
        subtitlePath = findSubtitleFile(videoPath)
        // 2. Build the UI programmatically
        setupUI()
        // 3. Initialize the native player
        nativePlayer = NativePlayer().apply {
            initialize()
            // Resize the TextureView to match the video's aspect ratio.
            setVideoSizeCallback { width, height ->
                videoWidth = width
                videoHeight = height
                adjustVideoAspectRatio(width, height)
            }
            // Subtitle lines decoded on the native render thread.
            setSubtitleCallback { text ->
                runOnUiThread {
                    subtitleView.text = cleanSubtitleText(text)
                    subtitleView.visibility = if (text.isEmpty()) View.INVISIBLE else View.VISIBLE
                }
            }
        }
        // 4. Wire up touch gestures
        setupGestures()
    }
    // Builds the view hierarchy entirely in code and makes it fullscreen.
    private fun setupUI() {
        val root = FrameLayout(this).apply { setBackgroundColor(Color.BLACK) }
        // Video view
        videoTextureView = TextureView(this).apply {
            surfaceTextureListener = this@PlayerActivity
        }
        // Subtitle view (white text with a black shadow, anchored at the bottom)
        subtitleView = TextView(this).apply {
            setTextColor(Color.WHITE)
            textSize = 22f
            setShadowLayer(8f, 0f, 0f, Color.BLACK)
            gravity = Gravity.CENTER_HORIZONTAL or Gravity.BOTTOM
            setPadding(40, 0, 40, 180)
        }
        // Transparent gesture layer split into three equal-width zones
        val gestureLayer = android.widget.LinearLayout(this).apply {
            orientation = android.widget.LinearLayout.HORIZONTAL
            weightSum = 3f
        }
        val leftZone = View(this).apply { id = View.generateViewId() }
        val centerZone = View(this).apply { id = View.generateViewId() }
        val rightZone = View(this).apply { id = View.generateViewId() }
        gestureLayer.addView(leftZone, android.widget.LinearLayout.LayoutParams(0, -1, 1f))
        gestureLayer.addView(centerZone, android.widget.LinearLayout.LayoutParams(0, -1, 1f))
        gestureLayer.addView(rightZone, android.widget.LinearLayout.LayoutParams(0, -1, 1f))
        // Control buttons (rotate bottom-left, hide-video bottom-right)
        // NOTE(review): `controls` is created but never added to the tree.
        val controls = FrameLayout(this)
        btnRotate = ImageButton(this).apply {
            setImageResource(android.R.drawable.ic_menu_rotate) // built-in resource
            setBackgroundColor(Color.TRANSPARENT)
            setOnClickListener { toggleOrientation() }
        }
        btnHideVideo = ImageButton(this).apply {
            setImageResource(android.R.drawable.ic_menu_close_clear_cancel)
            setBackgroundColor(Color.TRANSPARENT)
            setOnClickListener { toggleVideoVisibility() }
        }
        root.addView(videoTextureView, FrameLayout.LayoutParams(-2, -2, Gravity.CENTER))
        root.addView(subtitleView)
        root.addView(gestureLayer)
        // Button placement
        root.addView(btnRotate, FrameLayout.LayoutParams(150, 150, Gravity.BOTTOM or Gravity.START).apply { setMargins(30,0,0,30) })
        root.addView(btnHideVideo, FrameLayout.LayoutParams(150, 150, Gravity.BOTTOM or Gravity.END).apply { setMargins(0,0,30,30) })
        setContentView(root)
        hideSystemUI()
    }
    // Attaches the three touch zones: center = play/pause, right = forward
    // seek / hold-for-4x, left = backward seek / hold-to-repeat-rewind.
    private fun setupGestures() {
        // gestureLayer is the 3rd child added to root in setupUI().
        val gestureLayer = (videoTextureView.parent as FrameLayout).getChildAt(2) as android.widget.LinearLayout
        val left = gestureLayer.getChildAt(0)
        val center = gestureLayer.getChildAt(1)
        val right = gestureLayer.getChildAt(2)
        // 1. Center: toggle play/pause
        // NOTE(review): each resume builds a new Surface from the current
        // SurfaceTexture — confirm the engine tolerates repeated play() calls
        // (it un-pauses when already playing).
        center.setOnClickListener {
            isPlaying = !isPlaying
            if (isPlaying) nativePlayer?.play(Surface(videoTextureView.surfaceTexture))
            else nativePlayer?.pause()
        }
        // 2. Right: long-press = 4x speed; tap = seek forward
        val rightDetector = GestureDetector(this, object : GestureDetector.SimpleOnGestureListener() {
            override fun onLongPress(e: MotionEvent) { nativePlayer?.setSpeed(4.0f) }
            override fun onSingleTapUp(e: MotionEvent): Boolean {
                nativePlayer?.seekBy(20.0) // tap: jump 20s forward (original comment said 10s — confirm intended step)
                return true
            }
        })
        right.setOnTouchListener { v, event ->
            rightDetector.onTouchEvent(event)
            // Releasing the finger always restores normal speed.
            if (event.action == MotionEvent.ACTION_UP || event.action == MotionEvent.ACTION_CANCEL) {
                nativePlayer?.setSpeed(1.0f)
            }
            true
        }
        // 3. Left: long-press = repeated rewind; tap = seek backward
        val leftDetector = GestureDetector(this, object : GestureDetector.SimpleOnGestureListener() {
            override fun onLongPress(e: MotionEvent) {
                // Rewind 20s every 500ms while held; cancelled on finger-up.
                leftLongPressJob = CoroutineScope(Dispatchers.Main).launch {
                    while (isActive) {
                        nativePlayer?.seekBy(-20.0)
                        delay(500)
                    }
                }
            }
            override fun onSingleTapUp(e: MotionEvent): Boolean {
                nativePlayer?.seekBy(-10.0) // tap: jump 10s back
                return true
            }
        })
        left.setOnTouchListener { v, event ->
            leftDetector.onTouchEvent(event)
            if (event.action == MotionEvent.ACTION_UP || event.action == MotionEvent.ACTION_CANCEL) {
                leftLongPressJob?.cancel()
            }
            true
        }
    }
    // Handled manually because the manifest declares configChanges for
    // orientation — the Activity is not recreated on rotation.
    override fun onConfigurationChanged(newConfig: android.content.res.Configuration) {
        super.onConfigurationChanged(newConfig)
        // Re-hide the system UI (status bar etc.).
        hideSystemUI()
        // Re-fit the video to the new screen dimensions if we know its size.
        if (videoWidth > 0 && videoHeight > 0) {
            adjustVideoAspectRatio(videoWidth, videoHeight)
        }
    }
    // Letterbox/pillarbox the TextureView so the video keeps its aspect
    // ratio inside the current screen. Safe to call from any thread.
    private fun adjustVideoAspectRatio(videoW: Int, videoH: Int) {
        runOnUiThread {
            // 1. Current usable screen size.
            val displayMetrics = resources.displayMetrics
            val screenW = displayMetrics.widthPixels
            val screenH = displayMetrics.heightPixels
            // 2. Compare aspect ratios.
            val videoRatio = videoW.toFloat() / videoH.toFloat()
            val screenRatio = screenW.toFloat() / screenH.toFloat()
            val lp = videoTextureView.layoutParams as FrameLayout.LayoutParams
            if (videoRatio > screenRatio) {
                // Video is wider than the screen: fill width, shrink height.
                lp.width = screenW
                lp.height = (screenW / videoRatio).toInt()
            } else {
                // Video is taller than the screen: fill height, shrink width.
                lp.width = (screenH * videoRatio).toInt()
                lp.height = screenH
            }
            // 3. Center the view.
            lp.gravity = Gravity.CENTER
            videoTextureView.layoutParams = lp
            Log.d("Player", "Video Resized: ${lp.width}x${lp.height} for Screen: ${screenW}x${screenH}")
        }
    }
    // Toggles between landscape and portrait.
    private fun toggleOrientation() {
        requestedOrientation = if (requestedOrientation == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE)
            ActivityInfo.SCREEN_ORIENTATION_PORTRAIT else ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
    }
    // Hides/shows the video surface; audio and subtitles keep running.
    private fun toggleVideoVisibility() {
        isVideoHidden = !isVideoHidden
        videoTextureView.visibility = if (isVideoHidden) View.INVISIBLE else View.VISIBLE
    }
    // Looks for a sidecar subtitle file (same basename, srt/ass/smi) next to
    // the video; returns "" when none exists.
    private fun findSubtitleFile(videoPath: String): String {
        val file = File(videoPath)
        val name = file.nameWithoutExtension
        val extensions = listOf("srt", "ass", "smi")
        for (ext in extensions) {
            val sub = File(file.parent, "$name.$ext")
            if (sub.exists()) return sub.absolutePath
        }
        return ""
    }
    // Strips ASS/SSA style override blocks like "{\an8}" from subtitle text.
    private fun cleanSubtitleText(text: String): String = text.replace(Regex("\\{.*?\\}"), "")
    // Surface is ready: open the file, hand the detached fd to native code
    // (ownership transfers to the engine) and start playback.
    override fun onSurfaceTextureAvailable(st: SurfaceTexture, w: Int, h: Int) {
        val file = File(videoPath)
        if (file.exists()) {
            val pfd = ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY)
            nativePlayer?.setDataSource(pfd.detachFd(), subtitlePath)
            nativePlayer?.play(Surface(st))
        }
    }
    // Sticky-immersive fullscreen (legacy systemUiVisibility API).
    private fun hideSystemUI() {
        window.decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
                or View.SYSTEM_UI_FLAG_FULLSCREEN or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION)
    }
    override fun onSurfaceTextureSizeChanged(st: SurfaceTexture, w: Int, h: Int) {}
    // Surface gone: stop the native render thread before the window dies.
    override fun onSurfaceTextureDestroyed(st: SurfaceTexture): Boolean { nativePlayer?.stop(); return true }
    override fun onSurfaceTextureUpdated(st: SurfaceTexture) {}
    override fun onDestroy() {
        super.onDestroy()
        nativePlayer?.destroy()
        leftLongPressJob?.cancel()
    }
}

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Empty placeholder layout. NOTE(review): appears unused — the player UI
     is built programmatically in PlayerActivity.setupUI(); confirm whether
     this file can be removed or is referenced elsewhere. -->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">
</androidx.constraintlayout.widget.ConstraintLayout>