This commit is contained in:
lunaticbum 2026-04-10 15:31:07 +09:00
parent 964655af7f
commit 162d2b7563
18 changed files with 1151 additions and 195 deletions

View File

@ -229,6 +229,11 @@ dependencies {
// (선택) CameraX 확장 라이브러리 (보케, HDR 등)
implementation("androidx.camera:camera-extensions:$camerax_version")
implementation("com.google.mlkit:translate:17.0.3") // 버전 살짝 업그레이드 권장
implementation("com.google.mlkit:language-id:17.0.5")
implementation("com.google.android.gms:play-services-base:18.5.0")
implementation("com.google.mlkit:common:18.11.0")
constraints {
// ⚠️ 이 버전을 프로젝트 루트의 build.gradle.kts에 정의된 kotlinVersion 값과 정확히 일치시키세요.
val targetKotlinVersion = "2.0.20"

View File

@ -62,6 +62,14 @@
<uses-permission android:name="android.permission.RECEIVE_SMS" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<queries>
<package android:name="com.google.android.gms" />
<intent>
<action android:name="com.google.android.gms.actions.SEARCH_ACTION" />
</intent>
</queries>
<application
android:name=".LunaticLauncher"
android:icon="@drawable/ic_launcher"
@ -105,9 +113,12 @@
<data android:mimeType="*/*"/>
</intent-filter>
</activity>
<activity
android:name=".home.TranslatorActivity"
android:windowSoftInputMode="adjustResize" />
<activity
android:name=".player.PlayerActivity"
android:theme="@style/Theme.Player"
android:launchMode="singleInstance"
android:configChanges="orientation|screenSize|screenLayout|smallestScreenSize"
android:screenOrientation="sensor"
@ -233,6 +244,8 @@
</service>
<service android:name="bums.lunatic.launcher.workers.LocationUpdateService" />
<meta-data
android:name="com.google.mlkit.vision.DEPENDENCIES"
android:value="ocr,langid,translate" />
</application>
</manifest>

View File

@ -10,7 +10,6 @@
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// [1. 커스텀 I/O 함수]
static int custom_read_packet(void *opaque, uint8_t *buf, int buf_size) {
int fd = (int)(intptr_t)opaque;
int ret = read(fd, buf, buf_size);
@ -31,14 +30,13 @@ static int64_t custom_seek_packet(void *opaque, int64_t offset, int whence) {
return ret < 0 ? AVERROR(errno) : ret;
}
// [2. 생성자 및 초기화]
PlayerEngine::PlayerEngine(JavaVM* vm, jobject listenerObj) : jvm_(vm) {
JNIEnv* env;
jvm_->GetEnv((void**)&env, JNI_VERSION_1_6);
listenerObj_ = env->NewGlobalRef(listenerObj);
jclass clazz = env->GetObjectClass(listenerObj_);
auto getMethod = [&](const char* name, const char* sig) -> jmethodID {
auto safeGetMethod = [&](const char* name, const char* sig) -> jmethodID {
jmethodID id = env->GetMethodID(clazz, name, sig);
if (env->ExceptionCheck()) {
env->ExceptionClear();
@ -48,73 +46,44 @@ PlayerEngine::PlayerEngine(JavaVM* vm, jobject listenerObj) : jvm_(vm) {
return id;
};
subtitleMethodId_ = getMethod("onSubtitleTextDecoded", "(Ljava/lang/String;)V");
videoSizeMethodId_ = getMethod("onVideoSizeChanged", "(II)V");
subtitleMethodId_ = safeGetMethod("onSubtitleTextDecoded", "(Ljava/lang/String;)V");
videoSizeMethodId_ = safeGetMethod("onVideoSizeChanged", "(II)V");
preparedMethodId_ = safeGetMethod("onNativePrepared", "()V");
errorMethodId_ = safeGetMethod("onNativeError", "(ILjava/lang/String;)V");
}
PlayerEngine::~PlayerEngine() {
stop();
if (prepareThread_.joinable()) prepareThread_.join(); // 준비 스레드 대기
if (listenerObj_) {
JNIEnv* env;
jvm_->GetEnv((void**)&env, JNI_VERSION_1_6);
JNIEnv* env; jvm_->GetEnv((void**)&env, JNI_VERSION_1_6);
env->DeleteGlobalRef(listenerObj_);
}
}
void PlayerEngine::setDataSource(int videoFd, const std::string& subtitlePath) {
void PlayerEngine::setDataSource(int videoFd, int subtitleFd) {
videoFd_ = videoFd;
subtitlePath_ = subtitlePath;
subtitleFd_ = subtitleFd;
}
void PlayerEngine::play(ANativeWindow* window) {
if (isPlaying_) {
isPaused_ = false; // 💡 이미 재생 중이면 일시정지만 해제
return;
}
window_ = window;
ANativeWindow_acquire(window_);
isPlaying_ = true;
renderThread_ = std::thread(&PlayerEngine::renderLoop, this);
void PlayerEngine::prepareAsync() {
if (isPrepared_ || prepareThread_.joinable()) return;
prepareThread_ = std::thread(&PlayerEngine::prepareInternal, this);
}
void PlayerEngine::stop() {
if (!isPlaying_) return;
isPlaying_ = false;
if (renderThread_.joinable()) renderThread_.join();
if (window_) { ANativeWindow_release(window_); window_ = nullptr; }
}
void PlayerEngine::seekBy(double seconds) {
seekTargetOffset_ = seconds;
seekReq_ = true;
}
void PlayerEngine::setSpeed(float speed) {
playbackSpeed_ = speed > 0.0f ? speed : 1.0f;
}
void PlayerEngine::sendSubtitleToKotlin(const char* text) {
if (!jvm_ || !listenerObj_ || !subtitleMethodId_ || !text) return;
void PlayerEngine::prepareInternal() {
JNIEnv* env;
bool attached = false;
if (jvm_->GetEnv((void**)&env, JNI_VERSION_1_6) != JNI_OK) {
jvm_->AttachCurrentThread(&env, nullptr);
attached = true;
}
jstring jText = env->NewStringUTF(text);
env->CallVoidMethod(listenerObj_, subtitleMethodId_, jText);
env->DeleteLocalRef(jText);
if (attached) jvm_->DetachCurrentThread();
}
jvm_->AttachCurrentThread(&env, nullptr);
void PlayerEngine::pause() {
isPaused_ = true;
}
// [3. 메인 렌더링 루프]
void PlayerEngine::renderLoop() {
LOGI("Player render loop started (AAudio Mode)");
auto sendError = [&](int code, const char* msg) {
LOGE("Prepare Error [%d]: %s", code, msg);
if (errorMethodId_) {
jstring jMsg = env->NewStringUTF(msg);
env->CallVoidMethod(listenerObj_, errorMethodId_, code, jMsg);
env->DeleteLocalRef(jMsg);
}
jvm_->DetachCurrentThread();
};
int avio_buffer_size = 32768;
uint8_t* avio_buffer = (uint8_t*)av_malloc(avio_buffer_size);
@ -122,30 +91,38 @@ void PlayerEngine::renderLoop() {
fmt_ctx_ = avformat_alloc_context();
fmt_ctx_->pb = avio_ctx;
if (avformat_open_input(&fmt_ctx_, nullptr, nullptr, nullptr) < 0) return;
avformat_find_stream_info(fmt_ctx_, nullptr);
if (avformat_open_input(&fmt_ctx_, nullptr, nullptr, nullptr) < 0) {
return sendError(-1001, "Failed to open video file. Invalid FD or Format.");
}
if (avformat_find_stream_info(fmt_ctx_, nullptr) < 0) {
return sendError(-1002, "Failed to retrieve stream information.");
}
subtitle_tracks_info_ = "";
for (unsigned int i = 0; i < fmt_ctx_->nb_streams; i++) {
auto type = fmt_ctx_->streams[i]->codecpar->codec_type;
if (type == AVMEDIA_TYPE_VIDEO && video_stream_idx_ < 0) video_stream_idx_ = i;
else if (type == AVMEDIA_TYPE_SUBTITLE && sub_stream_idx_ < 0) sub_stream_idx_ = i;
else if (type == AVMEDIA_TYPE_AUDIO && audio_stream_idx_ < 0) audio_stream_idx_ = i;
else if (type == AVMEDIA_TYPE_SUBTITLE) {
AVDictionaryEntry *lang = av_dict_get(fmt_ctx_->streams[i]->metadata, "language", nullptr, 0);
AVDictionaryEntry *title = av_dict_get(fmt_ctx_->streams[i]->metadata, "title", nullptr, 0);
std::string trackName = "Track " + std::to_string(i);
if (title) trackName = title->value;
else if (lang) trackName = lang->value;
subtitle_tracks_info_ += std::to_string(i) + ":" + trackName + ",";
}
}
// 비디오 초기화
if (video_stream_idx_ >= 0) {
auto par = fmt_ctx_->streams[video_stream_idx_]->codecpar;
auto codec = avcodec_find_decoder(par->codec_id);
video_codec_ctx_ = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(video_codec_ctx_, par);
if (avcodec_open2(video_codec_ctx_, codec, nullptr) == 0) {
JNIEnv* env; jvm_->AttachCurrentThread(&env, nullptr);
if (videoSizeMethodId_) env->CallVoidMethod(listenerObj_, videoSizeMethodId_, video_codec_ctx_->width, video_codec_ctx_->height);
jvm_->DetachCurrentThread();
if (avcodec_open2(video_codec_ctx_, codec, nullptr) == 0 && videoSizeMethodId_) {
env->CallVoidMethod(listenerObj_, videoSizeMethodId_, video_codec_ctx_->width, video_codec_ctx_->height);
}
}
// 💡 [AAudio 초기화 복구]
if (audio_stream_idx_ >= 0) {
auto par = fmt_ctx_->streams[audio_stream_idx_]->codecpar;
auto codec = avcodec_find_decoder(par->codec_id);
@ -162,33 +139,99 @@ void PlayerEngine::renderLoop() {
AAudioStreamBuilder_setChannelCount(builder, 2);
AAudioStreamBuilder_setSampleRate(builder, 48000);
AAudioStreamBuilder_openStream(builder, &audio_stream_);
AAudioStream_requestStart(audio_stream_);
// 💡 1. 버퍼 크기를 가용 가능한 최대치로 늘려서 비디오 렌더링 딜레이에 대비합니다.
AAudioStream_setBufferSizeInFrames(audio_stream_, AAudioStream_getBufferCapacityInFrames(audio_stream_));
// 💡 2. 여기서 호출하던 AAudioStream_requestStart(audio_stream_); 를 삭제합니다! (대기 상태로 둠)
AAudioStreamBuilder_delete(builder);
}
}
isPrepared_ = true;
if (preparedMethodId_) env->CallVoidMethod(listenerObj_, preparedMethodId_);
jvm_->DetachCurrentThread();
}
void PlayerEngine::play(ANativeWindow* window) {
if (!isPrepared_) return;
if (isPlaying_) {
isPaused_ = false;
// 💡 일시정지가 풀릴 때 오디오도 다시 시작
if (audio_stream_) AAudioStream_requestStart(audio_stream_);
return;
}
window_ = window;
ANativeWindow_acquire(window_);
isPlaying_ = true;
isPaused_ = false;
// 💡 처음 재생을 시작할 때 비로소 오디오 엔진을 가동합니다.
if (audio_stream_) AAudioStream_requestStart(audio_stream_);
renderThread_ = std::thread(&PlayerEngine::renderLoop, this);
}
void PlayerEngine::pause() {
isPaused_ = true;
// 💡 영상이 일시정지되면 오디오 버퍼도 소모되지 않게 멈춰줍니다.
if (audio_stream_) AAudioStream_requestPause(audio_stream_);
}
void PlayerEngine::stop() {
if (!isPlaying_) return;
isPlaying_ = false;
if (renderThread_.joinable()) renderThread_.join();
if (window_) { ANativeWindow_release(window_); window_ = nullptr; }
}
void PlayerEngine::seekBy(double seconds) {
seekTargetOffset_ = seconds;
seekReq_ = true;
}
void PlayerEngine::setSpeed(float speed) { playbackSpeed_ = speed > 0.0f ? speed : 1.0f; }
void PlayerEngine::sendSubtitleToKotlin(const char* text) {
if (!jvm_ || !listenerObj_ || !subtitleMethodId_ || !text) return;
JNIEnv* env; bool attached = false;
if (jvm_->GetEnv((void**)&env, JNI_VERSION_1_6) != JNI_OK) { jvm_->AttachCurrentThread(&env, nullptr); attached = true; }
jstring jText = env->NewStringUTF(text);
env->CallVoidMethod(listenerObj_, subtitleMethodId_, jText);
env->DeleteLocalRef(jText);
if (attached) jvm_->DetachCurrentThread();
}
void PlayerEngine::renderLoop() {
LOGI("Player render loop started (AAudio Mode)");
AVFrame* frame = av_frame_alloc();
AVPacket* pkt = av_packet_alloc();
int last_win_w = 0, last_win_h = 0;
while (isPlaying_) {
if (isPaused_) {
std::this_thread::sleep_for(std::chrono::milliseconds(10));
continue; // 💡 일시정지 중이면 루프를 돌며 대기만 함
std::this_thread::sleep_for(std::chrono::milliseconds(20));
continue;
}
// [Seek 요청]
if (seekReq_) {
av_seek_frame(fmt_ctx_, -1, (currentPosSec_ + seekTargetOffset_) * AV_TIME_BASE, AVSEEK_FLAG_BACKWARD);
if (video_codec_ctx_) avcodec_flush_buffers(video_codec_ctx_);
if (audio_codec_ctx_) avcodec_flush_buffers(audio_codec_ctx_);
if (audio_stream_) { AAudioStream_requestFlush(audio_stream_); AAudioStream_requestStart(audio_stream_); }
if (audio_stream_) {
// 💡 안전한 버퍼 초기화를 위해 정지 -> 비움 -> 재시작 순서로 호출
AAudioStream_requestPause(audio_stream_);
AAudioStream_requestFlush(audio_stream_);
AAudioStream_requestStart(audio_stream_);
}
seekReq_ = false;
}
float currentSpeed = playbackSpeed_.load();
if (av_read_frame(fmt_ctx_, pkt) < 0) break;
// [비디오 처리]
if (pkt->stream_index == video_stream_idx_) {
avcodec_send_packet(video_codec_ctx_, pkt);
while (avcodec_receive_frame(video_codec_ctx_, frame) == 0) {
@ -209,58 +252,40 @@ void PlayerEngine::renderLoop() {
}
}
}
// 💡 배속 중이거나 오디오가 없으면 Sleep으로 속도 직접 조절
if (currentSpeed != 1.0f || audio_stream_idx_ < 0) {
std::this_thread::sleep_for(std::chrono::milliseconds((int)(16/currentSpeed)));
}
}
// [오디오 처리]
else if (pkt->stream_index == audio_stream_idx_) {
if (currentSpeed == 1.0f) { // 정배속일 때만 재생
if (currentSpeed == 1.0f) {
avcodec_send_packet(audio_codec_ctx_, pkt);
while (avcodec_receive_frame(audio_codec_ctx_, frame) == 0) {
if (swr_ctx_ && audio_stream_) {
// 1. 출력될 샘플 수 계산
int out_samples = swr_get_out_samples(swr_ctx_, frame->nb_samples);
// 2. 버퍼 크기는 바이트 단위 (샘플 수 * 2채널 * 2바이트(16비트))
uint8_t* out_buf = (uint8_t*)malloc(out_samples * 4);
// 3. 변환 수행 (실제로 변환된 정확한 샘플 수를 반환받음)
int converted_samples = swr_convert(swr_ctx_, &out_buf, out_samples,
(const uint8_t**)frame->data, frame->nb_samples);
int converted_samples = swr_convert(swr_ctx_, &out_buf, out_samples, (const uint8_t**)frame->data, frame->nb_samples);
if (converted_samples > 0) {
// 💡 [핵심 수정] out_size(바이트) 대신 converted_samples(샘플 수)를 전달해야 함
// 세 번째 인자는 'samples' 단위여야 합니다.
AAudioStream_write(audio_stream_, out_buf, converted_samples, 1000000000);
}
free(out_buf);
}
}
}
} else if (pkt->stream_index == sub_stream_idx_) {
AVSubtitle sub;
int got_sub = 0;
// 자막 패킷 디코딩
}
else if (pkt->stream_index == current_sub_stream_idx_) {
AVSubtitle sub; int got_sub = 0;
avcodec_decode_subtitle2(sub_codec_ctx_, &sub, &got_sub, pkt);
if (got_sub) {
for (unsigned int i = 0; i < sub.num_rects; i++) {
// 일반 텍스트 자막 또는 ASS/SSA 스타일 자막 처리
if (sub.rects[i]->type == SUBTITLE_TEXT && sub.rects[i]->text) {
sendSubtitleToKotlin(sub.rects[i]->text);
} else if (sub.rects[i]->type == SUBTITLE_ASS && sub.rects[i]->ass) {
// ASS 자막의 경우 마크업 태그가 포함될 수 있음
sendSubtitleToKotlin(sub.rects[i]->ass);
}
if (sub.rects[i]->type == SUBTITLE_TEXT && sub.rects[i]->text) sendSubtitleToKotlin(sub.rects[i]->text);
else if (sub.rects[i]->type == SUBTITLE_ASS && sub.rects[i]->ass) sendSubtitleToKotlin(sub.rects[i]->ass);
}
avsubtitle_free(&sub); // 메모리 해제 필수
avsubtitle_free(&sub);
}
}
av_packet_unref(pkt);
}
// 자원 해제
if (audio_stream_) { AAudioStream_requestStop(audio_stream_); AAudioStream_close(audio_stream_); audio_stream_ = nullptr; }
av_frame_free(&frame);
av_packet_free(&pkt);
@ -270,4 +295,7 @@ void PlayerEngine::renderLoop() {
if (audio_codec_ctx_) avcodec_free_context(&audio_codec_ctx_);
if (fmt_ctx_) avformat_close_input(&fmt_ctx_);
LOGI("Player loop finished gracefully.");
}
}
std::string PlayerEngine::getSubtitleTracks() { return subtitle_tracks_info_; }
void PlayerEngine::setSubtitleTrack(int streamIndex) { current_sub_stream_idx_ = streamIndex; }

View File

@ -4,7 +4,7 @@
#include <atomic>
#include <jni.h>
#include <android/native_window.h>
#include <aaudio/AAudio.h> // 💡 AAudio 추가
#include <aaudio/AAudio.h>
extern "C" {
#include <libavformat/avformat.h>
@ -20,22 +20,31 @@ public:
PlayerEngine(JavaVM* vm, jobject listenerObj);
~PlayerEngine();
void setDataSource(int videoFd, const std::string& subtitlePath);
void setDataSource(int videoFd, int subtitleFd);
void prepareAsync(); // 💡 비동기 준비 시작 함수
void play(ANativeWindow* window);
void pause();
void stop();
void seekBy(double seconds);
void setSpeed(float speed);
double getCurrentPosition() const { return currentPosSec_; }
std::string getSubtitleTracks();
void setSubtitleTrack(int streamIndex);
private:
void prepareInternal(); // 💡 백그라운드 준비 스레드
void renderLoop();
void sendSubtitleToKotlin(const char* text);
int videoFd_ = -1;
std::string subtitlePath_;
int subtitleFd_ = -1;
std::atomic<bool> isPrepared_{false};
std::atomic<bool> isPlaying_{false};
std::atomic<bool> isPaused_{false}; // 💡 일시정지 플래그 추가
std::atomic<bool> isPaused_{false};
std::thread prepareThread_;
std::thread renderThread_;
ANativeWindow* window_ = nullptr;
@ -44,6 +53,9 @@ private:
std::atomic<float> playbackSpeed_{1.0f};
double currentPosSec_ = 0.0;
int current_sub_stream_idx_ = -1;
std::string subtitle_tracks_info_;
AVFormatContext* fmt_ctx_ = nullptr;
AVCodecContext* video_codec_ctx_ = nullptr;
AVCodecContext* sub_codec_ctx_ = nullptr;
@ -55,10 +67,12 @@ private:
int sub_stream_idx_ = -1;
int audio_stream_idx_ = -1;
AAudioStream* audio_stream_ = nullptr; // 💡 AAudio 복구
AAudioStream* audio_stream_ = nullptr;
JavaVM* jvm_ = nullptr;
jobject listenerObj_ = nullptr;
jmethodID subtitleMethodId_ = nullptr;
jmethodID videoSizeMethodId_ = nullptr;
jmethodID preparedMethodId_ = nullptr; // 💡 JNI 콜백 ID 추가
jmethodID errorMethodId_ = nullptr; // 💡 JNI 에러 콜백 ID 추가
};

View File

@ -1,24 +1,16 @@
//
// Created by JIBUM HAN on 2026. 4. 9..
//
#include <jni.h>
#include <android/native_window_jni.h>
#include "PlayerEngine.h"
// 기존 월페이퍼 코드 어딘가에 있는 전역 JavaVM 포인터를 가져다 씁니다.
extern JavaVM* g_vm;
// C++ 객체를 포인터로 변환하는 헬퍼 함수
template<typename T>
T* toPlayerNative(jlong handle) {
return reinterpret_cast<T*>(handle);
}
T* toPlayerNative(jlong handle) { return reinterpret_cast<T*>(handle); }
extern "C" {
JNIEXPORT jlong JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeInit(JNIEnv *env, jobject thiz) {
// 엔진 생성 시 JavaVM과 Kotlin 콜백 객체(thiz)를 전달합니다.
PlayerEngine* engine = new PlayerEngine(g_vm, thiz);
return reinterpret_cast<jlong>(engine);
}
@ -35,35 +27,49 @@ Java_bums_lunatic_launcher_player_NativePlayer_nativeSetSpeed(JNIEnv *env, jobje
if (engine) engine->setSpeed(speed);
}
JNIEXPORT jdouble JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeGetCurrentPosition(JNIEnv *env, jobject thiz, jlong handle) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
return engine ? engine->getCurrentPosition() : 0.0;
}
JNIEXPORT jstring JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeGetSubtitleTracks(JNIEnv *env, jobject thiz, jlong handle) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
std::string tracks = engine ? engine->getSubtitleTracks() : "";
return env->NewStringUTF(tracks.c_str());
}
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeSetDataSource(JNIEnv *env, jobject thiz, jlong handle, jint videoFd, jstring jSubPath) {
Java_bums_lunatic_launcher_player_NativePlayer_nativeSetSubtitleTrack(JNIEnv *env, jobject thiz, jlong handle, jint index) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine) {
const char* subPath = env->GetStringUTFChars(jSubPath, nullptr);
if (engine) engine->setSubtitleTrack(index);
}
// 엔진으로 FD 번호를 전달
engine->setDataSource(videoFd, subPath);
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativeSetDataSource(JNIEnv *env, jobject thiz, jlong handle, jint videoFd, jint jSubFd) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine) engine->setDataSource(videoFd, jSubFd);
}
env->ReleaseStringUTFChars(jSubPath, subPath);
}
// 💡 추가된 비동기 준비
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativePrepareAsync(JNIEnv *env, jobject thiz, jlong handle) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine) engine->prepareAsync();
}
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativePause(JNIEnv *env, jobject thiz, jlong handle) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine) {
engine->pause(); // 💡 세미콜론 추가
}
if (engine) engine->pause();
}
JNIEXPORT void JNICALL
Java_bums_lunatic_launcher_player_NativePlayer_nativePlay(JNIEnv *env, jobject thiz, jlong handle, jobject surface) {
PlayerEngine* engine = toPlayerNative<PlayerEngine>(handle);
if (engine && surface) {
ANativeWindow* window = ANativeWindow_fromSurface(env, surface);
// RGBA 포맷으로 버퍼 설정
ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBX_8888);
engine->play(window);
ANativeWindow_release(window);

View File

@ -6,6 +6,7 @@ import android.content.Intent
import android.graphics.Color
import android.net.Uri
import android.os.Bundle
import android.os.Environment
import android.text.Editable
import android.text.TextWatcher
import android.view.LayoutInflater
@ -381,6 +382,45 @@ class CompletedFilesFragment : Fragment() {
.show()
}
// Copies the selected files/directories into the device's public Download
// folder, grouped under a dedicated "Lunatic_Downloads" subdirectory.
// NOTE(review): assumes direct File access to the public Download directory is
// permitted (legacy external storage) — confirm against the app's target SDK;
// scoped storage may require MediaStore instead.
private fun copyToDownloadFolder(filesToCopy: List<File>) {
    if (filesToCopy.isEmpty()) return

    val downloadDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS)
    // Keep app exports in one sub-folder instead of littering Download/.
    val targetDir = File(downloadDir, "Lunatic_Downloads")
    if (!targetDir.exists()) targetDir.mkdirs()

    Toast.makeText(context, "다운로드 폴더로 복사를 시작합니다...", Toast.LENGTH_SHORT).show()

    // Copies can be large, so run them off the main thread.
    CoroutineScope(Dispatchers.IO).launch {
        var copyCount = 0
        filesToCopy.forEach { file ->
            try {
                val destFile = File(targetDir, file.name)
                if (file.isDirectory) {
                    // Directories are copied with their entire contents.
                    file.copyRecursively(destFile, overwrite = true)
                } else {
                    file.copyTo(destFile, overwrite = true)
                }
                copyCount++
            } catch (e: Exception) {
                // Best-effort per file: one failure must not abort the batch.
                e.printStackTrace()
            }
        }
        // Report the result on the UI thread.
        withContext(Dispatchers.Main) {
            // FIX: the fragment may have been detached while the copy ran;
            // requireContext() would then throw IllegalStateException.
            if (!isAdded) return@withContext
            Toast.makeText(requireContext(), "${copyCount}개 항목 복사 완료\n(Download/Lunatic_Downloads)", Toast.LENGTH_LONG).show()
            toggleSelectionMode(false) // leave selection mode after export
        }
    }
}
// 💡 선택한 파일 이동 다이얼로그
private fun showMoveDialog() {
if (selectedFiles.isEmpty()) return
@ -477,6 +517,18 @@ class CompletedFilesFragment : Fragment() {
view.findViewById<View>(R.id.btnCancelSelection)?.setOnClickListener { toggleSelectionMode(false) }
view.findViewById<View>(R.id.btnMoveSelected)?.setOnClickListener { showMoveDialog() }
view.findViewById<View>(R.id.btnRenameSelected)?.setOnClickListener { showBatchRenameDialog() }
view.findViewById<View>(R.id.btnCopySelected)?.setOnClickListener {
if (selectedFiles.isEmpty()) return@setOnClickListener
android.app.AlertDialog.Builder(requireContext())
.setTitle("다운로드 폴더로 내보내기")
.setMessage("선택한 ${selectedFiles.size}개 항목을 기기의 다운로드 폴더(Lunatic_Downloads)로 복사하시겠습니까?")
.setPositiveButton("복사") { _, _ ->
copyToDownloadFolder(selectedFiles.toList())
}
.setNegativeButton("취소", null)
.show()
}
view.findViewById<View>(R.id.btnDeleteSelected)?.setOnClickListener {
if (selectedFiles.isEmpty()) return@setOnClickListener

View File

@ -766,14 +766,28 @@ open class GeckoWeb @JvmOverloads constructor(
// Dialog Helpers
private fun showNewSessionDialog(uri: String) {
AlertDialog.Builder(context)
.setTitle("Move To\n$uri")
.setPositiveButton("브라우저로 이동") { _, _ ->
context.startActivity(Intent(Intent.ACTION_VIEW, Uri.parse(uri)).apply { flags = FLAG_ACTIVITY_NEW_TASK or FLAG_ACTIVITY_CLEAR_TOP })
if (uri.startsWith("magnet:?")) {
val intent = Intent(context, TorrentService::class.java).apply {
putExtra("EXTRA_MAGNET_URI", uri)
}
.setNeutralButton("페이지 이동") { _, _ -> loadUrl(uri) }
.setNegativeButton(android.R.string.cancel, null)
.show()
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.startForegroundService(intent)
} else {
context.startService(intent)
}
} else {
AlertDialog.Builder(context)
.setTitle("Move To\n$uri")
.setPositiveButton("브라우저로 이동") { _, _ ->
context.startActivity(Intent(Intent.ACTION_VIEW, Uri.parse(uri)).apply {
flags = FLAG_ACTIVITY_NEW_TASK or FLAG_ACTIVITY_CLEAR_TOP
})
}
.setNeutralButton("페이지 이동") { _, _ -> loadUrl(uri) }
.setNegativeButton(android.R.string.cancel, null)
.show()
}
}
private fun showContextMenu(pageUrl: String, mediaUrl: String) {

View File

@ -54,6 +54,8 @@ import bums.lunatic.launcher.home.tokiz.TokiFragment
import bums.lunatic.launcher.utils.Blog
import bums.lunatic.launcher.utils.beforeDay
import bums.lunatic.launcher.workers.WorkersDb
import com.google.android.gms.wearable.MessageClient
import com.google.android.gms.wearable.Wearable
import com.google.android.material.color.DynamicColors
import com.yausername.ffmpeg.FFmpeg
import com.yausername.youtubedl_android.YoutubeDL
@ -121,6 +123,64 @@ open class NeoRssActivity : CommonActivity() {
var lastAction = MotionEvent.ACTION_HOVER_EXIT
// Routes gesture messages received from the Wearable MessageClient (a paired
// watch app) to whichever RemoteGestureFragment is currently visible.
// Registered in onResume() and removed in onPause() elsewhere in this class.
private val messageListener = MessageClient.OnMessageReceivedListener { messageEvent ->
    // Prefer the explicitly tracked fragment; otherwise fall back to whichever
    // fragment the FragmentManager reports as visible.
    val currentFragment = targetFragment ?: supportFragmentManager.fragments.find { it.isVisible }
    // Only dispatch when the visible fragment opted into remote gestures.
    if (currentFragment is RemoteGestureFragment && currentFragment.isRemoteEnabled) {
        // NOTE(review): the two blocks below are earlier key-event-based routing
        // kept for reference; consider deleting once the path-based routing is stable.
        // when(keyType) {
        // "UP_ARROW"->{currentFragment.onRemoteUp(isDouble)}
        // "DOWN_ARROW"->{currentFragment.onRemoteDown(isDouble)}
        //
        // "LEFT_ARROW"->{currentFragment.onRemoteLeft(isDouble)}
        // "CENTER_SINGLE_TAP" -> { currentFragment.onRemoteCenterClick() }
        // "CENTER_DOUBLE_TAP" -> { currentFragment.onRemoteCenterDoubleClick() }
        // }
        // when(currentFragment) {
        // is RssHome ->{
        // if (currentFragment.binding.layoutRssSummary.root.isVisible) {
        // currentFragment.openGecko(rssData = currentFragment.randomOrNull())
        // } else {
        // currentFragment.doNextPage()
        // }
        // }
        // is TokiFragment -> {
        // currentFragment.back()
        // }
        // is CompletedFilesFragment -> {
        // currentFragment.backPress()
        // }
        // else -> {
        //// showContents(R.id.close)
        // }
        // }
        when (messageEvent.path) {
            "/gesture/next" -> {
                // "next" means different things per fragment type.
                when(currentFragment) {
                    is RssHome ->{
                        // With the summary panel open, "next" opens an article;
                        // otherwise it pages forward through the feed.
                        if (currentFragment.binding.layoutRssSummary.root.isVisible) {
                            currentFragment.openGecko(rssData = currentFragment.randomOrNull())
                        } else {
                            currentFragment.doNextPage()
                        }
                    }
                    is TokiFragment -> {
                        currentFragment.back()
                    }
                    is CompletedFilesFragment -> {
                        currentFragment.backPress()
                    }
                    else -> {
                        // showContents(R.id.close)
                    }
                }
            }
            "/gesture/prev" -> {
                // "prev" maps to the generic left gesture (single tap).
                currentFragment.onRemoteLeft(false)
            }
        }
    }
}
override fun onKeyLongPress(keyCode: Int, ev: KeyEvent?): Boolean {
Blog.LOGE("keyEvent >>>>> ${ev?.device?.name}:${keyCode}: onKeyLongPress >>> ${ev} ")
return super.onKeyLongPress(keyCode, ev)
@ -494,7 +554,7 @@ open class NeoRssActivity : CommonActivity() {
override fun onPause() {
super.onPause()
Wearable.getMessageClient(this).removeListener(messageListener)
}
@ -545,7 +605,10 @@ open class NeoRssActivity : CommonActivity() {
R.id.btn_x -> TokiFragment.newInstanceX()
R.id.btn_i -> TokiFragment.newInstanceI()
R.id.btn_btsearch -> TokiFragment.newInstanceMagnet()
// R.id.btn_img4 -> TokiFragment.newInstanceTumblr()
R.id.btn_img4 -> {
startActivity(Intent(this@NeoRssActivity, TranslatorActivity::class.java))
targetFragment
}
R.id.btn_img3 -> TokiFragment.newInstanceTumblr()
// R.id.btn_img2 -> TokiFragment.newInstancePixiv()
// R.id.btn_img1 -> TokiFragment.newInstanceArtStation()
@ -605,6 +668,7 @@ open class NeoRssActivity : CommonActivity() {
override fun onResume() {
super.onResume()
Blog.LOGE("LauncherActivity onResume")
Wearable.getMessageClient(this).addListener(messageListener)
}
private fun openSearch() {

View File

@ -739,7 +739,7 @@ internal class RssHome : RemoteGestureFragment() , KeyEventHandler {
// 일반 WebView라면: webView.onPause() 및 webView.pauseTimers()
} else {
// 💡 다시 나타날 때: 다시 시작
// binding.geckoWeb?.onResume()
binding.lunaticBrowser.geckoWeb?.onResume()
// 일반 WebView라면: webView.onResume() 및 webView.resumeTimers()
}
}

View File

@ -0,0 +1,174 @@
package bums.lunatic.launcher.home
import android.os.Bundle
import android.speech.tts.TextToSpeech
import android.util.Log
import android.view.View
import android.widget.*
import androidx.appcompat.app.AppCompatActivity
import bums.lunatic.launcher.R
import com.google.mlkit.common.model.DownloadConditions
import com.google.mlkit.nl.languageid.LanguageIdentification
import com.google.mlkit.nl.translate.TranslateLanguage
import com.google.mlkit.nl.translate.Translation
import com.google.mlkit.nl.translate.TranslatorOptions
import java.util.*
/**
 * On-device text translator screen backed by ML Kit.
 *
 * Flow: the user enters text, the source language is auto-detected with the
 * ML Kit language-identification client, the matching translation model is
 * downloaded on demand, and the translated text is shown. Both input and
 * output can be read aloud via [TextToSpeech] at a user-adjustable rate.
 */
class TranslatorActivity : AppCompatActivity(), TextToSpeech.OnInitListener {

    private lateinit var etInputText: EditText
    private lateinit var spinnerTargetLang: Spinner
    private lateinit var btnTranslate: Button
    private lateinit var tvOutputText: TextView
    private lateinit var layoutProgress: LinearLayout
    private lateinit var tvSwap: TextView
    private lateinit var tvTtsInput: TextView
    private lateinit var tvTtsOutput: TextView
    private lateinit var sbTtsSpeed: SeekBar
    private lateinit var tvInputLangStatus: TextView

    private var tts: TextToSpeech? = null
    // BCP-47 style code of the last auto-detected source language, null until detected.
    private var detectedSourceLangCode: String? = null
    // Current TTS speech rate; kept in sync with the SeekBar.
    private var ttsSpeed: Float = 1.0f

    // Display label -> ML Kit language code for the target-language spinner.
    private val targetLanguages = listOf(
        Pair("한국어 🇰🇷", TranslateLanguage.KOREAN),
        Pair("영어 🇺🇸", TranslateLanguage.ENGLISH),
        Pair("일본어 🇯🇵", TranslateLanguage.JAPANESE),
        Pair("중국어 🇨🇳", TranslateLanguage.CHINESE),
        Pair("스페인어 🇪🇸", TranslateLanguage.SPANISH),
        Pair("프랑스어 🇫🇷", TranslateLanguage.FRENCH)
    )

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_translator)
        tts = TextToSpeech(this, this)
        initViews()
        setupListeners()
    }

    // Binds all view references and populates the target-language spinner.
    private fun initViews() {
        etInputText = findViewById(R.id.etInputText)
        spinnerTargetLang = findViewById(R.id.spinnerTargetLang)
        btnTranslate = findViewById(R.id.btnTranslate)
        tvOutputText = findViewById(R.id.tvOutputText)
        layoutProgress = findViewById(R.id.layoutProgress)
        tvSwap = findViewById(R.id.tvSwap)
        tvTtsInput = findViewById(R.id.tvTtsInput)
        tvTtsOutput = findViewById(R.id.tvTtsOutput)
        sbTtsSpeed = findViewById(R.id.sbTtsSpeed)
        tvInputLangStatus = findViewById(R.id.tvInputLangStatus)
        val adapter = ArrayAdapter(this, android.R.layout.simple_spinner_dropdown_item, targetLanguages.map { it.first })
        spinnerTargetLang.adapter = adapter
        spinnerTargetLang.setSelection(1) // default target: English
    }

    private fun setupListeners() {
        btnTranslate.setOnClickListener {
            val text = etInputText.text.toString().trim()
            if (text.isNotEmpty()) {
                val targetCode = targetLanguages[spinnerTargetLang.selectedItemPosition].second
                detectAndTranslate(text, targetCode)
            }
        }
        tvSwap.setOnClickListener {
            val input = etInputText.text.toString()
            val output = tvOutputText.text.toString()
            // Ignore placeholder/status messages (they start with "번역").
            if (output.isNotEmpty() && !output.startsWith("번역")) {
                etInputText.setText(output)
                tvOutputText.text = input
                // If the old source language is one of our targets, pre-select it.
                detectedSourceLangCode?.let { source ->
                    val idx = targetLanguages.indexOfFirst { it.second == source }
                    if (idx != -1) spinnerTargetLang.setSelection(idx)
                }
                detectedSourceLangCode = null // must re-detect after a swap
            }
        }
        sbTtsSpeed.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
            override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
                // Maps progress 0..200 onto a 0.1x..2.0x speech rate (100 == 1.0x).
                ttsSpeed = progress / 100f
                if (ttsSpeed < 0.1f) ttsSpeed = 0.1f
                tts?.setSpeechRate(ttsSpeed)
            }
            override fun onStartTrackingTouch(p0: SeekBar?) {}
            override fun onStopTrackingTouch(p0: SeekBar?) {}
        })
        tvTtsInput.setOnClickListener { playTts(etInputText.text.toString(), detectedSourceLangCode) }
        tvTtsOutput.setOnClickListener {
            val code = targetLanguages[spinnerTargetLang.selectedItemPosition].second
            playTts(tvOutputText.text.toString(), code)
        }
    }

    // Detects the source language of [text], then hands off to performTranslation.
    private fun detectAndTranslate(text: String, targetLangCode: String) {
        tvOutputText.text = "언어 감지 중..."
        btnTranslate.isEnabled = false
        LanguageIdentification.getClient().identifyLanguage(text)
            .addOnSuccessListener { languageCode ->
                // Use a local so the null check smart-casts (avoids `!!`).
                val sourceCode = TranslateLanguage.fromLanguageTag(languageCode)
                detectedSourceLangCode = sourceCode
                if (sourceCode != null && languageCode != "und") {
                    tvInputLangStatus.text = "원본: ${languageCode.uppercase()}"
                    tvTtsInput.visibility = View.VISIBLE
                    performTranslation(text, sourceCode, targetLangCode)
                } else {
                    tvOutputText.text = "언어를 감지할 수 없습니다."
                    btnTranslate.isEnabled = true
                }
            }
            // FIX: without a failure listener, a language-id error left the button
            // disabled and the "언어 감지 중..." placeholder on screen forever.
            .addOnFailureListener {
                tvOutputText.text = "언어를 감지할 수 없습니다."
                btnTranslate.isEnabled = true
            }
    }

    // Downloads the needed translation model (if absent) and translates [text].
    private fun performTranslation(text: String, source: String, target: String) {
        if (source == target) {
            tvOutputText.text = text
            btnTranslate.isEnabled = true
            return
        }
        layoutProgress.visibility = View.VISIBLE
        val options = TranslatorOptions.Builder().setSourceLanguage(source).setTargetLanguage(target).build()
        val translator = Translation.getClient(options)
        translator.downloadModelIfNeeded(DownloadConditions.Builder().build())
            .addOnSuccessListener {
                layoutProgress.visibility = View.GONE
                translator.translate(text)
                    .addOnSuccessListener { translated ->
                        tvOutputText.text = translated
                        tvTtsOutput.visibility = View.VISIBLE
                        btnTranslate.isEnabled = true
                        translator.close() // FIX: release the ML Kit translator
                    }
                    .addOnFailureListener {
                        btnTranslate.isEnabled = true
                        translator.close() // FIX: release the ML Kit translator
                    }
            }
            .addOnFailureListener {
                layoutProgress.visibility = View.GONE
                btnTranslate.isEnabled = true
                translator.close() // FIX: release the ML Kit translator
            }
    }

    // Speaks [text] in the locale matching [langCode]; no-op when either is missing.
    private fun playTts(text: String, langCode: String?) {
        if (text.isEmpty() || langCode == null) return
        val locale = when (langCode) {
            TranslateLanguage.KOREAN -> Locale.KOREAN
            TranslateLanguage.ENGLISH -> Locale.ENGLISH
            TranslateLanguage.JAPANESE -> Locale.JAPANESE
            TranslateLanguage.CHINESE -> Locale.CHINESE
            else -> Locale.getDefault()
        }
        tts?.apply {
            language = locale
            setSpeechRate(ttsSpeed) // apply the rate currently set on the SeekBar
            speak(text, TextToSpeech.QUEUE_FLUSH, null, "ID")
        }
    }

    override fun onInit(status: Int) { if (status == TextToSpeech.SUCCESS) Log.d("TTS", "Ready") }

    override fun onDestroy() { tts?.stop(); tts?.shutdown(); super.onDestroy() }
}

View File

@ -7,17 +7,22 @@ class NativePlayer {
private var subtitleCallback: ((String) -> Unit)? = null
private var videoSizeCallback: ((Int, Int) -> Unit)? = null
// 💡 JNI로부터 받을 콜백 리스너 설정
var onPreparedListener: (() -> Unit)? = null
var onErrorListener: ((Int, String) -> Unit)? = null
    /** Creates the native player instance; returns true when the returned native handle is valid (non-zero). */
    fun initialize(): Boolean {
        nativeHandle = nativeInit()
        return nativeHandle != 0L
    }
fun setDataSource(videoFd: Int, subPath: String) = nativeSetDataSource(nativeHandle, videoFd, subPath)
fun setDataSource(videoFd: Int, subFd: Int) = nativeSetDataSource(nativeHandle, videoFd, subFd)
fun prepareAsync() { if (nativeHandle != 0L) nativePrepareAsync(nativeHandle) }
fun play(surface: Surface) = nativePlay(nativeHandle, surface)
fun pause() { if (nativeHandle != 0L) nativePause(nativeHandle) }
fun stop() = nativeStop(nativeHandle)
fun seekBy(sec: Double) = nativeSeekBy(nativeHandle, sec)
fun setSpeed(speed: Float) = nativeSetSpeed(nativeHandle, speed)
fun pause() {nativePause(nativeHandle)}
@Suppress("unused")
private fun onSubtitleTextDecoded(text: String) {
@ -29,6 +34,18 @@ class NativePlayer {
videoSizeCallback?.invoke(w, h)
}
// 💡 C++에서 준비 완료 시 호출
@Suppress("unused")
private fun onNativePrepared() {
onPreparedListener?.invoke()
}
// 💡 C++에서 오류 발생 시 호출
@Suppress("unused")
private fun onNativeError(errorCode: Int, errorMessage: String) {
onErrorListener?.invoke(errorCode, errorMessage)
}
fun setSubtitleCallback(cb: (String) -> Unit) { subtitleCallback = cb }
fun setVideoSizeCallback(cb: (Int, Int) -> Unit) { videoSizeCallback = cb }
@ -38,14 +55,42 @@ class NativePlayer {
nativeHandle = 0L
}
}
private external fun nativePause(h: Long)
    // Descriptor for one selectable subtitle source.
    data class SubtitleTrack(
        val id: String,          // stream index (as string) for internal tracks; sentinels like "-1"/"EXTERNAL" otherwise
        val name: String,        // human-readable label shown in the selection dialog
        val isExternal: Boolean  // true when the track comes from an external file rather than the container
    )
fun getInternalSubtitleTracks(): List<SubtitleTrack> {
if (nativeHandle == 0L) return emptyList()
val rawStr = nativeGetSubtitleTracks(nativeHandle) ?: return emptyList()
return rawStr.split(",").filter { it.isNotBlank() }.mapNotNull {
val parts = it.split(":")
if (parts.size >= 2) SubtitleTrack(parts[0], parts[1], false) else null
}
}
    /** Selects an embedded subtitle stream by index; -1 disables internal subtitles. */
    fun setInternalSubtitleTrack(streamIndex: Int) {
        if (nativeHandle != 0L) nativeSetSubtitleTrack(nativeHandle, streamIndex)
    }
    /** Current playback position in seconds, or 0.0 when the player is not initialized. */
    fun getCurrentPosition(): Double {
        return if (nativeHandle != 0L) nativeGetCurrentPosition(nativeHandle) else 0.0
    }
private external fun nativeInit(): Long
private external fun nativeSetDataSource(h: Long, fd: Int, sub: String)
private external fun nativeSetDataSource(h: Long, videoFd: Int, subFd: Int)
private external fun nativePrepareAsync(h: Long)
private external fun nativePlay(h: Long, s: Surface)
private external fun nativePause(h: Long)
private external fun nativeStop(h: Long)
private external fun nativeDestroy(h: Long)
private external fun nativeSeekBy(h: Long, s: Double)
private external fun nativeSetSpeed(h: Long, sp: Float)
private external fun nativeGetCurrentPosition(h: Long): Double
private external fun nativeGetSubtitleTracks(h: Long): String
private external fun nativeSetSubtitleTrack(h: Long, index: Int)
companion object {
init { System.loadLibrary("native_renderer") }

View File

@ -1,8 +1,10 @@
package bums.lunatic.launcher.player
import android.app.AlertDialog
import android.content.pm.ActivityInfo
import android.graphics.Color
import android.graphics.SurfaceTexture
import android.os.Build
import android.os.Bundle
import android.os.ParcelFileDescriptor
import android.util.Log
@ -10,12 +12,28 @@ import android.view.*
import android.widget.FrameLayout
import android.widget.ImageButton
import android.widget.TextView
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import bums.lunatic.launcher.R
import bums.lunatic.launcher.player.NativePlayer.SubtitleTrack
import bums.lunatic.launcher.utils.Blog
import com.google.mlkit.nl.languageid.LanguageIdentification
import com.google.mlkit.nl.translate.TranslateLanguage
import com.google.mlkit.nl.translate.Translation
import com.google.mlkit.nl.translate.TranslatorOptions
import kotlinx.coroutines.*
import java.io.File
import java.nio.ByteBuffer
import java.nio.charset.Charset
import java.nio.charset.CodingErrorAction
class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
    // One parsed SRT cue: its display window (in seconds) and text.
    data class SubtitleBlock(
        val startSec: Double,
        val endSec: Double,
        val text: String,
        var translatedText: String? = null // filled in asynchronously once ML Kit translation completes
    )
private lateinit var videoTextureView: TextureView
private lateinit var subtitleView: TextView
@ -33,48 +51,74 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
private var videoWidth: Int = 0
private var videoHeight: Int = 0
private var externalSubtitles = listOf<SubtitleBlock>()
private var subtitleSyncJob: Job? = null
private val allSubtitleTracks = mutableListOf<SubtitleTrack>()
override fun onCreate(savedInstanceState: Bundle?) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
window.attributes.layoutInDisplayCutoutMode = WindowManager.LayoutParams.LAYOUT_IN_DISPLAY_CUTOUT_MODE_SHORT_EDGES
}
super.onCreate(savedInstanceState)
// 1. 데이터 확인
videoPath = intent.getStringExtra("VIDEO_PATH") ?: ""
if (videoPath.isEmpty()) { finish(); return }
subtitlePath = findSubtitleFile(videoPath)
// 2. UI 동적 생성 및 구성
subtitlePath = findSubtitleFile(videoPath)
if (subtitlePath.isNotEmpty()) {
externalSubtitles = parseSrt(File(subtitlePath))
detectLanguageAndTranslate()
}
setupUI()
// 3. 네이티브 플레이어 초기화
nativePlayer = NativePlayer().apply {
initialize()
// 영상 해상도에 따른 화면 크기 조절 콜백
setVideoSizeCallback { width, height ->
videoWidth = width
videoHeight = height
adjustVideoAspectRatio(width, height)
}
// 자막 출력 콜백
setSubtitleCallback { text ->
runOnUiThread {
subtitleView.text = cleanSubtitleText(text)
subtitleView.visibility = if (text.isEmpty()) View.INVISIBLE else View.VISIBLE
}
}
// 💡 C++에서 준비 완료 시 호출
onPreparedListener = {
runOnUiThread {
loadAvailableSubtitles()
if (allSubtitleTracks.size > 1) {
showSubtitleSelectionDialog()
} else {
// 자막 없으면 자동 시작
play(Surface(videoTextureView.surfaceTexture!!))
}
}
}
// 💡 C++에서 오류 발생 시 호출
onErrorListener = { errorCode, errorMessage ->
runOnUiThread {
Toast.makeText(this@PlayerActivity, "오류 [$errorCode]: $errorMessage", Toast.LENGTH_LONG).show()
finish()
}
}
}
// 4. 제스처 설정
setupGestures()
}
private fun setupUI() {
val root = FrameLayout(this).apply { setBackgroundColor(Color.BLACK) }
// 비디오 뷰
videoTextureView = TextureView(this).apply {
surfaceTextureListener = this@PlayerActivity
}
videoTextureView = TextureView(this).apply { surfaceTextureListener = this@PlayerActivity }
// 자막 뷰
subtitleView = TextView(this).apply {
setTextColor(Color.WHITE)
textSize = 22f
@ -83,7 +127,6 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
setPadding(40, 0, 40, 180)
}
// 제스처 감지용 투명 레이어 (삼등분)
val gestureLayer = android.widget.LinearLayout(this).apply {
orientation = android.widget.LinearLayout.HORIZONTAL
weightSum = 3f
@ -97,10 +140,8 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
gestureLayer.addView(centerZone, android.widget.LinearLayout.LayoutParams(0, -1, 1f))
gestureLayer.addView(rightZone, android.widget.LinearLayout.LayoutParams(0, -1, 1f))
// 컨트롤 버튼 (좌측 하단 회전, 우측 하단 숨기기)
val controls = FrameLayout(this)
btnRotate = ImageButton(this).apply {
setImageResource(android.R.drawable.ic_menu_rotate) // 기본 리소스 사용
setImageResource(android.R.drawable.ic_menu_rotate)
setBackgroundColor(Color.TRANSPARENT)
setOnClickListener { toggleOrientation() }
}
@ -113,8 +154,6 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
root.addView(videoTextureView, FrameLayout.LayoutParams(-2, -2, Gravity.CENTER))
root.addView(subtitleView)
root.addView(gestureLayer)
// 버튼 배치
root.addView(btnRotate, FrameLayout.LayoutParams(150, 150, Gravity.BOTTOM or Gravity.START).apply { setMargins(30,0,0,30) })
root.addView(btnHideVideo, FrameLayout.LayoutParams(150, 150, Gravity.BOTTOM or Gravity.END).apply { setMargins(0,0,30,30) })
@ -122,37 +161,89 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
hideSystemUI()
}
    // Surface ready: open read-only file descriptors for the video (and the optional
    // external subtitle), hand them to native code, and kick off async prepare.
    override fun onSurfaceTextureAvailable(st: SurfaceTexture, w: Int, h: Int) {
        val videoFile = File(videoPath)
        if (videoFile.exists()) {
            val videoPfd = ParcelFileDescriptor.open(videoFile, ParcelFileDescriptor.MODE_READ_ONLY)
            // detachFd() transfers fd ownership; the native side becomes responsible for closing it.
            val videoFd = videoPfd.detachFd()
            var subFd = -1
            if (subtitlePath.isNotEmpty()) {
                val subFile = File(subtitlePath)
                if (subFile.exists()) {
                    val subPfd = ParcelFileDescriptor.open(subFile, ParcelFileDescriptor.MODE_READ_ONLY)
                    subFd = subPfd.detachFd()
                }
            }
            // Hand the sources to C++ and request an asynchronous prepare;
            // onPreparedListener / onErrorListener report the outcome.
            nativePlayer?.setDataSource(videoFd, subFd)
            nativePlayer?.prepareAsync()
        }
    }
private fun loadAvailableSubtitles() {
allSubtitleTracks.clear()
allSubtitleTracks.add(SubtitleTrack("-1", "자막 끄기", false))
val internalTracks = nativePlayer?.getInternalSubtitleTracks() ?: emptyList()
allSubtitleTracks.addAll(internalTracks)
if (externalSubtitles.isNotEmpty()) {
allSubtitleTracks.add(SubtitleTrack("EXTERNAL", "외부 자막 (SRT)", true))
}
}
    // Modal, non-cancelable chooser shown right after prepare when more than one
    // track exists; playback is deliberately deferred until a track is picked.
    private fun showSubtitleSelectionDialog() {
        val trackNames = allSubtitleTracks.map { it.name }.toTypedArray()
        AlertDialog.Builder(this, android.R.style.Theme_DeviceDefault_Dialog_Alert)
            .setTitle("자막 선택")
            .setCancelable(false)
            .setItems(trackNames) { _, which ->
                selectSubtitleTrack(allSubtitleTracks[which])
                nativePlayer?.play(Surface(videoTextureView.surfaceTexture!!))
            }
            .show()
    }
private fun selectSubtitleTrack(track: SubtitleTrack) {
if (track.isExternal) {
nativePlayer?.setInternalSubtitleTrack(-1)
startSubtitleSyncLoop()
} else {
subtitleSyncJob?.cancel()
subtitleView.visibility = View.INVISIBLE
nativePlayer?.setInternalSubtitleTrack(track.id.toInt())
}
}
private fun setupGestures() {
val gestureLayer = (videoTextureView.parent as FrameLayout).getChildAt(2) as android.widget.LinearLayout
val left = gestureLayer.getChildAt(0)
val center = gestureLayer.getChildAt(1)
val right = gestureLayer.getChildAt(2)
// 1. 가운데: 재생/일시정지
center.setOnClickListener {
isPlaying = !isPlaying
if (isPlaying) nativePlayer?.play(Surface(videoTextureView.surfaceTexture))
if (isPlaying) nativePlayer?.play(Surface(videoTextureView.surfaceTexture!!))
else nativePlayer?.pause()
}
// 2. 오른쪽: 롱프레스 4배속
val rightDetector = GestureDetector(this, object : GestureDetector.SimpleOnGestureListener() {
override fun onLongPress(e: MotionEvent) { nativePlayer?.setSpeed(4.0f) }
override fun onSingleTapUp(e: MotionEvent): Boolean {
nativePlayer?.seekBy(20.0) // 탭하면 10초 앞으로
nativePlayer?.seekBy(20.0)
return true
}
})
right.setOnTouchListener { v, event ->
right.setOnTouchListener { _, event ->
rightDetector.onTouchEvent(event)
if (event.action == MotionEvent.ACTION_UP || event.action == MotionEvent.ACTION_CANCEL) {
nativePlayer?.setSpeed(1.0f)
}
true
}
// 3. 왼쪽: 롱프레스 주기적 뒤로가기
val leftDetector = GestureDetector(this, object : GestureDetector.SimpleOnGestureListener() {
override fun onLongPress(e: MotionEvent) {
leftLongPressJob = CoroutineScope(Dispatchers.Main).launch {
@ -163,11 +254,11 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
}
}
override fun onSingleTapUp(e: MotionEvent): Boolean {
nativePlayer?.seekBy(-10.0) // 탭하면 10초 뒤로
nativePlayer?.seekBy(-10.0)
return true
}
})
left.setOnTouchListener { v, event ->
left.setOnTouchListener { _, event ->
leftDetector.onTouchEvent(event)
if (event.action == MotionEvent.ACTION_UP || event.action == MotionEvent.ACTION_CANCEL) {
leftLongPressJob?.cancel()
@ -178,16 +269,11 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
    // Re-assert immersive fullscreen and re-fit the video after rotation / size changes.
    override fun onConfigurationChanged(newConfig: android.content.res.Configuration) {
        super.onConfigurationChanged(newConfig)
        hideSystemUI()
        // Re-align only when a native resolution has already been reported.
        if (videoWidth > 0 && videoHeight > 0) adjustVideoAspectRatio(videoWidth, videoHeight)
    }
// 💡 화면 채움 (Fill/Crop)
private fun adjustVideoAspectRatio(videoW: Int, videoH: Int) {
runOnUiThread {
// 1. 현재 기기의 실제 가용 화면 크기를 가져옵니다.
@ -242,27 +328,238 @@ class PlayerActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
private fun cleanSubtitleText(text: String): String = text.replace(Regex("\\{.*?\\}"), "")
override fun onSurfaceTextureAvailable(st: SurfaceTexture, w: Int, h: Int) {
val file = File(videoPath)
if (file.exists()) {
val pfd = ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY)
nativePlayer?.setDataSource(pfd.detachFd(), subtitlePath)
nativePlayer?.play(Surface(st))
var lastSubTitle : String = ""
private fun startSubtitleSyncLoop() {
subtitleSyncJob?.cancel()
subtitleSyncJob = CoroutineScope(Dispatchers.Main).launch {
while (isActive) {
if (isPlaying) {
val currentSec = nativePlayer?.getCurrentPosition() ?: 0.0
val currentSub = externalSubtitles.find { currentSec in it.startSec..it.endSec }
if (currentSub != null) {
// 💡 번역본이 존재하면 [번역본] + [줄바꿈] + [원본] 형태로 보여주거나, 번역본만 보여줍니다.
val displayText = if (currentSub.translatedText != null) {
"${currentSub.translatedText}\n${cleanSubtitleText(currentSub.text)}"
// (원본이 보기 싫다면 그냥 currentSub.translatedText 만 넣으셔도 됩니다!)
} else {
cleanSubtitleText(currentSub.text)
}
if (displayText != lastSubTitle) {
lastSubTitle = displayText
subtitleView.text = displayText
subtitleView.visibility = View.VISIBLE
}
} else {
subtitleView.visibility = View.INVISIBLE
}
}
delay(100)
}
}
}
private fun readTextWithEncoding(file: File): String {
val bytes = file.readBytes()
if (bytes.isEmpty()) return ""
// 1. 꼬리표(BOM) 100% 확정 검사
if (bytes.size >= 3 && bytes[0] == 0xEF.toByte() && bytes[1] == 0xBB.toByte() && bytes[2] == 0xBF.toByte()) {
return String(bytes, 3, bytes.size - 3, Charsets.UTF_8)
}
if (bytes.size >= 2 && bytes[0] == 0xFF.toByte() && bytes[1] == 0xFE.toByte()) {
return String(bytes, 2, bytes.size - 2, Charset.forName("UTF-16LE"))
}
if (bytes.size >= 2 && bytes[0] == 0xFE.toByte() && bytes[1] == 0xFF.toByte()) {
return String(bytes, 2, bytes.size - 2, Charset.forName("UTF-16BE"))
}
// 2. UTF-16 검사 (Null 바이트 비율)
val nullCount = bytes.count { it == 0.toByte() }
if (nullCount > bytes.size / 4) {
return String(bytes, Charset.forName("UTF-16LE"))
}
// 💡 3. UTF-8 강제 우선권 부여 (중국어 블랙홀 방지)
try {
val decoder = Charsets.UTF_8.newDecoder()
decoder.onMalformedInput(CodingErrorAction.REPLACE)
decoder.onUnmappableCharacter(CodingErrorAction.REPLACE)
decoder.replaceWith("\uFFFD")
val utf8Text = decoder.decode(ByteBuffer.wrap(bytes)).toString()
val utf8Errors = utf8Text.count { it == '\uFFFD' }
// 에러가 5% 미만이라면 사실상 UTF-8 파일이 부분 손상된 것으로 간주하고 확정!
if (utf8Errors < utf8Text.length / 20) {
return utf8Text
}
} catch (e: Exception) {}
// 💡 4. 한국어/일본어 전용 채점 (GB18030 같은 블랙홀은 리스트에서 배제)
val charsets = listOf("CP949", "Shift_JIS", "EUC-JP")
var bestText = String(bytes, Charsets.UTF_8) // 최후의 보루는 UTF-8
var minErrors = Int.MAX_VALUE
for (charsetName in charsets) {
try {
val decoder = Charset.forName(charsetName).newDecoder()
decoder.onMalformedInput(CodingErrorAction.REPLACE)
decoder.onUnmappableCharacter(CodingErrorAction.REPLACE)
decoder.replaceWith("\uFFFD")
val text = decoder.decode(ByteBuffer.wrap(bytes)).toString()
val errorCount = text.count { it == '\uFFFD' }
// 에러가 가장 적은 인코딩 채택
if (errorCount < minErrors) {
minErrors = errorCount
bestText = text
}
} catch (e: Exception) { }
}
return bestText
}
private fun parseSrt(file: File): List<SubtitleBlock> {
val result = mutableListOf<SubtitleBlock>()
if (!file.exists()) return result
try {
// 💡 기존의 file.readText() 대신 우리가 만든 스마트 함수를 사용합니다.
val content = readTextWithEncoding(file)
// --- 아래는 기존과 완전히 동일 ---
val blocks = content.split("\n\n", "\r\n\r\n")
for (block in blocks) {
val lines = block.lines().filter { it.isNotBlank() }
if (lines.size >= 3) {
val timeLine = lines[1]
val text = lines.drop(2).joinToString("\n")
val times = timeLine.split(" --> ")
if (times.size == 2) {
result.add(SubtitleBlock(parseSrtTime(times[0]), parseSrtTime(times[1]), text))
}
}
}
} catch (e: Exception) {
Log.e("PlayerActivity", "Subtitle parsing error", e)
}
return result
}
private fun parseSrtTime(timeStr: String): Double {
val cleanStr = timeStr.trim().replace(",", ".")
val parts = cleanStr.split(":")
if (parts.size == 3) {
val h = parts[0].toDoubleOrNull() ?: 0.0
val m = parts[1].toDoubleOrNull() ?: 0.0
val s = parts[2].toDoubleOrNull() ?: 0.0
return (h * 3600) + (m * 60) + s
}
return 0.0
}
private fun hideSystemUI() {
window.decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
or View.SYSTEM_UI_FLAG_FULLSCREEN or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
window.setDecorFitsSystemWindows(false)
window.insetsController?.let {
it.hide(WindowInsets.Type.statusBars() or WindowInsets.Type.navigationBars())
it.systemBarsBehavior = WindowInsetsController.BEHAVIOR_SHOW_TRANSIENT_BARS_BY_SWIPE
}
} else {
@Suppress("DEPRECATION")
window.decorView.systemUiVisibility = (View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
or View.SYSTEM_UI_FLAG_LAYOUT_STABLE
or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
or View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
or View.SYSTEM_UI_FLAG_FULLSCREEN)
}
}
    // Remaining TextureView.SurfaceTextureListener callbacks.
    override fun onSurfaceTextureSizeChanged(st: SurfaceTexture, w: Int, h: Int) {}
    // Stop native playback when the surface goes away; returning true lets the system release the SurfaceTexture.
    override fun onSurfaceTextureDestroyed(st: SurfaceTexture): Boolean { nativePlayer?.stop(); return true }
    override fun onSurfaceTextureUpdated(st: SurfaceTexture) {}
private fun detectLanguageAndTranslate() {
if (externalSubtitles.isEmpty()) return
// 정확도를 위해 자막 앞부분의 텍스트를 적당히 뭉쳐서 샘플로 만듭니다.
val sampleText = externalSubtitles.take(10).joinToString(" ") { it.text }
val languageIdentifier = LanguageIdentification.getClient()
languageIdentifier.identifyLanguage(sampleText)
.addOnSuccessListener { languageCode ->
if (languageCode == "und") {
Blog.LOGE("언어 감지 실패 (알 수 없는 언어)")
} else {
Blog.LOGE("💡 감지된 언어 코드: $languageCode")
// 만약 이미 한국어(ko)라면 번역할 필요가 없으므로 종료
if (languageCode == "ko") return@addOnSuccessListener
// 감지된 언어 코드를 번역기가 이해할 수 있는 코드로 변환
val sourceLang = TranslateLanguage.fromLanguageTag(languageCode)
if (sourceLang != null) {
startTranslationProcess(sourceLang)
} else {
Blog.LOGE("번역을 지원하지 않는 언어입니다: $languageCode")
}
}
}
.addOnFailureListener { e ->
Blog.LOGE("언어 감지 에러: ${e.message}")
}
}
// 💡 2. 감지된 언어를 바탕으로 한국어로 번역하는 함수
private fun startTranslationProcess(sourceLang: String) {
val options = TranslatorOptions.Builder()
.setSourceLanguage(sourceLang)
.setTargetLanguage(TranslateLanguage.KOREAN) // 타겟은 무조건 한국어!
.build()
val translator = Translation.getClient(options)
Toast.makeText(this, "번역 모델 확인 중...", Toast.LENGTH_SHORT).show()
// 해당 언어 모델이 폰에 없으면 다운로드 (최초 1회, 약 30MB)
translator.downloadModelIfNeeded()
.addOnSuccessListener {
Toast.makeText(this, "자막 번역을 시작합니다!", Toast.LENGTH_SHORT).show()
// 💡 3. 수백~수천 줄의 자막을 백그라운드에서 한방에 번역 (UI 멈춤 방지)
CoroutineScope(Dispatchers.IO).launch {
for (block in externalSubtitles) {
try {
// ML Kit의 번역은 원래 비동기(Task)지만, Tasks.await를 쓰면 동기적으로 쫙 뽑아낼 수 있습니다.
val translated = com.google.android.gms.tasks.Tasks.await(translator.translate(block.text))
block.translatedText = translated
} catch (e: Exception) {
block.translatedText = block.text // 에러나면 원본 유지
}
}
withContext(Dispatchers.Main) {
Toast.makeText(this@PlayerActivity, "자막 번역 완료!", Toast.LENGTH_SHORT).show()
}
}
}
.addOnFailureListener {
Toast.makeText(this, "번역 모델 다운로드 실패", Toast.LENGTH_SHORT).show()
}
}
override fun onDestroy() {
super.onDestroy()
nativePlayer?.destroy()
leftLongPressJob?.cancel()
subtitleSyncJob?.cancel()
}
}

View File

@ -0,0 +1,163 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:padding="16dp"
android:background="#F5F5F5">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="🌐 ML Kit Translator"
android:textSize="20sp"
android:textStyle="bold"
android:layout_marginBottom="16dp"/>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="4dp">
<TextView
android:id="@+id/tvInputLangStatus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="원본 (자동 감지)"
android:textColor="#666666"
android:textStyle="bold"
android:layout_centerVertical="true"/>
<TextView
android:id="@+id/tvTtsInput"
android:layout_width="40dp"
android:layout_height="40dp"
android:layout_alignParentEnd="true"
android:gravity="center"
android:text="volume_up"
android:textSize="24sp"
android:fontFamily="@font/material_symbols"
android:textColor="#2196F3"
android:background="?attr/selectableItemBackgroundBorderless"
android:visibility="gone"/>
</RelativeLayout>
<EditText
android:id="@+id/etInputText"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:background="@android:color/white"
android:gravity="top|start"
android:hint="번역할 내용을 입력하세요."
android:padding="12dp"
android:inputType="textMultiLine"
android:elevation="2dp"/>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:layout_marginTop="8dp"
android:layout_marginBottom="8dp"
android:background="#E0E0E0"
android:padding="8dp">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<TextView
android:id="@+id/tvSwap"
android:layout_width="48dp"
android:layout_height="48dp"
android:gravity="center"
android:text="swap_vert"
android:textSize="28sp"
android:fontFamily="@font/material_symbols"
android:textColor="#444444"
android:background="?attr/selectableItemBackgroundBorderless"/>
<Spinner
android:id="@+id/spinnerTargetLang"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_toEndOf="@id/tvSwap"
android:layout_centerVertical="true"
android:layout_marginStart="8dp"/>
<Button
android:id="@+id/btnTranslate"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentEnd="true"
android:text="번역"/>
</RelativeLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:gravity="center_vertical"
android:layout_marginTop="4dp">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="TTS 속도"
android:textSize="12sp"
android:layout_marginEnd="8dp"/>
<SeekBar
android:id="@+id/sbTtsSpeed"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="200"
android:progress="100"/> </LinearLayout>
</LinearLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="4dp">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="번역 결과"
android:textColor="#666666"
android:textStyle="bold"
android:layout_centerVertical="true"/>
<TextView
android:id="@+id/tvTtsOutput"
android:layout_width="40dp"
android:layout_height="40dp"
android:layout_alignParentEnd="true"
android:gravity="center"
android:text="volume_up"
android:textSize="24sp"
android:fontFamily="@font/material_symbols"
android:textColor="#2196F3"
android:background="?attr/selectableItemBackgroundBorderless"
android:visibility="gone"/>
</RelativeLayout>
<TextView
android:id="@+id/tvOutputText"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:background="@android:color/white"
android:padding="12dp"
android:textIsSelectable="true"
android:textSize="16sp"
android:elevation="2dp"
android:hint="번역 결과가 표시됩니다."/>
<LinearLayout
android:id="@+id/layoutProgress"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:gravity="center_vertical"
android:visibility="gone"
android:layout_marginTop="8dp">
<ProgressBar android:layout_width="20dp" android:layout_height="20dp" android:layout_marginEnd="8dp" />
<TextView android:layout_width="wrap_content" android:layout_height="wrap_content" android:text="모델 다운로드 중..." android:textSize="12sp" />
</LinearLayout>
</LinearLayout>

View File

@ -147,6 +147,15 @@
android:textStyle="bold"
android:gravity="center"
android:padding="12dp"/>
<TextView
android:id="@+id/btnCopySelected"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="외부폴더로 복사"
android:textStyle="bold"
android:gravity="center"
android:padding="12dp"/>
<TextView
android:id="@+id/btnMoveSelected"
android:layout_width="0dp"

View File

@ -120,12 +120,11 @@
style="@style/CommonFabStyle"
android:id="@+id/btn_img3"
/>
<!-- <bums.lunatic.launcher.view.FloatingActionButton-->
<!-- app:fab_label="⛏️"-->
<!-- android:visibility="gone"-->
<!-- style="@style/CommonFabStyle"-->
<!-- android:id="@+id/btn_img4"-->
<!-- />-->
<bums.lunatic.launcher.view.FloatingActionButton
app:fab_label="🌐"
style="@style/CommonFabStyle"
android:id="@+id/btn_img4"
/>
<bums.lunatic.launcher.view.FloatingActionButton
app:fab_label="🌍"

View File

@ -17,6 +17,15 @@
<item name="scrimBackground">@color/almost_transparent</item>
</style>
<style name="Theme.Player" parent="Theme.Material3.DynamicColors.DayNight.NoActionBar">
<!-- Background -->
<item name="android:windowShowWallpaper">false</item>
<item name="android:windowIsTranslucent">false</item>
<item name="android:windowBackground">@android:color/black</item>
</style>
<style name="roundedImageView" parent="">
<item name="cornerFamily">rounded</item>
<item name="cornerSize">10dp</item>

View File

@ -28,6 +28,8 @@
android:name="android.support.wearable.complications.UPDATE_PERIOD_SECONDS"
android:value="0" />
</service>
<receiver android:name=".tile.TileActionReceiver" android:exported="false" />
<service
android:name=".tile.MainTileService"
android:exported="true"

View File

@ -1,8 +1,14 @@
package bums.lunatic.launcher.tile
import android.app.PendingIntent
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import androidx.wear.protolayout.ActionBuilders
import androidx.wear.protolayout.ColorBuilders.argb
import androidx.wear.protolayout.DimensionBuilders.dp
import androidx.wear.protolayout.LayoutElementBuilders
import androidx.wear.protolayout.ModifiersBuilders
import androidx.wear.protolayout.ResourceBuilders
import androidx.wear.protolayout.TimelineBuilders
import androidx.wear.protolayout.material.Colors
@ -14,6 +20,7 @@ import androidx.wear.tiles.TileBuilders
import androidx.wear.tiles.tooling.preview.Preview
import androidx.wear.tiles.tooling.preview.TilePreviewData
import androidx.wear.tooling.preview.devices.WearDevices
import com.google.android.gms.wearable.Wearable
import com.google.android.horologist.annotations.ExperimentalHorologistApi
import com.google.android.horologist.tiles.SuspendingTileService
@ -64,22 +71,77 @@ private fun tile(
.build()
}
private fun tileLayout(
requestParams: RequestBuilders.TileRequest,
context: Context,
): LayoutElementBuilders.LayoutElement {
/**
 * Builds the tile layout: four 56dp navigation buttons (UP/DOWN/LEFT/RIGHT)
 * offset from the center via transformation translations.
 * Fix: removes leftover "Hello World!" Text.Builder lines that were merged into
 * the body and broke compilation.
 */
private fun tileLayout(requestParams: RequestBuilders.TileRequest, context: Context): LayoutElementBuilders.LayoutElement {
    // One rounded 56x56dp button labeled [label], shifted by (x, y) dp from center.
    fun createNavButton(label: String, path: String, x: Float, y: Float): LayoutElementBuilders.LayoutElement {
        // NOTE(review): this PendingIntent is built but never attached to anything —
        // the clickable below uses LaunchAction with the receiver's class name, and
        // LaunchAction targets activities, not broadcast receivers. Confirm whether
        // taps actually reach TileActionReceiver, or switch to a mechanism that
        // fires this PendingIntent.
        val intent = Intent(context, TileActionReceiver::class.java).apply {
            putExtra("path", path)
        }
        val pendingIntent = PendingIntent.getBroadcast(
            context, path.hashCode(), intent,
            PendingIntent.FLAG_UPDATE_CURRENT or PendingIntent.FLAG_IMMUTABLE
        )
        return LayoutElementBuilders.Box.Builder()
            .setModifiers(ModifiersBuilders.Modifiers.Builder()
                .setClickable(ModifiersBuilders.Clickable.Builder()
                    .setOnClick(ActionBuilders.LaunchAction.Builder()
                        .setAndroidActivity(ActionBuilders.AndroidActivity.Builder()
                            .setPackageName(context.packageName)
                            .setClassName(TileActionReceiver::class.java.name) // receiver class used as the target
                            .build())
                        .build())
                    .build())
                .setTransformation(ModifiersBuilders.Transformation.Builder()
                    // Directional offset (up: 0,-50 / down: 0,+50 / left: -50,0 / right: +50,0)
                    .setTranslationX(dp(x))
                    .setTranslationY(dp(y))
                    .build())
                .setBackground(ModifiersBuilders.Background.Builder()
                    .setColor(argb(0xFF303030.toInt()))
                    .setCorner(ModifiersBuilders.Corner.Builder().setRadius(dp(15f)).build())
                    .build())
                .build())
            .setWidth(dp(56f)).setHeight(dp(56f))
            .addContent(Text.Builder(context, label).build())
            .build()
    }
    return PrimaryLayout.Builder(requestParams.deviceConfiguration)
        .setResponsiveContentInsetEnabled(true)
        .setContent(
            LayoutElementBuilders.Box.Builder()
                .addContent(createNavButton("UP", "/gesture/up", 0f, -50f))
                .addContent(createNavButton("DOWN", "/gesture/down", 0f, 50f))
                .addContent(createNavButton("LEFT", "/gesture/left", -50f, 0f))
                .addContent(createNavButton("RIGHT", "/gesture/right", 50f, 0f))
                .build()
        ).build()
}
@Preview(device = WearDevices.SMALL_ROUND)
@Preview(device = WearDevices.LARGE_ROUND)
fun tilePreview(context: Context) = TilePreviewData(::resources) {
tile(it, context)
}
// Broadcast receiver triggered from the tile; forwards the gesture path to every
// connected Wearable node (the phone-side launcher) as a fire-and-forget message.
class TileActionReceiver : BroadcastReceiver() {
    override fun onReceive(context: Context, intent: Intent) {
        val path = intent.getStringExtra("path") ?: return
        // Send the message to each connected node; delivery results are not awaited.
        Wearable.getNodeClient(context).connectedNodes.addOnSuccessListener { nodes ->
            for (node in nodes) {
                Wearable.getMessageClient(context).sendMessage(node.id, path, null)
            }
        }
    }
}