Compare commits

...

2 Commits

  1. app/build.gradle (2 changes)
  2. app/src/main/java/net/pokeranalytics/android/ui/modules/handhistory/replayer/FrameManager.kt (6 changes)
  3. app/src/main/java/net/pokeranalytics/android/ui/modules/handhistory/replayer/ReplayExportService.kt (335 changes)
  4. app/src/main/java/net/pokeranalytics/android/ui/modules/handhistory/replayer/ReplayerAnimator.kt (1 change)
  5. build.gradle (2 changes)

@@ -143,7 +143,7 @@ dependencies {
     implementation 'org.apache.commons:commons-math3:3.6.1'
     // ffmpeg for encoding video (HH export)
-    implementation 'com.arthenica:ffmpeg-kit-min-gpl:4.4.LTS'
+    // implementation 'com.arthenica:ffmpeg-kit-min-gpl:4.4.LTS'
     // Camera
     def camerax_version = "1.1.0"

@@ -3,9 +3,9 @@ package net.pokeranalytics.android.ui.modules.handhistory.replayer
 import net.pokeranalytics.android.exceptions.PAIllegalStateException

 enum class FrameType(val visualOccurences: Int) {
-    STATE(150),
-    GATHER_ANIMATION(2),
-    DISTRIBUTION_ANIMATION(2)
+    STATE(50),
+    GATHER_ANIMATION(1),
+    DISTRIBUTION_ANIMATION(1)
 }

 class FrameManager {
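A note on the new frame counts: both export paths encode at 20 fps (fps=20 in the old FFmpeg concat command, frameRate = 20 in the new MediaMuxer path), and each frame is fed to the encoder visualOccurences times, so the value is effectively a hold duration. STATE(150) held every table state for about 150 / 20 = 7.5 s of video; STATE(50) cuts that to 2.5 s, and the gather/distribution animation frames drop from two repeats (0.1 s) to one (0.05 s).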

@@ -3,6 +3,7 @@ package net.pokeranalytics.android.ui.modules.handhistory.replayer
 import android.app.PendingIntent
 import android.app.Service
 import android.content.ContentValues
+import android.content.Context
 import android.content.Intent
 import android.net.Uri
 import android.os.Binder
@@ -11,13 +12,17 @@ import android.os.Environment
 import android.os.IBinder
 import android.provider.MediaStore
 import androidx.core.content.FileProvider
-import com.arthenica.ffmpegkit.FFmpegKit
+import android.graphics.Bitmap
+import android.media.MediaCodec
+import android.media.MediaCodecInfo
+import android.media.MediaFormat
+import android.media.MediaMuxer
+import java.nio.ByteBuffer
 import io.realm.Realm
 import kotlinx.coroutines.*
 import net.pokeranalytics.android.R
 import net.pokeranalytics.android.exceptions.PAIllegalStateException
 import net.pokeranalytics.android.model.realm.handhistory.HandHistory
-import net.pokeranalytics.android.util.FFMPEG_DESCRIPTOR_FILE
 import net.pokeranalytics.android.util.TriggerNotification
 import net.pokeranalytics.android.util.extensions.dateTimeFileFormatted
 import net.pokeranalytics.android.util.extensions.findById
@@ -52,11 +57,7 @@ class ReplayExportService : Service() {
         fun videoExport(handHistoryId: String) {
             this@ReplayExportService.handHistoryId = handHistoryId
-            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
-                startFFMPEGVideoExport()
-            } else {
-                startFFMPEGVideoExportPreQ()
-            }
+            startFFMPEGVideoExport()
         }

         fun gifExport(handHistoryId: String) {
fun gifExport(handHistoryId: String) { fun gifExport(handHistoryId: String) {
@@ -159,7 +160,6 @@ class ReplayExportService : Service() {
                 val animator = ReplayerAnimator(handHistory, true)
                 val square = 1024
                 val width = square
                 val height = square
@@ -167,60 +167,35 @@ class ReplayExportService : Service() {
                 val drawer = TableDrawer()
                 drawer.configurePaints(context, animator)
-                // generates all images and file descriptor
-                Timber.d("Generating images for video...")
-                val tmpDir = animator.generateVideoContent(this@ReplayExportService)
-                val dpath = "${tmpDir.path}/$FFMPEG_DESCRIPTOR_FILE"
                 val formattedDate = Date().dateTimeFileFormatted
                 val fileName = "hand_${formattedDate}.mp4"
                 val outputDirectory = context.getExternalFilesDir(Environment.DIRECTORY_MOVIES) ?: throw PAIllegalStateException("File is invalid")
-                val output = "${outputDirectory.path}/$fileName"
-                Timber.d("Assembling images for video...")
-                val command = "-f concat -safe 0 -i $dpath -vb 20M -vsync vfr -s ${width}x${height} -vf fps=20 -pix_fmt yuv420p $output"
-                FFmpegKit.executeAsync(command) {
-                    when {
-                        it.returnCode.isSuccess -> {
-                            Timber.d("FFMPEG command execution completed successfully")
-                        }
-                        it.returnCode.isCancel -> {
-                            Timber.d("Command execution cancelled by user.")
-                        }
-                        else -> {
-                            Timber.d(String.format("Command execution failed with rc=%d and the output below.", it.returnCode.value))
-                        }
-                    }
-                    File(dpath).delete()
-                    tmpDir.delete()
-                    val file = File(output)
+                val outputFile = File(outputDirectory, fileName)
+                Timber.d("Creating video with MediaMuxer...")
+                try {
+                    createVideoWithMediaMuxer(animator, context, outputFile, width, height)
                     val resolver = applicationContext.contentResolver
-                    // Q version tested before calling the function
                     val videoCollection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY)
+                    Timber.d("getContentUri = $videoCollection...")
                     val fileDetails = ContentValues().apply {
                         Timber.d("set file details = $fileName")
                         put(MediaStore.Video.Media.DISPLAY_NAME, fileName)
-                        put(MediaStore.Images.Media.MIME_TYPE, FileType.VIDEO_MP4.value)
+                        put(MediaStore.Video.Media.MIME_TYPE, FileType.VIDEO_MP4.value)
                     }
-                    // copy video to nice path
                     resolver.insert(videoCollection, fileDetails)?.let { uri ->
                         Timber.d("copy file at uri = $uri")
                         val os = resolver.openOutputStream(uri)
-                        os?.write(file.readBytes())
+                        os?.write(outputFile.readBytes())
                         os?.close()
-                        file.delete() // delete temp file
+                        outputFile.delete() // delete temp file
                         notifyUser(uri, FileType.VIDEO_MP4)
@@ -231,59 +206,173 @@ class ReplayExportService : Service() {
                         Timber.w("Resolver insert ended without uri...")
                     }
+                } catch (e: Exception) {
+                    Timber.e(e, "Error creating video with MediaMuxer")
+                    if (outputFile.exists()) {
+                        outputFile.delete()
+                    }
                 }
+                realm.close()
             }
             async.await()
         }
+    }
+
+    private fun createVideoWithMediaMuxer(animator: ReplayerAnimator, context: Context, outputFile: File, width: Int, height: Int) {
+        val mimeType = MediaFormat.MIMETYPE_VIDEO_AVC
+        val frameRate = 20
+        val bitRate = 2000000 // 2Mbps
+
+        // Create MediaFormat with YUV420 flexible format
+        val format = MediaFormat.createVideoFormat(mimeType, width, height).apply {
+            setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible)
+            setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
+            setInteger(MediaFormat.KEY_FRAME_RATE, frameRate)
+            setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
+        }
+
+        // Create encoder
+        val encoder = MediaCodec.createEncoderByType(mimeType)
+        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
+        Timber.d("Starting encoder...")
+        encoder.start()
+
+        // Create MediaMuxer
+        val muxer = MediaMuxer(outputFile.path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
+        var trackIndex = -1
+        var muxerStarted = false
+        val bufferInfo = MediaCodec.BufferInfo()
+        var frameIndex = 0
+        val presentationTimeUs = 1000000L / frameRate // Time per frame in microseconds
+
+        try {
+            // Generate frames using animator
+            Timber.d("Generate frames...")
+            animator.frames(context) { bitmap, visualOccurrences ->
+                Timber.d(">>> Generated frame, visualOccurrences = $visualOccurrences")
+                val yuvData = convertBitmapToYUV420(bitmap, width, height)
+                repeat(visualOccurrences) {
+                    // Convert bitmap to YUV420 and feed to encoder
+                    val inputBufferIndex = encoder.dequeueInputBuffer(10000)
+                    if (inputBufferIndex >= 0) {
+                        val inputBuffer = encoder.getInputBuffer(inputBufferIndex)
+                        if (inputBuffer != null) {
+                            inputBuffer.clear()
+                            inputBuffer.put(yuvData)
+                            encoder.queueInputBuffer(inputBufferIndex, 0, yuvData.size, frameIndex * presentationTimeUs, 0)
+                        }
+                    }
+                    // Process output buffers
+                    // Timber.d("drainEncoder...")
+                    drainEncoder(encoder, muxer, bufferInfo, trackIndex) { newTrackIndex ->
+                        trackIndex = newTrackIndex
+                        muxerStarted = true
+                    }
+                    frameIndex++
+                }
+            }
+            Timber.d("end of frames generation...")
+
+            // Signal end of input
+            val inputBufferIndex = encoder.dequeueInputBuffer(10000)
+            if (inputBufferIndex >= 0) {
+                encoder.queueInputBuffer(inputBufferIndex, 0, 0, frameIndex * presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM)
+            }
+            Timber.d("drainEncoder again...")
+
+            // Drain remaining output
+            drainEncoder(encoder, muxer, bufferInfo, trackIndex, true) { newTrackIndex ->
+                if (!muxerStarted) {
+                    trackIndex = newTrackIndex
+                    muxerStarted = true
+                }
+            }
+        } finally {
+            Timber.d("stop and release...")
+            encoder.stop()
+            encoder.release()
+            if (muxerStarted) {
+                muxer.stop()
+            }
+            muxer.release()
+        }
     }
-//    private fun startVideoExport() {
-//
-//        GlobalScope.launch(coroutineContext) {
-//            val c = GlobalScope.async {
-//
-//                val realm = Realm.getDefaultInstance()
-//                val handHistory = realm.findById<HandHistory>(handHistoryId) ?: throw PAIllegalStateException("HandHistory not found, id: $handHistoryId")
-//
-//                val context = this@ReplayExportService
-//
-//                val animator = ReplayerAnimator(handHistory, true)
-//
-//                val square = 1024
-//
-//                val width = square
-//                val height = square
-//
-//                animator.setDimension(width.toFloat(), height.toFloat())
-//                TableDrawer.configurePaints(context, animator)
-//
-//                val muxer = MMediaMuxer()
-//                muxer.init(null, width, height, "hhVideo", "YES!")
-//
-//                animator.frames(context) { bitmap, count ->
-//
-//                    try {
-//                        val byteArray = bitmap.toByteArray()
-//                        muxer.addFrame(byteArray, count, false)
-//                    } catch (e: Exception) {
-//                        Timber.e("error = ${e.message}")
-//                    }
-//                }
-//
-//                realm.close()
-//
-//                muxer.createVideo { path ->
-//                    notifyUser(path)
-//                }
-//
-//            }
-//            c.await()
-//        }
-//
-//    }
+    private fun drainEncoder(encoder: MediaCodec, muxer: MediaMuxer, bufferInfo: MediaCodec.BufferInfo,
+                             trackIndex: Int, endOfStream: Boolean = false, onTrackAdded: (Int) -> Unit) {
+        var localTrackIndex = trackIndex
+
+        while (true) {
+            val outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, if (endOfStream) 10000 else 0)
+            when {
+                outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER -> {
+                    if (!endOfStream) break else continue
+                }
+                outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
+                    if (localTrackIndex >= 0) {
+                        throw RuntimeException("Format changed twice")
+                    }
+                    localTrackIndex = muxer.addTrack(encoder.outputFormat)
+                    muxer.start()
+                    onTrackAdded(localTrackIndex)
+                }
+                outputBufferIndex >= 0 -> {
+                    val outputBuffer = encoder.getOutputBuffer(outputBufferIndex)
+                    if (outputBuffer != null && bufferInfo.size > 0 && localTrackIndex >= 0) {
+                        muxer.writeSampleData(localTrackIndex, outputBuffer, bufferInfo)
+                    }
+                    encoder.releaseOutputBuffer(outputBufferIndex, false)
+
+                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
+                        break
+                    }
+                }
+            }
+        }
+    }
+
+    private fun convertBitmapToYUV420(bitmap: Bitmap, width: Int, height: Int): ByteArray {
+        val pixels = IntArray(width * height)
+        bitmap.getPixels(pixels, 0, width, 0, 0, width, height)
+
+        val yuvSize = width * height * 3 / 2
+        val yuv = ByteArray(yuvSize)
+
+        var yIndex = 0
+        var uvIndex = width * height
+
+        for (y in 0 until height) {
+            for (x in 0 until width) {
+                val pixel = pixels[y * width + x]
+                val r = (pixel shr 16) and 0xff
+                val g = (pixel shr 8) and 0xff
+                val b = pixel and 0xff
+
+                // Convert RGB to YUV
+                val yValue = ((66 * r + 129 * g + 25 * b + 128) shr 8) + 16
+                yuv[yIndex++] = yValue.coerceIn(0, 255).toByte()
+
+                if (y % 2 == 0 && x % 2 == 0) {
+                    val uValue = ((-38 * r - 74 * g + 112 * b + 128) shr 8) + 128
+                    val vValue = ((112 * r - 94 * g - 18 * b + 128) shr 8) + 128
+                    yuv[uvIndex++] = uValue.coerceIn(0, 255).toByte()
+                    yuv[uvIndex++] = vValue.coerceIn(0, 255).toByte()
+                }
+            }
+        }
+        return yuv
+    }

     private fun startGIFExportPreQ() {
@@ -347,80 +436,6 @@ class ReplayExportService : Service() {
     }

-    private fun startFFMPEGVideoExportPreQ() {
-        GlobalScope.launch(coroutineContext) {
-            val async = GlobalScope.async {
-                val realm = Realm.getDefaultInstance()
-                val handHistory = realm.findById<HandHistory>(handHistoryId) ?: throw PAIllegalStateException("HandHistory not found, id: $handHistoryId")
-                val context = this@ReplayExportService
-                val animator = ReplayerAnimator(handHistory, true)
-                val square = 1024
-                val width = square
-                val height = square
-                animator.configure(width.toFloat(), height.toFloat(), this@ReplayExportService)
-                val drawer = TableDrawer()
-                drawer.configurePaints(context, animator)
-                // generates all images and file descriptor
-                Timber.d("Generating images for video...")
-                val tmpDir = animator.generateVideoContent(this@ReplayExportService)
-                val dpath = "${tmpDir.path}/$FFMPEG_DESCRIPTOR_FILE"
-                val formattedDate = Date().dateTimeFileFormatted
-                val output = File(
-                    Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES),
-                    "hand_${formattedDate}.mp4"
-                ).path
-                Environment.getExternalStorageState(tmpDir)
-                Timber.d("Assembling images for video...")
-                val command = "-f concat -safe 0 -i $dpath -vb 20M -vsync vfr -s ${width}x${height} -vf fps=20 -pix_fmt yuv420p $output"
-                FFmpegKit.executeAsync(command) {
-                    when {
-                        it.returnCode.isSuccess -> {
-                            Timber.d("FFMPEG command execution completed successfully")
-                        }
-                        it.returnCode.isCancel -> {
-                            Timber.d("Command execution cancelled by user.")
-                        }
-                        else -> {
-                            Timber.d(String.format("Command execution failed with rc=%d and the output below.", it.returnCode.value))
-                        }
-                    }
-                    // FFmpeg.executeAsync("-f concat -safe 0 -i $dpath -vb 20M -vsync vfr -s ${width}x${height} -vf fps=20 -pix_fmt yuv420p $output") { id, rc ->
-                    //
-                    //     if (rc == RETURN_CODE_SUCCESS) {
-                    //         Timber.d("FFMPEG command execution completed successfully")
-                    //     } else if (rc == RETURN_CODE_CANCEL) {
-                    //         Timber.d("Command execution cancelled by user.")
-                    //     } else {
-                    //         Timber.d(String.format("Command execution failed with rc=%d and the output below.", rc))
-                    //     }
-                    // Delete descriptor and image files
-                    // tmpDir.delete()
-                    // File(dpath).delete()
-                    notifyUser(output)
-                }
-            }
-            async.await()
-        }
-    }
-
     private fun notifyUser(uri: Uri, type: FileType) {
         val title = getString(R.string.video_available)
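One detail worth flagging in the new convertBitmapToYUV420: it writes the Y plane followed by interleaved U/V bytes, which is the semi-planar NV12 layout, while the encoder is configured with COLOR_FormatYUV420Flexible, so the layout a given device's codec actually expects may differ. On devices whose encoder wants planar I420 input, chroma can come out shifted; MediaCodec.getInputImage() is the more robust way to discover the plane layout at runtime. Below is a hedged sketch of a planar (I420) variant; the convertBitmapToI420 name and its use here are illustrative only and are not part of this change.

import android.graphics.Bitmap

// Sketch: planar I420 converter (Y plane, then full U plane, then full V plane),
// reusing the same RGB-to-YUV coefficients as the converter added in this diff.
fun convertBitmapToI420(bitmap: Bitmap, width: Int, height: Int): ByteArray {
    val pixels = IntArray(width * height)
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height)

    val yuv = ByteArray(width * height * 3 / 2)
    var yIndex = 0
    var uIndex = width * height                          // U plane starts after Y
    var vIndex = width * height + width * height / 4     // V plane starts after U

    for (y in 0 until height) {
        for (x in 0 until width) {
            val pixel = pixels[y * width + x]
            val r = (pixel shr 16) and 0xff
            val g = (pixel shr 8) and 0xff
            val b = pixel and 0xff

            val yValue = ((66 * r + 129 * g + 25 * b + 128) shr 8) + 16
            yuv[yIndex++] = yValue.coerceIn(0, 255).toByte()

            // One U and one V sample per 2x2 pixel block (4:2:0 subsampling)
            if (y % 2 == 0 && x % 2 == 0) {
                val uValue = ((-38 * r - 74 * g + 112 * b + 128) shr 8) + 128
                val vValue = ((112 * r - 94 * g - 18 * b + 128) shr 8) + 128
                yuv[uIndex++] = uValue.coerceIn(0, 255).toByte()
                yuv[vIndex++] = vValue.coerceIn(0, 255).toByte()
            }
        }
    }
    return yuv
}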

@@ -592,6 +592,7 @@ class ReplayerAnimator(var handHistory: HandHistory, var export: Boolean) {
             this.drawer.drawTable(canvas, context)
             frameHandler(bitmap, vo)
+            bitmap.recycle()
         }

@@ -9,7 +9,7 @@ buildscript {
     dependencies {
         classpath 'com.android.tools.build:gradle:8.2.2'
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
-        classpath 'io.realm:realm-gradle-plugin:10.15.1'
+        classpath 'io.realm:realm-gradle-plugin:10.19.0'
         // crashlytics
         classpath 'com.google.gms:google-services:4.4.2'
