android: send recorded audio as voice and fix playback replay/duration
Some checks failed
Android CI / android (push) Failing after 4m5s
Android Release / release (push) Failing after 4m1s
CI / test (push) Failing after 2m24s

This commit is contained in:
Codex
2026-03-09 16:52:17 +03:00
parent fd31e39fce
commit c12ab05946
3 changed files with 43 additions and 5 deletions

View File

@@ -455,3 +455,8 @@
- Added tablet-aware max-width layout constraints across major screens (login, verify/reset auth, chats list, chat, profile, settings).
- Kept phone layout unchanged while centering content and limiting line width on larger displays.
- Fixed voice hold-to-send gesture reliability by removing pointer-input restarts during active recording, so release consistently triggers the send path.
### Step 73 - Voice message send/playback bugfixes
- Fixed voice media type mapping in the message repository: recorded files named `voice_*.m4a` are now sent as message type `voice` (not generic `audio`).
- Fixed audio replay behavior: when playback reaches the end, next play restarts from `0:00`.
- Improved duration display in audio/voice player by adding metadata fallback when `MediaPlayer` duration is not immediately available.

View File

@@ -323,7 +323,7 @@ class NetworkMessageRepository @Inject constructor(
caption: String?, caption: String?,
replyToMessageId: Long?, replyToMessageId: Long?,
): AppResult<Unit> = withContext(ioDispatcher) { ): AppResult<Unit> = withContext(ioDispatcher) {
val messageType = mapMimeToMessageType(mimeType) val messageType = mapMimeToMessageType(mimeType = mimeType, fileName = fileName)
val tempId = -System.currentTimeMillis() val tempId = -System.currentTimeMillis()
val tempMessage = MessageEntity( val tempMessage = MessageEntity(
id = tempId, id = tempId,
@@ -495,10 +495,14 @@ class NetworkMessageRepository @Inject constructor(
} }
} }
private fun mapMimeToMessageType(mimeType: String): String { private fun mapMimeToMessageType(
mimeType: String,
fileName: String,
): String {
return when { return when {
mimeType.startsWith("image/") -> "image" mimeType.startsWith("image/") -> "image"
mimeType.startsWith("video/") -> "video" mimeType.startsWith("video/") -> "video"
mimeType.startsWith("audio/") && fileName.startsWith("voice_", ignoreCase = true) -> "voice"
mimeType.startsWith("audio/") -> "audio" mimeType.startsWith("audio/") -> "audio"
else -> "file" else -> "file"
} }

View File

@@ -4,6 +4,7 @@ import android.Manifest
import android.content.Context import android.content.Context
import android.content.pm.PackageManager import android.content.pm.PackageManager
import android.media.AudioAttributes import android.media.AudioAttributes
import android.media.MediaMetadataRetriever
import android.media.MediaPlayer import android.media.MediaPlayer
import android.net.Uri import android.net.Uri
import android.provider.OpenableColumns import android.provider.OpenableColumns
@@ -1220,9 +1221,10 @@ private fun AudioAttachmentPlayer(
isPrepared = true isPrepared = true
durationMs = player.duration.coerceAtLeast(0) durationMs = player.duration.coerceAtLeast(0)
} }
setOnCompletionListener { setOnCompletionListener { player ->
isPlaying = false isPlaying = false
positionMs = durationMs runCatching { player.seekTo(0) }
positionMs = 0
AppAudioFocusCoordinator.release("player:$url") AppAudioFocusCoordinator.release("player:$url")
} }
setDataSource(url) setDataSource(url)
@@ -1245,6 +1247,13 @@ private fun AudioAttachmentPlayer(
delay(250) delay(250)
} }
} }
LaunchedEffect(url, isPrepared) {
if (durationMs > 0) return@LaunchedEffect
val fallbackDuration = resolveRemoteAudioDurationMs(url)
if (fallbackDuration != null && fallbackDuration > 0) {
durationMs = fallbackDuration
}
}
DisposableEffect(mediaPlayer) { DisposableEffect(mediaPlayer) {
onDispose { onDispose {
runCatching { runCatching {
@@ -1273,6 +1282,10 @@ private fun AudioAttachmentPlayer(
isPlaying = false isPlaying = false
AppAudioFocusCoordinator.release("player:$url") AppAudioFocusCoordinator.release("player:$url")
} else { } else {
if (durationMs > 0 && positionMs >= durationMs - 200) {
runCatching { mediaPlayer.seekTo(0) }
positionMs = 0
}
runCatching { runCatching {
mediaPlayer.playbackParams = mediaPlayer.playbackParams.setSpeed(speedOptions[speedIndex]) mediaPlayer.playbackParams = mediaPlayer.playbackParams.setSpeed(speedOptions[speedIndex])
} }
@@ -1322,7 +1335,10 @@ private fun AudioAttachmentPlayer(
horizontalArrangement = Arrangement.SpaceBetween, horizontalArrangement = Arrangement.SpaceBetween,
) { ) {
Text(text = formatDuration(positionMs), style = MaterialTheme.typography.labelSmall) Text(text = formatDuration(positionMs), style = MaterialTheme.typography.labelSmall)
Text(text = formatDuration(durationMs), style = MaterialTheme.typography.labelSmall) Text(
text = if (durationMs > 0) formatDuration(durationMs) else "--:--",
style = MaterialTheme.typography.labelSmall,
)
} }
} }
} }
@@ -1419,6 +1435,19 @@ private fun formatDuration(ms: Int): String {
return "$min:${sec.toString().padStart(2, '0')}" return "$min:${sec.toString().padStart(2, '0')}"
} }
// Best-effort fallback that resolves the duration (in milliseconds) of a remote
// audio file via MediaMetadataRetriever, for when MediaPlayer has not yet
// reported a usable duration. Returns null on any failure (bad URL, network
// error, missing/unparseable metadata).
//
// NOTE(review): MediaMetadataRetriever.setDataSource on an http(s) URL performs
// blocking network I/O on the calling thread — confirm callers invoke this off
// the main thread (e.g. from a background dispatcher).
private fun resolveRemoteAudioDurationMs(url: String): Int? {
    return runCatching {
        val retriever = MediaMetadataRetriever()
        try {
            // Empty headers map selects the (String, Map) overload used for remote URIs.
            retriever.setDataSource(url, emptyMap())
            // METADATA_KEY_DURATION is returned as a decimal string of milliseconds;
            // toIntOrNull() guards against absent or malformed values.
            val duration = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
            duration?.toIntOrNull()
        } finally {
            // Always free the native retriever resources, even when extraction throws.
            retriever.release()
        }
    }.getOrNull()
}
private fun extractFileName(url: String): String { private fun extractFileName(url: String): String {
return runCatching { return runCatching {
val path = java.net.URI(url).path.orEmpty() val path = java.net.URI(url).path.orEmpty()