+1 -1  posedetection/build.gradle.kts
···
 mavenPublishing {
     publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL)
-    coordinates("com.performancecoachlab.posedetection", "posedetection-compose", "4.3.0")
+    coordinates("com.performancecoachlab.posedetection", "posedetection-compose", "4.4.0")

     pom {
         name.set("Pose Detection")
+20 -65  posedetection/src/androidMain/kotlin/com.performancecoachlab/posedetection/camera/CameraView.android.kt
···
 import kotlinx.coroutines.launch
 import org.tensorflow.lite.support.image.TensorImage
 import org.tensorflow.lite.task.vision.detector.ObjectDetector
-import android.graphics.Paint
-import android.graphics.RectF
-import android.graphics.Color as AndroidColor

 // Data class to hold recording state for each recording ID
 data class RecordingSlot(
···
 val objectClient = if (currentDetectMode.doObject()) objectDetector?.getDetector() else null
 imageProxy.process(
     objectClient, poseClient, timestamp, area
 ) { analysisResult, _bitmap ->
-    val androidBmp = _bitmap.copy(android.graphics.Bitmap.Config.ARGB_8888, true)
-    val canvas = android.graphics.Canvas(androidBmp)
-
-    val drawableObjects: List<DrawableObject> = drawObjects?.invoke(analysisResult.objects) ?: emptyList()
-
-    val paint = Paint().apply {
-        style = Paint.Style.STROKE
-        strokeWidth = 4f // you can adjust thickness here
-        color = AndroidColor.RED
-        isAntiAlias = true
-    }
-
-    drawableObjects.forEach { dobj ->
-        val box = dobj.obj.boundingBox
-        val label = dobj.obj.labels.first().text
-        val confidence = dobj.obj.labels.first().confidence
-
-        val left = box.left
-        val top = box.top
-        val right = box.left + box.width
-        val bottom = box.top + box.height
-        val rect = RectF(left, top, right, bottom)
-
-        val textPaint = Paint().apply {
-            color = AndroidColor.RED
-            textSize = 36f
-            isAntiAlias = true
-            style = Paint.Style.FILL
-            setShadowLayer(4f, 2f, 2f, AndroidColor.BLACK)
-        }
-
-        val labelText = "$label ${(confidence * 100).toInt()}%"
-        val textX = left + 8f
-        val textY = top - 10f
-
-        val textBounds = android.graphics.Rect()
-        textPaint.getTextBounds(labelText, 0, labelText.length, textBounds)
-
-        canvas.drawRect(rect, paint)
-        canvas.drawText(labelText, textX, textY, textPaint)
+    customObjectRepository.updateCustomObject(analysisResult.objects)
+    analysisResult.skeleton?.let { skel ->
+        skeletonRepository.updateSkeleton(skel)
     }
-
-    val annotatedImageBitmap: ImageBitmap = androidBmp.asImageBitmap()
-
-    controller?.setRequestDataProvider {
-        CameraViewData(
-            width = annotatedImageBitmap.width.toFloat(),
-            height = annotatedImageBitmap.height.toFloat(),
-            rotation = when (imageProxy.imageInfo.rotationDegrees) {
-                0 -> SensorRotation.ROTATION_0
-                90 -> SensorRotation.ROTATION_90
-                180 -> SensorRotation.ROTATION_180
-                270 -> SensorRotation.ROTATION_270
-                else -> SensorRotation.ROTATION_0
+    bitmap = _bitmap.asImageBitmap().let { inbmp ->
+        controller?.setRequestDataProvider {
+            CameraViewData(
+                width = inbmp.width.toFloat(),
+                height = inbmp.height.toFloat(),
+                rotation = when (imageProxy.imageInfo.rotationDegrees) {
+                    0 -> SensorRotation.ROTATION_0
+                    90 -> SensorRotation.ROTATION_90
+                    180 -> SensorRotation.ROTATION_180
+                    270 -> SensorRotation.ROTATION_270
+                    else -> SensorRotation.ROTATION_0
+                }
+            )
+        }
+        addFrameToActiveRecordings(inbmp, timestamp)
+        inbmp.drawResults(if (drawSkeleton) analysisResult.skeleton else null, drawObjects?.invoke(analysisResult.objects) ?: emptyList())
     }
-    )
-}
-
-addFrameToActiveRecordings(annotatedImageBitmap, timestamp)
-
-bitmap = annotatedImageBitmap
-
 imageProxy.close()
 }
 }
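The Android analyzer no longer paints boxes with android.graphics; it pushes raw results into the repositories and delegates annotation to an `ImageBitmap.drawResults(...)` helper. That helper is not part of this diff; the following is only a minimal sketch of what the object-drawing half could look like on Compose's bitmap-backed canvas, assuming the frame bitmap is mutable (`drawResultsSketch` and its body are illustrative, not the library's implementation):

```kotlin
import androidx.compose.ui.geometry.Rect
import androidx.compose.ui.graphics.Canvas
import androidx.compose.ui.graphics.ImageBitmap
import androidx.compose.ui.graphics.Paint
import androidx.compose.ui.graphics.PaintingStyle

// Illustrative sketch only: stroke each DrawableObject's bounding box onto the frame.
fun ImageBitmap.drawResultsSketch(objects: List<DrawableObject>): ImageBitmap {
    val canvas = Canvas(this) // Compose canvas backed by this bitmap
    objects.forEach { dobj ->
        val box = dobj.obj.boundingBox
        val paint = Paint().apply {
            color = dobj.colour          // colour chosen by the drawObjects callback
            style = PaintingStyle.Stroke
            strokeWidth = 4f
        }
        canvas.drawRect(
            Rect(box.left, box.top, box.left + box.width, box.top + box.height),
            paint
        )
    }
    return this
}
```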
+94 -59  posedetection/src/iosMain/kotlin/com/performancecoachlab/posedetection/camera/CameraEngine.kt
···
 import androidx.compose.ui.graphics.toComposeImageBitmap
 import androidx.compose.ui.unit.Density
 import androidx.compose.ui.unit.LayoutDirection
+import co.touchlab.kermit.Logger
 import com.performancecoachlab.posedetection.custom.CustomObjectRespository
 import com.performancecoachlab.posedetection.custom.ObjectModel
 import com.performancecoachlab.posedetection.recording.AnalysisObject
···
 import platform.darwin.dispatch_get_global_queue
 import platform.darwin.dispatch_get_main_queue
 import platform.darwin.dispatch_queue_create
+import platform.darwin.dispatch_sync
+import platform.darwin.dispatch_time
 import platform.posix.memcpy
 import kotlin.math.abs
 import kotlin.native.runtime.NativeRuntimeApi
···

 @OptIn(ExperimentalForeignApi::class)
 private fun setupCamera() {
+    // Run session setup synchronously so callers can safely start the session afterwards
     cameraController.setupSession()
     cameraController.setupPreviewLayer(view)
     startSession()
···
     cameraController.switchCamera()
 }

-private fun startSession() {
+fun startSession() {
     MemoryManager.clearBufferPools()
     cameraController.startSession()
 }
···
 var drawSkeleton: Boolean = true
 var drawObjects: ((List<AnalysisObject>) -> List<DrawableObject>)? = null

+// Serial queue to serialize session configuration and start/stop calls
+private val sessionQueue = dispatch_queue_create("com.performancecoachlab.captureSessionQueue", null)
+
 sealed class CameraException : Exception() {
     class DeviceNotAvailable : CameraException()
     class ConfigurationError(message: String) : CameraException()
···
 }

 fun startRecording(): String? {
-    if (movieFileOutput == null) {
-        movieFileOutput = AVCaptureMovieFileOutput()
-        captureSession?.addOutput(movieFileOutput!!)
+    // Ensure we add outputs on the session queue to avoid races
+    dispatch_sync(sessionQueue) {
+        if (movieFileOutput == null) {
+            val candidate = AVCaptureMovieFileOutput()
+            movieFileOutput = candidate
+            if (captureSession?.canAddOutput(candidate) == true) {
+                captureSession?.addOutput(movieFileOutput!!)
+            }
+        }
     }
+
     val outputURL = generateSegmentURL()
     movieFileOutput?.connections?.firstOrNull()?.let { connection ->
         (connection as AVCaptureConnection).let { avConnection ->
···
 }

 fun setupSession() {
-    try {
-        captureSession = AVCaptureSession()
-        captureSession?.beginConfiguration()
-        captureSession?.sessionPreset = AVCaptureSessionPresetMedium
+    // Run synchronously on sessionQueue so callers (like setupCamera) can safely call startSession afterwards
+    dispatch_sync(sessionQueue) {
+        var configurationBegan = false
+        try {
+            captureSession = AVCaptureSession()
+            captureSession?.beginConfiguration()
+            configurationBegan = true
+            captureSession?.sessionPreset = AVCaptureSessionPresetMedium
+
+            if (!setupInputs()) {
+                throw CameraException.DeviceNotAvailable()
+            }
+            setupVideoOutput()

-        if (!setupInputs()) {
-            throw CameraException.DeviceNotAvailable()
-        }
-        setupVideoOutput()
-        // Add movie file output for recording
-        if (movieFileOutput == null) {
-            movieFileOutput = AVCaptureMovieFileOutput()
-            captureSession?.addOutput(movieFileOutput!!)
+            // Add movie file output for recording only if it can be added
+            if (movieFileOutput == null) {
+                val candidate = AVCaptureMovieFileOutput()
+                if (captureSession?.canAddOutput(candidate) == true) {
+                    movieFileOutput = candidate
+                    captureSession?.addOutput(movieFileOutput!!)
+                } else {
+                    // keep the instance (not added) so it can be added later when appropriate
+                    movieFileOutput = candidate
+                }
+            }
+        } catch (e: CameraException) {
+            cleanupSession()
+            onError?.invoke(e)
+            return@dispatch_sync
+        } finally {
+            if (configurationBegan) {
+                captureSession?.commitConfiguration()
+            }
         }
-        captureSession?.commitConfiguration()
-        startRecording()
-    } catch (e: CameraException) {
-        cleanupSession()
-        onError?.invoke(e)
     }
+    // Do not start recording here; startRecording() should be called explicitly after session is running
 }

 private fun setupVideoOutput() {
···
 }

 fun startSession() {
-    if (captureSession?.isRunning() == false) {
-        dispatch_async(
-            dispatch_get_global_queue(
-                DISPATCH_QUEUE_PRIORITY_HIGH.toLong(), 0u
-            )
-        ) {
+    // Run on sessionQueue to avoid overlapping with configuration
+    dispatch_async(sessionQueue) {
+        if (captureSession?.isRunning() == false) {
             captureSession?.startRunning()
         }
     }
 }

 fun stopSession() {
-    if (captureSession?.isRunning() == true) {
-        captureSession?.stopRunning()
+    dispatch_async(sessionQueue) {
+        if (captureSession?.isRunning() == true) {
+            captureSession?.stopRunning()
+        }
     }
 }

···
 fun switchCamera() {
     guard(captureSession != null) { return@guard }

-    captureSession?.beginConfiguration()
+    // Run camera switch on sessionQueue to serialize with setupSession/startSession
+    dispatch_sync(sessionQueue) {
+        var configurationBegan = false
+        try {
+            captureSession?.beginConfiguration()
+            configurationBegan = true

-    try {
-        captureSession?.inputs?.firstOrNull()?.let { input ->
-            captureSession?.removeInput(input as AVCaptureInput)
-        }
+            captureSession?.inputs?.firstOrNull()?.let { input ->
+                captureSession?.removeInput(input as AVCaptureInput)
+            }

-        isUsingFrontCamera = !isUsingFrontCamera
-        currentCamera = if (isUsingFrontCamera) frontCamera else backCamera
+            isUsingFrontCamera = !isUsingFrontCamera
+            currentCamera = if (isUsingFrontCamera) frontCamera else backCamera

-        val newCamera = currentCamera ?: throw CameraException.DeviceNotAvailable()
+            val newCamera = currentCamera ?: throw CameraException.DeviceNotAvailable()

-        val newInput = AVCaptureDeviceInput.deviceInputWithDevice(
-            newCamera, null
-        ) ?: throw CameraException.ConfigurationError("Failed to create input")
+            val newInput = AVCaptureDeviceInput.deviceInputWithDevice(
+                newCamera, null
+            ) ?: throw CameraException.ConfigurationError("Failed to create input")

-        if (captureSession?.canAddInput(newInput) == true) {
-            captureSession?.addInput(newInput)
-        } else {
-            throw CameraException.ConfigurationError("Cannot add input")
-        }
+            if (captureSession?.canAddInput(newInput) == true) {
+                captureSession?.addInput(newInput)
+            } else {
+                throw CameraException.ConfigurationError("Cannot add input")
+            }

-        cameraPreviewLayer?.connection?.let { connection ->
-            if (connection.isVideoMirroringSupported()) {
-                connection.automaticallyAdjustsVideoMirroring = false
-                connection.setVideoMirrored(isUsingFrontCamera)
+            cameraPreviewLayer?.connection?.let { connection ->
+                if (connection.isVideoMirroringSupported()) {
+                    connection.automaticallyAdjustsVideoMirroring = false
+                    connection.setVideoMirrored(isUsingFrontCamera)
+                }
+            }
+
+        } catch (e: CameraException) {
+            onError?.invoke(e)
+        } catch (e: Exception) {
+            onError?.invoke(CameraException.ConfigurationError(e.message ?: "Unknown error"))
+        } finally {
+            if (configurationBegan) {
+                captureSession?.commitConfiguration()
             }
         }
-
-        captureSession?.commitConfiguration()
-    } catch (e: CameraException) {
-        captureSession?.commitConfiguration()
-        onError?.invoke(e)
-    } catch (e: Exception) {
-        captureSession?.commitConfiguration()
-        onError?.invoke(CameraException.ConfigurationError(e.message ?: "Unknown error"))
     }
 }

···
             ).let { FrameSize(it.width.toInt(), it.height.toInt()) }
         })
     }
-    previewObjects?.also { objects ->
+    previewObjects.also { objects ->
         customObjectRepository?.updateCustomObject(objects)
     }
     preview.bounds.useContents {
···
             }
         }
     } catch (e: Exception) {
-        //println(e.message ?: "Unknown error in frame processing")
+        // ignore frame processing errors
     }
 }
 }
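The recurring change in this file is a single serial GCD queue guarding every AVCaptureSession mutation: configuration paths use dispatch_sync so the work has finished when the call returns (which is what lets setupCamera call startSession immediately afterwards), while start/stop use dispatch_async because callers don't need to wait. A self-contained sketch of the pattern, with illustrative names that are not the library's API:

```kotlin
import platform.darwin.dispatch_async
import platform.darwin.dispatch_queue_create
import platform.darwin.dispatch_sync

// Illustrative sketch of the serialization pattern used above.
class SerializedResource {
    // One serial queue; every mutation of `state` funnels through it.
    private val queue = dispatch_queue_create("com.example.resourceQueue", null)
    private var state = "idle"

    // Synchronous: guaranteed complete on return, so callers can
    // immediately follow configure() with start().
    fun configure() {
        dispatch_sync(queue) { state = "configured" }
    }

    // Asynchronous: callers don't need to observe the transition.
    fun start() {
        dispatch_async(queue) {
            if (state == "configured") state = "running"
        }
    }
}
```

One property of GCD worth keeping in mind with this design: dispatch_sync must never be invoked from the queue it targets, or it deadlocks, so the synchronous entry points have to be called from outside sessionQueue.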
+5  posedetection/src/iosMain/kotlin/com/performancecoachlab/posedetection/camera/CameraPreview.kt
···
     CameraEngine()
 }
 LifecycleEventEffect(Lifecycle.Event.ON_STOP) {
+    // Stops the camera when the app goes to the background
     cameraEngine.stopSession()
+}
+LifecycleEventEffect(Lifecycle.Event.ON_RESUME) {
+    // Resumes the camera when coming back from the background
+    cameraEngine.startSession()
 }

 LaunchedEffect(cameraEngine) {
sample/composeApp/src/androidMain/assets/detect_float32_metadata.tflite
This is a binary file and will not be displayed.
+6 -15  sample/composeApp/src/commonMain/kotlin/com/nate/posedetection/App.kt
···
 var frame by remember { mutableStateOf(timeRange.first) }
 val generalModel = initialiseObjectModel(
     ModelPath(
-        "detect_float32_metadata.tflite",
-        "best-hoop5"
+        "lite-model_efficientdet_lite2_detection_metadata_1.tflite",
+        "YOLOv3FP16"
     )
 )
 val frameAnalyser by remember { mutableStateOf(FrameAnalyser(generalModel)) }
···
 val generalModel = initialiseObjectModel(
     ModelPath(
         "lite-model_efficientdet_lite2_detection_metadata_1.tflite",
-        "best-hoop5"
+        "YOLOv3FP16"
     )
 )
 val controller = remember { CameraViewControllerImpl() }
···
 CameraView(
     skeletonRepository = skeletonRepository,
     customObjectRepository = customObjectRespository,
-    detectMode = DetectMode.BOTH,
+    detectMode = DetectMode.OBJECT,
     drawSkeleton = true,
     drawObjects = { obj ->
         obj.map {
-            //println("${it.labels.first().text}")
-            // Map class IDs (or labels) to colors
-            val color = when (it.labels.first().text) {
-                "2P" -> Color.Red // 2points
-                "BasketBall" -> Color.Green // basketball
-                "Hoop" -> Color.Blue // hoop
-                "Player" -> Color.Magenta // player
-                else -> Color.Yellow // Fallback for unexpected classes
-            }
             DrawableObject(
                 obj = it,
-                shape = DrawableShape.OVAL,
-                colour = color,
+                shape = DrawableShape.RECTANGLE,
+                colour = Color.Yellow,
                 style = Stroke(it.boundingBox.width * 0.1f)
             )
         }
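The sample now draws every detection as a plain yellow rectangle, but the removed lines show how the drawObjects callback can still map class labels to colours. A sketch along the lines of the deleted sample code (the label strings belong to the sample's model, not the library):

```kotlin
drawObjects = { objects ->
    objects.map { detected ->
        // Per-class colours, as the removed sample code did
        val colour = when (detected.labels.first().text) {
            "BasketBall" -> Color.Green
            "Hoop" -> Color.Blue
            "Player" -> Color.Magenta
            else -> Color.Yellow // fallback for unexpected classes
        }
        DrawableObject(
            obj = detected,
            shape = DrawableShape.RECTANGLE,
            colour = colour,
            style = Stroke(detected.boundingBox.width * 0.1f)
        )
    }
}
```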
sample/iosApp/iosApp/models/best-hoop5.mlpackage/Data/com.apple.CoreML/model.mlmodel
This is a binary file and will not be displayed.

sample/iosApp/iosApp/models/best-hoop5.mlpackage/Data/com.apple.CoreML/weights/weight.bin
This is a binary file and will not be displayed.

-18  sample/iosApp/iosApp/models/best-hoop5.mlpackage/Manifest.json
···
-{
-  "fileFormatVersion": "1.0.0",
-  "itemInfoEntries": {
-    "56fc92b8-ce49-4049-83cf-3318e3c0d9c1": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Specification",
-      "name": "model.mlmodel",
-      "path": "com.apple.CoreML/model.mlmodel"
-    },
-    "a427aefa-3adb-4e06-9cc1-0a14b28ff7f2": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Weights",
-      "name": "weights",
-      "path": "com.apple.CoreML/weights"
-    }
-  },
-  "rootModelIdentifier": "56fc92b8-ce49-4049-83cf-3318e3c0d9c1"
-}

sample/iosApp/iosApp/models/best.mlpackage/Data/com.apple.CoreML/model.mlmodel
This is a binary file and will not be displayed.

sample/iosApp/iosApp/models/best.mlpackage/Data/com.apple.CoreML/weights/weight.bin
This is a binary file and will not be displayed.

-18  sample/iosApp/iosApp/models/best.mlpackage/Manifest.json
···
-{
-  "fileFormatVersion": "1.0.0",
-  "itemInfoEntries": {
-    "152ed734-ffee-4984-8ebc-44cea4eddde9": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Specification",
-      "name": "model.mlmodel",
-      "path": "com.apple.CoreML/model.mlmodel"
-    },
-    "d0741116-5705-4e9d-bcee-a9125b8e9f82": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Weights",
-      "name": "weights",
-      "path": "com.apple.CoreML/weights"
-    }
-  },
-  "rootModelIdentifier": "152ed734-ffee-4984-8ebc-44cea4eddde9"
-}

sample/iosApp/iosApp/models/yolo11n.mlpackage/Data/com.apple.CoreML/model.mlmodel
This is a binary file and will not be displayed.

sample/iosApp/iosApp/models/yolo11n.mlpackage/Data/com.apple.CoreML/weights/weight.bin
This is a binary file and will not be displayed.

-18  sample/iosApp/iosApp/models/yolo11n.mlpackage/Manifest.json
···
-{
-  "fileFormatVersion": "1.0.0",
-  "itemInfoEntries": {
-    "85c35693-a7e7-460c-83e9-b9274fd1ac9e": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Specification",
-      "name": "model.mlmodel",
-      "path": "com.apple.CoreML/model.mlmodel"
-    },
-    "e4ee33e8-3d98-406f-9a65-98e7354dd026": {
-      "author": "com.apple.CoreML",
-      "description": "CoreML Model Weights",
-      "name": "weights",
-      "path": "com.apple.CoreML/weights"
-    }
-  },
-  "rootModelIdentifier": "85c35693-a7e7-460c-83e9-b9274fd1ac9e"
-}