
Set up an external video source

Note

This example is part of the Vision SDK Examples. You can find the values for all referenced resources in the project's res directory. For example, see res/values/strings.xml for the R.string.* references used in this example.
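This example does not read string resources directly, but the examples project keeps shared values, typically including the Mapbox access token, in such resource files. A hypothetical strings.xml entry, shown only to illustrate where those values live (the actual keys and values are in the repository's res/values/strings.xml):

<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Hypothetical entry; the real keys and values are in the examples' res/values/strings.xml. -->
    <string name="mapbox_access_token">YOUR_MAPBOX_ACCESS_TOKEN</string>
</resources>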

activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <com.mapbox.vision.view.VisionView
        android:id="@+id/vision_view"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        app:visualization_mode="clear" />

    <FrameLayout
        android:id="@+id/speed_alert_view"
        android:layout_width="@dimen/speed_limit_view_width"
        android:layout_height="@dimen/speed_limit_view_height"
        android:layout_gravity="end|top"
        android:layout_margin="15dp"
        android:visibility="gone">

        <ImageView
            android:id="@+id/speed_sign_view"
            android:layout_width="@dimen/speed_limit_view_width"
            android:layout_height="@dimen/speed_limit_view_height"
            android:src="@drawable/speed_limit_normal" />

        <TextView
            android:id="@+id/speed_value_view"
            android:layout_width="@dimen/speed_limit_speed_view_width"
            android:layout_height="@dimen/speed_limit_speed_view_height"
            android:layout_gravity="bottom|center_horizontal"
            android:layout_marginBottom="10dp"
            android:gravity="center"
            android:textSize="40dp"
            android:textStyle="bold"
            tools:ignore="SpUsage"
            tools:text="40" />
    </FrameLayout>

</FrameLayout>
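The layout references several dimension resources (speed_limit_view_width and related values) and the speed_limit_normal drawable, all of which are defined in the examples' res directory. A hypothetical set of dimension values, shown only so the layout can be tried outside the examples project, could look like this:

<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Hypothetical values; the real ones live in the Vision SDK Examples' res/values/dimens.xml. -->
    <dimen name="speed_limit_view_width">96dp</dimen>
    <dimen name="speed_limit_view_height">96dp</dimen>
    <dimen name="speed_limit_speed_view_width">64dp</dimen>
    <dimen name="speed_limit_speed_view_height">48dp</dimen>
</resources>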
ExternalVideoSourceActivity.kt
package com.mapbox.vision.examples

import android.graphics.Bitmap
import android.media.MediaMetadataRetriever
import android.os.Bundle
import android.os.Handler
import android.os.HandlerThread
import com.mapbox.vision.VisionManager
import com.mapbox.vision.mobile.core.interfaces.VisionEventsListener
import com.mapbox.vision.mobile.core.models.AuthorizationStatus
import com.mapbox.vision.mobile.core.models.Camera
import com.mapbox.vision.mobile.core.models.Country
import com.mapbox.vision.mobile.core.models.FrameSegmentation
import com.mapbox.vision.mobile.core.models.classification.FrameSignClassifications
import com.mapbox.vision.mobile.core.models.detection.FrameDetections
import com.mapbox.vision.mobile.core.models.frame.ImageFormat
import com.mapbox.vision.mobile.core.models.frame.ImageSize
import com.mapbox.vision.mobile.core.models.position.VehicleState
import com.mapbox.vision.mobile.core.models.road.RoadDescription
import com.mapbox.vision.mobile.core.models.world.WorldDescription
import com.mapbox.vision.video.videosource.VideoSource
import com.mapbox.vision.video.videosource.VideoSourceListener
import java.nio.ByteBuffer
import java.util.concurrent.TimeUnit
import kotlinx.android.synthetic.main.activity_main.*

/**
 * Example shows how the Vision SDK can work with an external video source. This can be a custom camera
 * implementation or any other source of frames: a video file, a set of pictures, etc.
 */
class ExternalVideoSourceActivityKt : BaseActivity() {

    companion object {
        // Video file that will be processed.
        private const val PATH_TO_VIDEO_FILE = "path_to_video_file"
    }

    private var videoSourceListener: VideoSourceListener? = null
    private val handlerThread = HandlerThread("VideoDecode")
    private var visionManagerWasInit = false

    // VideoSource that will play the file.
    private val customVideoSource = object : VideoSource {

        override fun attach(videoSourceListener: VideoSourceListener) {
            this@ExternalVideoSourceActivityKt.videoSourceListener = videoSourceListener
            handlerThread.start()
            Handler(handlerThread.looper).post { startFileVideoSource() }
        }

        override fun detach() {
            videoSourceListener = null
            handlerThread.quitSafely()
        }
    }

    // VisionEventsListener handles events from the Vision SDK on a background thread.
    private val visionEventsListener = object : VisionEventsListener {

        override fun onAuthorizationStatusUpdated(authorizationStatus: AuthorizationStatus) {}

        override fun onFrameSegmentationUpdated(frameSegmentation: FrameSegmentation) {}

        override fun onFrameDetectionsUpdated(frameDetections: FrameDetections) {}

        override fun onFrameSignClassificationsUpdated(frameSignClassifications: FrameSignClassifications) {}

        override fun onRoadDescriptionUpdated(roadDescription: RoadDescription) {}

        override fun onWorldDescriptionUpdated(worldDescription: WorldDescription) {}

        override fun onVehicleStateUpdated(vehicleState: VehicleState) {}

        override fun onCameraUpdated(camera: Camera) {}

        override fun onCountryUpdated(country: Country) {}

        override fun onUpdateCompleted() {}
    }

    public override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
    }

    override fun onPermissionsGranted() {
        startVisionManager()
    }

    override fun initViews() {
        setContentView(R.layout.activity_main)
    }

    override fun onStart() {
        super.onStart()
        startVisionManager()
    }

    override fun onStop() {
        super.onStop()
        stopVisionManager()
    }

    override fun onResume() {
        super.onResume()
        vision_view.onResume()
    }

    override fun onPause() {
        super.onPause()
        vision_view.onPause()
    }

    private fun startVisionManager() {
        if (allPermissionsGranted() && !visionManagerWasInit) {
            VisionManager.create(customVideoSource)
            vision_view.setVisionManager(VisionManager)
            VisionManager.visionEventsListener = visionEventsListener
            VisionManager.start()

            visionManagerWasInit = true
        }
    }

    private fun stopVisionManager() {
        if (visionManagerWasInit) {
            VisionManager.stop()
            VisionManager.destroy()

            visionManagerWasInit = false
        }
    }

    /**
     * Decodes the video source frame by frame and feeds the frames to the Vision SDK.
     */
    private fun startFileVideoSource() {
        // Use MediaMetadataRetriever to decode the video.
        // It isn't the fastest way to decode a video, so you probably want another
        // method (e.g. MediaCodec) if FPS is important.
        val retriever = MediaMetadataRetriever()
        retriever.setDataSource(PATH_TO_VIDEO_FILE)

        // Get video frame size.
        val frameWidth =
            Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH))
        val frameHeight =
            Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT))
        val imageSize = ImageSize(frameWidth, frameHeight)
        // ByteBuffer to hold RGBA bytes.
        val rgbaByteBuffer = ByteBuffer.allocateDirect(frameWidth * frameHeight * 4)

        // Get the duration; METADATA_KEY_DURATION is reported in milliseconds.
        val duration =
            java.lang.Long.parseLong(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION))

        try {
            // Get frames one by one at 1-second intervals.
            for (seconds in 0 until TimeUnit.MILLISECONDS.toSeconds(duration)) {
                val bitmap = retriever
                    .getFrameAtTime(
                        TimeUnit.SECONDS.toMicros(seconds),
                        MediaMetadataRetriever.OPTION_CLOSEST
                    )
                    .copy(Bitmap.Config.ARGB_8888, false)

                bitmap.copyPixelsToBuffer(rgbaByteBuffer)

                videoSourceListener!!.onNewFrame(
                    VideoSourceListener.FrameHolder.ByteBufferHolder(rgbaByteBuffer),
                    ImageFormat.RGBA,
                    imageSize
                )
                rgbaByteBuffer.clear()
            }
        } catch (e: RuntimeException) {
            e.printStackTrace()
        } finally {
            try {
                retriever.release()
            } catch (e: RuntimeException) {
                e.printStackTrace()
            }
        }
    }
}
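The same VideoSource pattern works for any producer of frames, not only a video file, as the class comment notes. The sketch below is a hypothetical variation (the asset file names and the feedImagesFromAssets helper are not part of the example) showing how a set of still images bundled in the app's assets could be pushed through the same onNewFrame call. It assumes an extra import of android.graphics.BitmapFactory and would, like startFileVideoSource, be posted to the handler thread started in attach so that decoding stays off the main thread.

    // Hypothetical helper, not part of the original example: pushes still images
    // from the app's assets to the Vision SDK using the same listener as above.
    private fun feedImagesFromAssets() {
        // Illustrative asset names; replace with whatever images are bundled.
        val assetNames = listOf("frame_000.png", "frame_001.png", "frame_002.png")
        for (name in assetNames) {
            // Decode to ARGB_8888 so the pixels can be copied out as RGBA bytes.
            val bitmap = assets.open(name)
                .use { BitmapFactory.decodeStream(it) }
                .copy(Bitmap.Config.ARGB_8888, false)

            val rgbaBuffer = ByteBuffer.allocateDirect(bitmap.width * bitmap.height * 4)
            bitmap.copyPixelsToBuffer(rgbaBuffer)

            // Feed the frame to the Vision SDK exactly as startFileVideoSource does.
            videoSourceListener?.onNewFrame(
                VideoSourceListener.FrameHolder.ByteBufferHolder(rgbaBuffer),
                ImageFormat.RGBA,
                ImageSize(bitmap.width, bitmap.height)
            )
        }
    }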