I'm building an object detection app with Kotlin and a TensorFlow Lite model. I trained YOLOv5 and converted it to TensorFlow Lite with: python export.py --weights best.pt --include tflite --nms. These are the output tensor details:
name: StatefulPartitionedCall:0, shape: [1 100 4], dtype: <class 'numpy.float32'>
I added the model to the app and used the Kotlin sample code that ships with the model to draw bounding boxes around the detected objects. The project builds without any problems, but when I run it on a physical device it crashes right after the splash screen and closes itself, showing a message that the app has a bug. This is the logcat error:
E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.example.sightfulkotlin, PID: 23100
java.lang.RuntimeException: Unable to start activity ComponentInfo{com.example.sightfulkotlin/com.example.sightfulkotlin.MainActivity}: java.lang.IllegalStateException: Internal error: Unexpected failure when preparing tensor allocations: Regular TensorFlow ops are not supported by this interpreter. Make sure you apply/link the Flex delegate before inference. Node number 402 (FlexCombinedNonMaxSuppression) failed to prepare.
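From what I can tell, the Flex delegate the error mentions is normally linked by adding the Select TF Ops package to the app module. A minimal sketch of what I understand that change to be, assuming a Kotlin DSL build file (the version numbers are just placeholders, not what my project actually uses):

// app/build.gradle.kts -- sketch only; versions are assumptions
dependencies {
    // Core TFLite runtime
    implementation("org.tensorflow:tensorflow-lite:2.9.0")
    // Select TF Ops ("Flex delegate"), needed for ops like FlexCombinedNonMaxSuppression
    implementation("org.tensorflow:tensorflow-lite-select-tf-ops:2.9.0")
}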
Below is the code I'm using to open the camera and draw bounding boxes on the detected objects.
MainActivity:
package com.example.sightfulkotlin
import android.annotation.SuppressLint
import android.content.Context
import android.content.pm.PackageManager
import android.graphics.*
import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.os.Bundle
import android.os.Handler
import android.os.HandlerThread
import android.view.Surface
import android.view.TextureView
import android.widget.ImageView
import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import com.example.sightfulkotlin.ml.ObjectDetection
import org.tensorflow.lite.DataType
import org.tensorflow.lite.support.common.FileUtil
import org.tensorflow.lite.support.image.ImageProcessor
import org.tensorflow.lite.support.image.TensorImage
import org.tensorflow.lite.support.image.ops.ResizeOp
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer
class MainActivity : AppCompatActivity() {
    // Colors cycled through for the bounding boxes of successive detections
    var colors = listOf(
        Color.BLUE, Color.GREEN, Color.RED, Color.CYAN, Color.GRAY, Color.BLACK,
        Color.DKGRAY, Color.MAGENTA, Color.YELLOW, Color.LTGRAY, Color.WHITE
    )
    val paint = Paint()
    private lateinit var labels: List<String>
    private lateinit var cameraManager: CameraManager
    lateinit var cameraDevice: CameraDevice
    lateinit var handler: Handler
    lateinit var textureView: TextureView
    lateinit var model: ObjectDetection
    lateinit var bitmap: Bitmap
    lateinit var imageView: ImageView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        getPermission()

        labels = FileUtil.loadLabels(this, "labels.txt")
        model = ObjectDetection.newInstance(this)

        // Resize every camera frame to the 640x640 input the model expects
        val imageProcessor = ImageProcessor.Builder()
            .add(ResizeOp(640, 640, ResizeOp.ResizeMethod.BILINEAR))
            .build()

        // Background thread for the camera pipeline
        val handlerThread = HandlerThread("videoThread")
        handlerThread.start()
        handler = Handler(handlerThread.looper)

        paint.color = Color.GREEN
        imageView = findViewById(R.id.imageView)
        textureView = findViewById(R.id.textureView)
        textureView.surfaceTextureListener = object : TextureView.SurfaceTextureListener {
            override fun onSurfaceTextureAvailable(p0: SurfaceTexture, p1: Int, p2: Int) {
                openCamera()
            }

            override fun onSurfaceTextureSizeChanged(p0: SurfaceTexture, p1: Int, p2: Int) {
            }

            override fun onSurfaceTextureDestroyed(p0: SurfaceTexture): Boolean {
                return false
            }

            override fun onSurfaceTextureUpdated(p0: SurfaceTexture) {
                // Run inference on every preview frame
                bitmap = textureView.bitmap!!
                var tensorImage = TensorImage(DataType.FLOAT32)
                tensorImage.load(bitmap)
                tensorImage = imageProcessor.process(tensorImage)

                val inputFeature0 = TensorBuffer.createFixedSize(intArrayOf(1, 640, 640, 3), DataType.FLOAT32)
                inputFeature0.loadBuffer(tensorImage.buffer)

                val outputs = model.process(inputFeature0)
                val locations = outputs.outputFeature0AsTensorBuffer.floatArray
                val scores = outputs.outputFeature1AsTensorBuffer.floatArray
                val classes = outputs.outputFeature2AsTensorBuffer.floatArray
                val numberOfDetections = outputs.outputFeature3AsTensorBuffer.floatArray

                val mutable = bitmap.copy(Bitmap.Config.ARGB_8888, true)
                val canvas = Canvas(mutable)
                val h = mutable.height
                val w = mutable.width
                paint.textSize = h / 15f
                paint.strokeWidth = h / 85f

                scores.forEachIndexed { index, fl ->
                    val x = index * 4
                    if (fl > 0.5) {
                        // Boxes are [ymin, xmin, ymax, xmax] normalized to 0..1;
                        // wrap the color index so it never exceeds the list size
                        paint.color = colors[index % colors.size]
                        paint.style = Paint.Style.STROKE
                        canvas.drawRect(
                            RectF(locations[x + 1] * w, locations[x] * h, locations[x + 3] * w, locations[x + 2] * h),
                            paint
                        )
                        paint.style = Paint.Style.FILL
                        canvas.drawText(
                            labels[classes[index].toInt()] + " " + fl.toString(),
                            locations[x + 1] * w, locations[x] * h, paint
                        )
                    }
                }
                imageView.setImageBitmap(mutable)
            }
        }

        cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
    }
    override fun onDestroy() {
        super.onDestroy()
        model.close()
    }

    @SuppressLint("MissingPermission")
    fun openCamera() {
        cameraManager.openCamera(cameraManager.cameraIdList[0], object : CameraDevice.StateCallback() {
            override fun onOpened(p0: CameraDevice) {
                cameraDevice = p0
                val surfaceTexture = textureView.surfaceTexture
                val surface = Surface(surfaceTexture)
                val captureRequest = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
                captureRequest.addTarget(surface)
                cameraDevice.createCaptureSession(listOf(surface), object : CameraCaptureSession.StateCallback() {
                    override fun onConfigured(p0: CameraCaptureSession) {
                        p0.setRepeatingRequest(captureRequest.build(), null, null)
                    }

                    override fun onConfigureFailed(p0: CameraCaptureSession) {
                    }
                }, handler)
            }

            override fun onDisconnected(p0: CameraDevice) {
            }

            override fun onError(p0: CameraDevice, p1: Int) {
            }
        }, handler)
    }

    private fun getPermission() {
        if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(arrayOf(android.Manifest.permission.CAMERA), 101)
        }
    }

    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
        if (grantResults[0] != PackageManager.PERMISSION_GRANTED) {
            getPermission()
        }
    }
}
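For completeness, the ObjectDetection class used above is the wrapper that Android Studio generates for the .tflite file via ML Model Binding. The module Gradle setup that this kind of generated wrapper typically needs looks roughly like the sketch below (library versions here are placeholders):

// module build.gradle.kts -- sketch of the flags the generated ml.ObjectDetection wrapper relies on
android {
    buildFeatures {
        mlModelBinding = true
    }
}

dependencies {
    // TFLite support/metadata libraries used by the generated code; versions are assumptions
    implementation("org.tensorflow:tensorflow-lite-support:0.4.2")
    implementation("org.tensorflow:tensorflow-lite-metadata:0.4.2")
}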
1 Answer
Try an SSD MobileNet model instead; the TFLite library supports that model's outputs.
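One way this is commonly wired up (not tested against your project) is with the TensorFlow Lite Task Library (the org.tensorflow:tensorflow-lite-task-vision dependency), which decodes an SSD MobileNet model's output tensors for you. A rough Kotlin sketch, where the model file name and thresholds are placeholders:

import android.content.Context
import android.graphics.Bitmap
import org.tensorflow.lite.support.image.TensorImage
import org.tensorflow.lite.task.vision.detector.ObjectDetector

// Sketch: run an SSD MobileNet TFLite model through the Task Library.
// "ssd_mobilenet.tflite" is a placeholder asset name.
fun detect(context: Context, frame: Bitmap) {
    val options = ObjectDetector.ObjectDetectorOptions.builder()
        .setMaxResults(10)
        .setScoreThreshold(0.5f)
        .build()
    val detector = ObjectDetector.createFromFileAndOptions(context, "ssd_mobilenet.tflite", options)

    // The Task Library resizes the frame and returns ready-to-draw boxes, labels and scores
    val results = detector.detect(TensorImage.fromBitmap(frame))
    for (detection in results) {
        val box = detection.boundingBox                    // RectF in input-image pixel coordinates
        val category = detection.categories.firstOrNull()  // top label and score for this box
        // draw box and category?.label on a Canvas, as in MainActivity above
    }
}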