I have a requirement to show a TextureView full screen with a rectangular overlay drawn on top of it. I want to get only the image that falls inside the overlay, not the whole TextureView. I tried cropping that region out of the full captured image, but had no luck. I also think the image should be cropped before it is saved to the file, not afterwards.
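To make it concrete, the region I want is the overlay's bounds expressed relative to the TextureView. This is roughly the mapping I have in mind (just a sketch using my cameraOverlay and textureView views, not code from the project):

// Overlay bounds translated into TextureView coordinates (sketch).
val overlayLocation = IntArray(2)
val textureLocation = IntArray(2)
cameraOverlay.getLocationInWindow(overlayLocation)
textureView.getLocationInWindow(textureLocation)
val overlayRectInView = Rect(
    overlayLocation[0] - textureLocation[0],
    overlayLocation[1] - textureLocation[1],
    overlayLocation[0] - textureLocation[0] + cameraOverlay.width,
    overlayLocation[1] - textureLocation[1] + cameraOverlay.height
)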
Here is my code:
class CameraActivity : AppCompatActivity(), TextureView.SurfaceTextureListener {
private var camera: Camera? = null
private lateinit var previewSize: Size
private lateinit var file: File
private val viewModel: CameraViewModel by viewModel { parametersOf(intent.getSerializableExtra("proofRequest")) }
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.doc_camera_screen)
val manager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
file = File(getExternalFilesDir(null), viewModel.getFileName())
camera = Camera.initInstance(manager)
}
override fun onResume() {
super.onResume()
if (textureView.isAvailable)
openCamera(textureView.width, textureView.height)
else
textureView.surfaceTextureListener = this
}
override fun onStart() {
super.onStart()
takePictureBtn.clicks().subscribe {
camera?.takePicture(object : ImageHandler {
override fun handleImage(image: Image): Runnable {
Log.v("hwjh", "hgh ${image.height} ${image.width} ---- ${cameraOverlay.width} -> ${cameraOverlay.handler}")
return ImageSaver(image, file)
}
override fun onImageHandled() {
val locationCoordinates = IntArray(2)
cameraOverlay.getLocationInWindow(locationCoordinates)
val returnIntent = Intent()
returnIntent.putExtra("result", file.absolutePath)
returnIntent.putExtra("width", cameraOverlay.width)
returnIntent.putExtra("height", cameraOverlay.height)
returnIntent.putExtra("xPosition", locationCoordinates[0])
returnIntent.putExtra("yPosition", locationCoordinates[1])
setResult(Activity.RESULT_OK, returnIntent)
finish()
}
})
}.disposeBy(lifecycle.disposers.onStop)
}
override fun onPause() {
super.onPause()
camera?.close()
}
private fun openCamera(width: Int, height: Int) {
try {
camera?.let {
setUpCameraOutputs(width, height, it)
configureTransform(width, height)
it.open()
val texture = textureView.surfaceTexture
texture.setDefaultBufferSize(previewSize.width, previewSize.height)
it.start(Surface(texture))
}
} catch (e: CameraAccessException) {
e.printStackTrace()
} catch (e: InterruptedException) {
throw RuntimeException("Interrupted while trying to lock camera opening.", e)
}
}
/**
* Sets up member variables related to camera.
*
* @param width The width of available size for camera preview
* @param height The height of available size for camera preview
*/
private fun setUpCameraOutputs(width: Int, height: Int, camera: Camera) {
try {
val largest = camera.getCaptureSize()
val realSize = Point()
windowManager?.defaultDisplay?.getRealSize(realSize)
val aspectRatio = realSize.x.toFloat() / realSize.y.toFloat()
camera.getPreviewSize(aspectRatio)
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
val displayRotation = windowManager?.defaultDisplay?.rotation ?: return
val sensorOrientation = camera.getSensorOrientation()
val swappedDimensions = areDimensionsSwapped(sensorOrientation, displayRotation)
val displaySize = Point()
windowManager?.defaultDisplay?.getSize(displaySize)
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize = if (swappedDimensions) {
camera.chooseOptimalSize(height, width, displaySize.y, displaySize.x, largest)
} else {
camera.chooseOptimalSize(width, height, displaySize.x, displaySize.y, largest)
}
} catch (e: CameraAccessException) {
e.printStackTrace()
} catch (e: NullPointerException) {
// Currently an NPE is thrown when the Camera2 API is used but not supported on the device this code runs on.
ErrorDialogFragment.newInstance(getString(R.string.camera_error)).showAllowingStateLoss(supportFragmentManager, "CameraErrorDialog")
}
}
/**
* Determines if the dimensions are swapped given the phone's current rotation.
*
* @param sensorOrientation The current sensor orientation
* @param displayRotation The current rotation of the display
* @return true if the dimensions are swapped, false otherwise.
*/
private fun areDimensionsSwapped(sensorOrientation: Int, displayRotation: Int): Boolean {
var swappedDimensions = false
when (displayRotation) {
Surface.ROTATION_0, Surface.ROTATION_180 -> {
if (sensorOrientation == 90 || sensorOrientation == 270) {
swappedDimensions = true
}
}
Surface.ROTATION_90, Surface.ROTATION_270 -> {
if (sensorOrientation == 0 || sensorOrientation == 180) {
swappedDimensions = true
}
}
else -> {
}
}
return swappedDimensions
}
/**
* Configures the necessary [android.graphics.Matrix] transformation to `textureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `textureView` is fixed.
*
* @param viewWidth The width of `textureView`
* @param viewHeight The height of `textureView`
*/
private fun configureTransform(viewWidth: Int, viewHeight: Int) {
val rotation = windowManager.defaultDisplay.rotation
val matrix = Matrix()
val viewRect = RectF(0f, 0f, viewWidth.toFloat(), viewHeight.toFloat())
val bufferRect = RectF(0f, 0f, previewSize.height.toFloat(), previewSize.width.toFloat())
val centerX = viewRect.centerX()
val centerY = viewRect.centerY()
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY())
val scale = max(
viewHeight.toFloat() / previewSize.height,
viewWidth.toFloat() / previewSize.width)
with(matrix) {
setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL)
postScale(scale, scale, centerX, centerY)
postRotate((90 * (rotation - 2)).toFloat(), centerX, centerY)
}
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180f, centerX, centerY)
}
textureView.setTransform(matrix)
}
override fun onSurfaceTextureAvailable(texture: SurfaceTexture, width: Int, height: Int) {
openCamera(width, height)
}
override fun onSurfaceTextureSizeChanged(texture: SurfaceTexture, width: Int, height: Int) {
configureTransform(width, height)
}
override fun onSurfaceTextureDestroyed(texture: SurfaceTexture) = true
override fun onSurfaceTextureUpdated(texture: SurfaceTexture) = Unit
}
class Camera constructor(private val cameraManager: CameraManager) {
companion object {
private const val TAG = "Camera"
// Thread-safe singleton
@Volatile
var instance: Camera? = null
private set
fun initInstance(cameraManager: CameraManager): Camera =
instance ?: synchronized(this) {
instance ?: Camera(cameraManager).also { instance = it }
}
}
private val characteristics: CameraCharacteristics
/**
* The id of the camera device
*/
private val cameraId: String
/**
* A [Semaphore] to prevent the app from exiting before closing the camera.
*/
private val openLock = Semaphore(1)
private var cameraDevice: CameraDevice? = null
/**
* An [ImageReader] that handles still image capture.
*/
private var imageReader: ImageReader? = null
/**
* A [CameraCaptureSession] for camera preview.
*/
private var captureSession: CameraCaptureSession? = null
private var focusListener: OnFocusListener? = null
/**
* The current camera state for taking pictures.
*/
private var state = State.PREVIEW
private var aeMode = CaptureRequest.CONTROL_AE_MODE_ON
private var preAfState: Int? = null
var wbMode: WBMode = WBMode.AUTO
/**
* A [Handler] for running tasks in the background.
*/
private var backgroundHandler: Handler? = null
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private var backgroundThread: HandlerThread? = null
private var surface: Surface? = null
private var isClosed = true
var deviceRotation: Int = 0 // Device rotation is defined by Screen Rotation
init {
cameraId = setUpCameraId(manager = cameraManager)
characteristics = cameraManager.getCameraCharacteristics(cameraId)
}
// Callbacks
private val cameraStateCallback = object : CameraDevice.StateCallback() {
override fun onOpened(camera: CameraDevice) {
cameraDevice = camera
openLock.release()
isClosed = false
}
override fun onClosed(camera: CameraDevice) {
isClosed = true
}
override fun onDisconnected(camera: CameraDevice) {
openLock.release()
camera.close()
cameraDevice = null
isClosed = true
}
override fun onError(camera: CameraDevice, error: Int) {
openLock.release()
camera.close()
cameraDevice = null
isClosed = true
}
}
private val captureStateCallback = object : CameraCaptureSession.StateCallback() {
override fun onConfigureFailed(session: CameraCaptureSession) {}
override fun onConfigured(session: CameraCaptureSession) {
// if camera is closed
if (isClosed) return
captureSession = session
startPreview()
}
}
private val captureCallback = object : CameraCaptureSession.CaptureCallback() {
private fun process(result: CaptureResult) {
when (state) {
State.PREVIEW -> {
val afState = result.get(CaptureResult.CONTROL_AF_STATE) ?: return
if (afState == preAfState) {
return
}
preAfState = afState
focusListener?.onFocusStateChanged(afState)
}
State.WAITING_LOCK -> {
val afState = result.get(CaptureResult.CONTROL_AF_STATE)
// Auto Focus state is not ready in the first place
if (afState == null) {
runPreCapture()
} else if (CaptureResult.CONTROL_AF_STATE_INACTIVE == afState ||
CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
// CONTROL_AE_STATE can be null on some devices
val aeState = result.get(CaptureResult.CONTROL_AE_STATE)
if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
captureStillPicture()
} else {
runPreCapture()
}
} else {
captureStillPicture()
}
}
State.WAITING_PRECAPTURE -> {
val aeState = result.get(CaptureResult.CONTROL_AE_STATE)
if (aeState == null
|| aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
|| aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
|| aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
state = State.WAITING_NON_PRECAPTURE
}
}
State.WAITING_NON_PRECAPTURE -> {
val aeState = result.get(CaptureResult.CONTROL_AE_STATE)
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
captureStillPicture()
}
}
else -> {
}
}
}
override fun onCaptureProgressed(session: CameraCaptureSession,
request: CaptureRequest,
partialResult: CaptureResult) {
process(partialResult)
}
override fun onCaptureCompleted(session: CameraCaptureSession,
request: CaptureRequest,
result: TotalCaptureResult) {
process(result)
}
}
// Camera interfaces
/**
* Open camera and setup background handler
*/
fun open() {
try {
if (!openLock.tryAcquire(3L, TimeUnit.SECONDS)) {
throw IllegalStateException("Camera launch failed")
}
if (cameraDevice != null) {
openLock.release()
return
}
startBackgroundHandler()
cameraManager.openCamera(cameraId, cameraStateCallback, backgroundHandler)
} catch (e: SecurityException) {
Log.e(TAG, "Camera permission not granted: $e")
}
}
/**
* Start camera. Should be called after open() is successful
*/
fun start(surface: Surface) {
this.surface = surface
// setup camera session
val size = characteristics.getCaptureSize(CompareSizesByArea())
imageReader = ImageReader.newInstance(size.width, size.height, ImageFormat.JPEG, 1)
cameraDevice?.createCaptureSession(
listOf(surface, imageReader?.surface),
captureStateCallback,
backgroundHandler
)
}
fun takePicture(handler: ImageHandler) {
if (cameraDevice == null) {
throw IllegalStateException("Camera device not ready")
}
if (isClosed) return
imageReader?.setOnImageAvailableListener({ reader ->
val image = reader.acquireNextImage()
// The listener already runs on backgroundHandler, so run the saver here
// and only report completion once the image has been written.
handler.handleImage(image = image).run()
handler.onImageHandled()
}, backgroundHandler)
lockFocus()
}
fun close() {
try {
if (openLock.tryAcquire(3, TimeUnit.SECONDS))
isClosed = true
captureSession?.close()
captureSession = null
cameraDevice?.close()
cameraDevice = null
surface?.release()
surface = null
imageReader?.close()
imageReader = null
stopBackgroundHandler()
} catch (e: InterruptedException) {
Log.e(TAG, "Error closing camera $e")
} finally {
openLock.release()
}
}
// internal methods
/**
* Set up camera Id from id list
*/
private fun setUpCameraId(manager: CameraManager): String {
for (cameraId in manager.cameraIdList) {
val characteristics = manager.getCameraCharacteristics(cameraId)
// We don't use a front facing camera in this sample.
val cameraDirection = characteristics.get(CameraCharacteristics.LENS_FACING)
if (cameraDirection != null &&
cameraDirection == CameraCharacteristics.LENS_FACING_FRONT) {
continue
}
return cameraId
}
throw IllegalStateException("Could not set Camera Id")
}
private fun startBackgroundHandler() {
if (backgroundThread != null) return
backgroundThread = HandlerThread("Camera-$cameraId").also {
it.start()
backgroundHandler = Handler(it.looper)
}
}
private fun stopBackgroundHandler() {
backgroundThread?.quitSafely()
try {
backgroundThread?.join()
backgroundThread = null
backgroundHandler = null
} catch (e: InterruptedException) {
Log.e(TAG, "===== stop background error $e")
}
}
private fun startPreview() {
try {
if (!openLock.tryAcquire(1L, TimeUnit.SECONDS)) return
if (isClosed) return
state = State.PREVIEW
createPreviewRequestBuilder()?.let { builder ->
captureSession?.setRepeatingRequest(builder.build(), captureCallback, backgroundHandler)
}
} catch (e1: IllegalStateException) {
} catch (e2: CameraAccessException) {
} catch (e3: InterruptedException) {
} finally {
openLock.release()
}
}
@Throws(CameraAccessException::class)
private fun createPreviewRequestBuilder(): CaptureRequest.Builder? {
val builder = cameraDevice?.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
surface?.let {
builder?.addTarget(it)
}
enableDefaultModes(builder)
return builder
}
private fun enableDefaultModes(builder: CaptureRequest.Builder?) {
if (builder == null) return
// Auto focus should be continuous for camera preview.
// Use the same AE and AF modes as the preview.
builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
builder.set(CaptureRequest.JPEG_ORIENTATION, 90)
if (characteristics.isContinuousAutoFocusSupported()) {
builder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
} else {
builder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_AUTO
)
}
if (characteristics.isAutoExposureSupported(aeMode)) {
builder.set(CaptureRequest.CONTROL_AE_MODE, aeMode)
} else {
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
}
builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY)
}
/**
* Lock the focus as the first step for a still image capture.
*/
private fun lockFocus() {
try {
state = State.WAITING_LOCK
val builder = createPreviewRequestBuilder()
if (!characteristics.isContinuousAutoFocusSupported()) {
// If continuous AF is not supported, start the AF trigger here
builder?.set(
CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START
)
}
builder?.let { newBuilder ->
captureSession?.capture(newBuilder.build(), captureCallback, backgroundHandler)
}
} catch (e: CameraAccessException) {
Log.e(TAG, "lockFocus $e")
}
}
private fun runPreCapture() {
try {
state = State.WAITING_PRECAPTURE
val builder = createPreviewRequestBuilder()
builder?.set(
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START
)
builder?.let {
captureSession?.capture(builder.build(), captureCallback, backgroundHandler)
}
} catch (e: CameraAccessException) {
Log.e(TAG, "runPreCapture $e")
}
}
/**
* Capture a still picture. This method should be called when we get a response in
* [captureCallback] after [lockFocus].
*/
private fun captureStillPicture() {
state = State.TAKEN
try {
// This is the CaptureRequest.Builder that we use to take a picture.
cameraDevice?.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE)?.let { builder ->
enableDefaultModes(builder)
imageReader?.let { builder.addTarget(it.surface) }
surface?.let { surface -> builder.addTarget(surface) }
captureSession?.stopRepeating()
captureSession?.capture(
builder.build(),
object : CameraCaptureSession.CaptureCallback() {
override fun onCaptureCompleted(session: CameraCaptureSession,
request: CaptureRequest,
result: TotalCaptureResult) {
// Once still picture is captured, ImageReader.OnImageAvailable gets called
// You can do completion task here
}
},
backgroundHandler)
}
} catch (e: CameraAccessException) {
Log.e(TAG, "captureStillPicture $e")
}
}
fun getCaptureSize() = characteristics.getCaptureSize(CompareSizesByArea())
fun getPreviewSize(aspectRatio: Float) = characteristics.getPreviewSize(aspectRatio)
/**
* Get sensor orientation.
* 0, 90, 180, 270.
*/
fun getSensorOrientation() = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0
fun chooseOptimalSize(textureViewWidth: Int,
textureViewHeight: Int,
maxWidth: Int,
maxHeight: Int,
aspectRatio: Size): Size =
characteristics.chooseOptimalSize(
textureViewWidth,
textureViewHeight,
maxWidth,
maxHeight,
aspectRatio
)
}
I have tried other cropping solutions, but they all crop the image after it has already been captured and saved. What I want is for the image to be cropped first and only then saved.
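For reference, this is roughly the crop-then-save step I am aiming for. It is only a sketch: CroppingImageSaver is a hypothetical replacement for my ImageSaver, and it assumes the captured JPEG already has the same orientation as the preview and that the preview fills the whole TextureView (no letterboxing), so the overlay rect can be scaled proportionally.

import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Rect
import android.media.Image
import java.io.File
import java.io.FileOutputStream

// Hypothetical "crop before save" variant of ImageSaver (sketch).
class CroppingImageSaver(
    private val image: Image,
    private val file: File,
    private val overlayRect: Rect, // overlay bounds relative to the TextureView
    private val viewWidth: Int,    // textureView.width
    private val viewHeight: Int    // textureView.height
) : Runnable {

    override fun run() {
        // Read the JPEG bytes out of the Image and release it.
        val buffer = image.planes[0].buffer
        val bytes = ByteArray(buffer.remaining())
        buffer.get(bytes)
        image.close()

        val full = BitmapFactory.decodeByteArray(bytes, 0, bytes.size)

        // Scale the overlay rect from view coordinates into bitmap coordinates.
        val scaleX = full.width.toFloat() / viewWidth
        val scaleY = full.height.toFloat() / viewHeight
        val left = (overlayRect.left * scaleX).toInt().coerceIn(0, full.width - 1)
        val top = (overlayRect.top * scaleY).toInt().coerceIn(0, full.height - 1)
        val width = (overlayRect.width() * scaleX).toInt().coerceAtMost(full.width - left)
        val height = (overlayRect.height() * scaleY).toInt().coerceAtMost(full.height - top)

        // Save only the cropped region to the target file.
        val cropped = Bitmap.createBitmap(full, left, top, width, height)
        FileOutputStream(file).use { out ->
            cropped.compress(Bitmap.CompressFormat.JPEG, 95, out)
        }
    }
}

Is something along these lines the right direction, or is there a way to restrict the capture itself to the overlay region?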
Thanks, Sindhu