Draw a bounding box on the full-screen JavaCameraView - PullRequest
0 votes / 11 July 2019

Following this issue, I am trying to set my JavaCameraView to portrait full-screen mode. The solution from @ghost works for the preview. But now I have another problem: I want to draw the bounding boxes in full-screen mode. My app detects objects and draws the bounding boxes on the small landscape frame (correctly oriented thanks to @OctavioCega's solution).

'JavaCameraView.java': 'initializeCamera'

protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;

        if (mCameraIndex == CAMERA_ID_ANY) {
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            }
            catch (Exception e){
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }

            if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected) break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                int localCameraIndex = mCameraIndex;
                if (mCameraIndex == CAMERA_ID_BACK) {
                    Log.i(TAG, "Trying to open back camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo( camIdx, cameraInfo );
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                } else if (mCameraIndex == CAMERA_ID_FRONT) {
                    Log.i(TAG, "Trying to open front camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo( camIdx, cameraInfo );
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                }
                if (localCameraIndex == CAMERA_ID_BACK) {
                    Log.e(TAG, "Back camera not found!");
                } else if (localCameraIndex == CAMERA_ID_FRONT) {
                    Log.e(TAG, "Front camera not found!");
                } else {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
                    try {
                        mCamera = Camera.open(localCameraIndex);
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
        }

        if (mCamera == null)
            return false;

        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();

            if (sizes != null) {
                /* Image format NV21 causes issues in the Android emulators */
                if (Build.FINGERPRINT.startsWith("generic")
                        || Build.FINGERPRINT.startsWith("unknown")
                        || Build.MODEL.contains("google_sdk")
                        || Build.MODEL.contains("Emulator")
                        || Build.MODEL.contains("Android SDK built for x86")
                        || Build.MANUFACTURER.contains("Genymotion")
                        || (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
                        || "google_sdk".equals(Build.PRODUCT))
                    params.setPreviewFormat(ImageFormat.YV12);  // "generic" or "android" = android emulator
                else
                    params.setPreviewFormat(ImageFormat.NV21);

                mPreviewFormat = params.getPreviewFormat();
                if (!Build.MODEL.equals("GT-I9100")) params.setRecordingHint(true);
                params.setPreviewSize(1920, 1080);
                mCamera.setParameters(params);

                mFrameWidth = 1920;
                mFrameHeight = 1080;

                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }

                int size = mFrameWidth * mFrameHeight;
                size  = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];

                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);

                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);

                AllocateCache();

                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);

                mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                mCamera.setPreviewTexture(mSurfaceTexture);

                if (getOrientation().equals("portrait")) {
                    setDisplayOrientation(mCamera, 90);
                } else if (getOrientation().equals("reverse landscape")){
                    setDisplayOrientation(mCamera, 180);
                }
                mCamera.setPreviewDisplay(getHolder());

                mCamera.startPreview();
            }
            else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }

    return result;
}
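As a side note, the preview size above is hardcoded to 1920x1080, which not every device supports. A minimal sketch of picking the closest supported size instead is shown below; the helper name bestPreviewSize is mine (not part of OpenCV), and it only relies on the sizes list already fetched in initializeCamera:

// Hypothetical helper: choose the supported preview size closest to a desired
// resolution instead of hardcoding 1920x1080. Uses only android.hardware.Camera types.
private static Camera.Size bestPreviewSize(List<Camera.Size> sizes, int desiredWidth, int desiredHeight) {
    Camera.Size best = sizes.get(0);
    long bestDiff = Long.MAX_VALUE;
    for (Camera.Size s : sizes) {
        // simple distance metric between the candidate size and the desired size
        long diff = Math.abs((long) s.width - desiredWidth) + Math.abs((long) s.height - desiredHeight);
        if (diff < bestDiff) {
            bestDiff = diff;
            best = s;
        }
    }
    return best;
}

It could replace the hardcoded call, e.g. Camera.Size s = bestPreviewSize(sizes, width, height); params.setPreviewSize(s.width, s.height);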

'CameraBridgeViewBase.java': 'deliverAndDrawFrame'

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch(Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }
}
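For context, the version of deliverAndDrawFrame shown above stops after matToBitmap; in the stock OpenCV class the cached bitmap is then drawn onto the SurfaceHolder canvas, and that draw call is where the frame gets stretched to the visible view. A simplified sketch of that step, assuming it is placed right after the matToBitmap block (it stretches to the whole canvas, while the real OpenCV code centers the bitmap using an mScale factor):

// Simplified sketch: draw the cached bitmap stretched over the whole SurfaceView canvas.
// android.graphics types are fully qualified to avoid clashing with org.opencv.core.Rect.
if (bmpValid && mCacheBitmap != null) {
    android.graphics.Canvas canvas = getHolder().lockCanvas();
    if (canvas != null) {
        canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
        android.graphics.Rect src = new android.graphics.Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight());
        android.graphics.Rect dst = new android.graphics.Rect(0, 0, canvas.getWidth(), canvas.getHeight());
        canvas.drawBitmap(mCacheBitmap, src, dst, null);  // the bitmap (and anything drawn on the Mat) is scaled here
        getHolder().unlockCanvasAndPost(canvas);
    }
}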

This is my 'onCameraFrame' function:

@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) throws FileNotFoundException {

    mRgba = inputFrame.rgba();
    // Rotate mRgba 90 degrees
    Mat frame = inputFrame.rgba();
    Core.transpose(frame, mRgbaT);
    Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
    Core.flip(mRgbaF, mRgba, 1 );
    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGBA2RGB);
    //Imgproc.GaussianBlur(frame,frame, new Size(5,5), 0);

    Size frame_size = new Size(416, 416);
    Scalar mean = new Scalar(127.5);

    Mat blob = Dnn.blobFromImage(frame, 0.00392, frame_size, mean, true, false);
    net.setInput(blob);

    List<Mat> result = new ArrayList<>();
    List<String> outBlobNames = net.getUnconnectedOutLayersNames();

    net.forward(result, outBlobNames);
    Log.d("DETECT", "netForward success!");
    Log.d("DETECT", "Frame Size: "+frame.width()+"X"+frame.height());
    float confThreshold = 0.25f;
    List<Integer> clsIds = new ArrayList<>();
    List<Float> confs = new ArrayList<>();
    List<Rect> rects = new ArrayList<>();
    List<String> labels = new ArrayList<>();
    Log.d("DETECT", "LIST MAT SIZE: "+result.size());
    for (int i = 0; i < result.size(); ++i)
    {
        // each row is a candidate detection, the 1st 4 numbers are
        // [center_x, center_y, width, height], followed by (N-4) class probabilities
        Mat level = result.get(i);
        for (int j = 0; j < level.rows(); ++j)
        {
            Mat row = level.row(j);
            Mat scores = row.colRange(5, level.cols());
            Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
            float confidence = (float)mm.maxVal;
            Point classIdPoint = mm.maxLoc;
            if (confidence > confThreshold)
            {

                int centerX = (int)(row.get(0,0)[0] * frame.cols());
                int centerY = (int)(row.get(0,1)[0] * frame.rows());
                int width   = (int)(row.get(0,2)[0] * frame.cols());
                int height  = (int)(row.get(0,3)[0] * frame.rows());
                int left    = centerX - width  / 2;
                int top     = centerY - height / 2;

                clsIds.add((int)classIdPoint.x);
                confs.add(confidence);
                DecimalFormat df = new DecimalFormat("#.###");
                labels.add(classNames.get((int) classIdPoint.x) + ": " + df.format(confidence)); // use this detection's class and confidence rather than index 0
                rects.add(new Rect(left, top, width, height));
            }
        }
    }

    // Apply non-maximum suppression procedure.
    float nmsThresh = 0.5f;
    MatOfFloat confidences = new MatOfFloat();
    confidences.fromList(confs);
    Rect[] boxesArray = rects.toArray(new Rect[0]);
    MatOfRect boxes = new MatOfRect(boxesArray);
    MatOfInt indices = new MatOfInt();
    Dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThresh, indices); //We draw the bounding boxes for objects here//
    Log.d("DETECT", "BEFORE BOX!");
    // Draw result boxes:
    if(!indices.empty())
    {
        int [] ind = indices.toArray();
        for (int i = 0; i < ind.length; ++i)
        {
            int idx = ind[i];
            Rect box = boxesArray[idx];
            Imgproc.rectangle(frame, box.tl(), box.br(), colors.get(clsIds.get(idx)), 5);
            Imgproc.rectangle(frame, new Point(box.x - 5, box.y - 60), new Point(box.br().x, box.y), colors.get(clsIds.get(idx)), Imgproc.FILLED);
            Imgproc.putText(
                    frame,                         // Matrix obj of the image
                    labels.get(idx),               // text to be added
                    new Point(box.x, box.y - 5),   // bottom-left corner of the text
                    Imgproc.FONT_HERSHEY_SIMPLEX,  // font face
                    2,                             // font scale
                    new Scalar(255, 255, 255),     // Scalar object for color
                    3                              // thickness
            );
            Log.d("DETECT", labels.get(idx));
            try{
                Bitmap bmp = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888); // matToBitmap requires an allocated bitmap; a null target would crash
                Utils.matToBitmap(frame, bmp);
                String path = Environment.getExternalStorageDirectory().toString();
                OutputStream fOut = null;
                Integer counter = 0;
                File file = new File(path, "foto.jpg"); // the File to save; an increasing numeric counter could be appended to prevent overwriting
                fOut = new FileOutputStream(file);
                bmp.compress(Bitmap.CompressFormat.JPEG, 85, fOut); // saving the Bitmap to a file compressed as a JPEG with 85% compression rate
                fOut.flush(); // Not really required
                fOut.close(); // do not forget to close the stream
                MediaStore.Images.Media.insertImage(getContentResolver(),file.getAbsolutePath(),file.getName(),file.getName());
            } catch (IOException e)
            {
                Log.e("ERROR", ""+e);
            }

        }
    }
    else
        Log.d("DETECT","ERROR: INDICES EMPTY!!!!!!!!!");

    return frame;
}

The objects are detected, but the bounding boxes seem to be drawn not over the visible full-screen portrait image, but over the small original view.

On the right is what I see on screen; on the left is what I get if I save the frame: link to example screenshot

How can I draw the bounding box over the full-size screen? Do I need to modify the 'JavaCameraView.java' and 'CameraBridgeViewBase.java' classes, or can I handle it just inside my function?
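For what it's worth, the general idea of mapping a box from the processed frame into the coordinate space of the displayed view is a plain linear rescale. A purely illustrative sketch (the method name and the viewWidth/viewHeight parameters are mine, not from any library):

// Illustrative helper: rescale a box detected on a frameWidth x frameHeight Mat
// into the coordinate space of a view/canvas of viewWidth x viewHeight.
static org.opencv.core.Rect scaleBoxToView(org.opencv.core.Rect box,
                                           int frameWidth, int frameHeight,
                                           int viewWidth, int viewHeight) {
    double sx = (double) viewWidth / frameWidth;    // horizontal scale factor
    double sy = (double) viewHeight / frameHeight;  // vertical scale factor
    return new org.opencv.core.Rect((int) (box.x * sx), (int) (box.y * sy),
                                    (int) (box.width * sx), (int) (box.height * sy));
}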

...