
EmguCV SURF with a camera?

0 votes
/ 31 March 2012

I'm new to Emgu CV. I'd like to use SURF to detect more than one pattern with a webcam, like in this video. But for now, as a starting point, I'm trying to get a single pattern working.

I have looked at the Emgu CV SURF example. When I try to plug that code into the camera-capture example, I get an error at runtime. I searched further but couldn't find any sample code for this.

So, can you suggest a code snippet or a well-explained tutorial?

Many thanks in advance.

Below is the code I'm working on:

...........................................
FrameRaw = capture.QueryFrame();
CamImageBox.Image = FrameRaw;
Run(FrameRaw);
...........................................    
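
For reference, the frame grabbing follows the stock Emgu CV CameraCapture pattern, driven by the WinForms Application.Idle event. A minimal sketch of that wiring (StartCamera is a simplified stand-in for the form's start-up code):

private Capture capture;                 // Emgu CV camera wrapper
private Image<Bgr, Byte> FrameRaw;

private void StartCamera()
{
    capture = new Capture();             // opens the default camera
    Application.Idle += ProcessFrame;    // grab a frame whenever the UI is idle
}

private void ProcessFrame(object sender, EventArgs e)
{
    FrameRaw = capture.QueryFrame();     // may return null if no frame is ready
    if (FrameRaw == null) return;
    CamImageBox.Image = FrameRaw;
    Run(FrameRaw);                       // SURF matching against the live frame
}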

     private void Run(Image<Bgr, byte> TempImage)
            {

                // NOTE: the model image is reloaded from disk on every frame;
                // loading it once into a field would be much cheaper
                Image<Gray, Byte> modelImage = new Image<Gray, byte>("sample.jpg");
                Image<Gray, Byte> observedImage = TempImage.Convert<Gray, Byte>();
                // Image<Gray, Byte> observedImage = new Image<Gray,byte>("box_in_scene.png");

                Stopwatch watch;
                HomographyMatrix homography = null;

                SURFDetector surfCPU = new SURFDetector(500, false);

                VectorOfKeyPoint modelKeyPoints;
                VectorOfKeyPoint observedKeyPoints;
                Matrix<int> indices;
                Matrix<float> dist;
                Matrix<byte> mask;

                if (GpuInvoke.HasCuda)
                {
                    GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                    using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                    using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                    using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                    {
                        modelKeyPoints = new VectorOfKeyPoint();
                        surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                        watch = Stopwatch.StartNew();

                        // extract features from the observed image
                        using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
                        using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                        using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                        using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, 2, 1))
                        using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuMatchIndices.Size, 1))
                        {
                            observedKeyPoints = new VectorOfKeyPoint();
                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                            indices = new Matrix<int>(gpuMatchIndices.Size);
                            dist = new Matrix<float>(indices.Size);
                            gpuMatchIndices.Download(indices);
                            gpuMatchDist.Download(dist);

                            mask = new Matrix<byte>(dist.Rows, 1);

                            mask.SetValue(255);

                            // Lowe's ratio test: keep a match only if the best distance
                            // is under 0.8 of the second-best
                            Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
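                            // at least 4 point correspondences are needed to estimate a homography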
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                    homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                            }

                            watch.Stop();
                        }
                    }
                }
                else
                {
                    //extract features from the object image
                    modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                    //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                    Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                    Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                    BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                    matcher.Add(modelDescriptors);
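                    // request the 2 nearest model descriptors per observed descriptor,
                    // so the uniqueness vote below can compare best vs. second-best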
                    int k = 2;
                    indices = new Matrix<int>(observedDescriptors.Rows, k);
                    dist = new Matrix<float>(observedDescriptors.Rows, k);
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                    mask = new Matrix<byte>(dist.Rows, 1);

                    mask.SetValue(255);

                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
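                        // drop matches whose relative scale / rotation disagree with the
                        // dominant cluster (scale increment 1.5, 20 rotation bins)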
                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }

                    watch.Stop();
                }

                //Draw the matched keypoints
                Image<Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                   indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

                #region draw the projected region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[] pts = new PointF[] { 
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                    homography.ProjectPoints(pts);

                    result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
                }
                #endregion

               // ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
            }
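
The ImageViewer.Show call (commented out above) would open a modal window on every frame; one hypothetical alternative (ResultImageBox being an assumed second PictureBox) is to return the annotated image from Run and display it in the form:

// sketch: change the signature to  private Image<Bgr, Byte> Run(Image<Bgr, byte> TempImage)
// and end the method with  return result;  then, in the capture loop:
ResultImageBox.Image = Run(FrameRaw);    // show the drawn matches per frame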

1 Answer

0 votes
/ 31 March 2012

I found the SURF tutorial you used, but I don't see why it should produce an error. Were you able to run the tutorial code on its own, without the added complication of GPU acceleration? Also, what exactly was the error?
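
To narrow it down, you could wrap the per-frame call in a try/catch and post the exact exception text; a minimal sketch:

try
{
    Run(FrameRaw);
}
catch (Exception ex)
{
    MessageBox.Show(ex.ToString());  // exception type, message, and stack trace
}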

...