Close, я собираюсь выяснить неперехваченное исключение в warpPerspective ()
opencv_4_3_0. js: 30 Uncaught 6533712 ___cxa_throw @ opencv_4_3_0. js: 30 dynCall_vii @ 0167cb82: 1 Module.dynCall_viiiii @ opencv_4_3_0. js: 30 dynCall_viiiii_966 @ VM513794: 4 warpPerspective @ VM514734: 11 прото. @ opencv_4_3_0. js: 30 Align_img2 @ opencv: 1932 onclick @ VM515781 opencv: 190
Должно быть, гомография еще не верна для отправки в warpPerspective (). Но выглядит достоверно, но трудно сказать:
homography :
Mat {$$: {…}}
cols: (...)
data: (...)
data8S: (...)
data16S: (...)
data16U: (...)
data32F: Float32Array(0)
buffer: ArrayBuffer(134217728)
[[Int8Array]]: Int8Array(134217728) [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, …]
[[Int16Array]]: Int16Array(67108864)
[0 … 999999]
[0 … 9999]
[10000 … 19999]
[20000 … 29999]
[30000 … 39999]
[30000 … 30099]
30000: 0
30001: 10800
30002: 12768
30003: 10812
30004: 0
30005: 10935
30006: 21504
30007: 10986
30008: -20480
30009: 11071
30010: -22464
30011: 11123
30012: -8461
30013: 11175
код:
function Align_img2() {
//image_A is the original image we are trying to align to
//image_B is the image we are trying to line up correctly
let image_A = cv.imread(imgElement_Baseline);
let image_B = cv.imread('imageChangeup');
let image_A_gray = new cv.Mat();
let image_B_gray = new cv.Mat();
//get size of baseline image (image A)
var image_A_width = image_A.cols;
var image_A_height = image_A.rows;
//resize image B to the baseline (image A) image
let image_A_dimensions = new cv.Size(image_A_width, image_A_height);
cv.resize(image_B, image_B, image_A_dimensions, cv.INTER_AREA);
// Convert both images to grayscale
cv.cvtColor(image_A, image_A_gray, cv.COLOR_BGRA2GRAY);
cv.cvtColor(image_B, image_B_gray, cv.COLOR_BGRA2GRAY);
// Initiate detector
var orb = new cv.ORB(1000);
var kpv_image_A = new cv.KeyPointVector();
var kpv_image_B = new cv.KeyPointVector();
var descriptors_image_A =new cv.Mat();
var descriptors_image_B =new cv.Mat();
var image_A_keypoints=new cv.Mat();
var image_B_keypoints =new cv.Mat();
mask = new cv.Mat();
orb.detectAndCompute(image_A_gray, new cv.Mat(), kpv_image_A, descriptors_image_A);
orb.detectAndCompute(image_B_gray, new cv.Mat(), kpv_image_B, descriptors_image_B);
let color = new cv.Scalar(0,255,0, 255);
// find matches
let bf = new cv.BFMatcher(cv.NORM_HAMMING, true);
// Match descriptors
let matches = new cv.DMatchVector();
bf.match(descriptors_image_A, descriptors_image_B, matches);
var good_matches = new cv.DMatchVector();
for (let i = 0; i < matches.size(); i++) {
if (matches.get(i).distance < 30) {
good_matches.push_back(matches.get(i));
console.log("Good match, distance: ", matches.get(i).distance, "queryIdx: ", matches.get(i).queryIdx);
}
}
// Debug to verify matches found
//var matches_img = new cv.Mat();
//cv.drawMatches(image_A_gray, kpv_image_A, image_B_gray, kpv_image_B, good_matches, matches_img, color);
//cv.imshow('imageChangeup', matches_img);
var points_A = [];
var points_B = [];
for (let i = 0; i < good_matches.size(); i++) {
console.log("good_matches.get(i).queryIdx", good_matches.get(i).queryIdx);
console.log("kpv_image_A.get(good_matches.get(i).queryIdx ).pt", kpv_image_A.get(good_matches.get(i).queryIdx ).pt);
console.log("points_A", points_A);
points_A.push(kpv_image_A.get(good_matches.get(i).queryIdx ).pt );
points_B.push(kpv_image_B.get(good_matches.get(i).trainIdx ).pt );
}
let mat_A = cv.matFromArray(points_A.length, 2, cv.CV_32F,points_A);
let mat_B = cv.matFromArray(points_B.length, 2, cv.CV_32F,points_B);
// Calculate Homography points_A and B need to be CV_32FC2 - a 32-bit, floating-point, and 2-channels structure
var homography = new cv.Mat();
homography = cv.findHomography(mat_A, mat_B, cv.RANSAC);
// Warp image to baseline_image based on homography
var image_B_final_result = new cv.Mat();
let dsize = new cv.Size(image_A.rows, image_A.cols);
//console.log("dsize", dsize); //this is Size {width: 1293, height: 1000} for our test image
cv.warpPerspective(image_B, image_B_final_result, homography, dsize);
cv.imshow('imageChangeup', image_B_final_result);
//matches_img.delete();
matches.delete();
bf.delete();
orb.delete();
kpv_image_A.delete();
kpv_image_B.delete();
descriptors_image_A.delete();
descriptors_image_B.delete();
image_A_keypoints.delete();
image_B_keypoints.delete();
image_A_gray.delete();
image_B_gray.delete();
homography.delete();
image_B_final_result.delete();
mat_A.delete();
mat_B.delete();
};