So I'm trying to create a panorama from 20 images, and I gave up on using the MathWorks example because I don't know where it goes wrong. Instead I decided to manually find matching pairs of images and then try to stitch them together. However, the output has a very large white border that gets included when I stitch them.
Вот изображения: https://imgur.com/a/DZ6auhj
И вот сшитое изображение: https://imgur.com/a/dUeFHRG
clc;
clear;
close all;
% Load images.
buildingDir = fullfile('C:\Users\hayes\Documents\YEAR 2\CI285 Functional Programming\RonanHayes_AssignmentTwo\imageTry3');
%buildingDir = fullfile(toolboxdir('vision'), 'visiondata', 'building');
buildingScene = imageDatastore(buildingDir);
% Display images to be stitched
montage(buildingScene.Files);
% Read the first image from the image set.
I = readimage(buildingScene, 1);
% Initialize features for I(1)
grayImage = rgb2gray(I);
points = detectSURFFeatures(grayImage);
[features, points] = extractFeatures(grayImage, points);
% Initialize all the transforms to the identity matrix. Note that the
% projective transform is used here because the building images are fairly
% close to the camera. Had the scene been captured from a further distance,
% an affine transform would suffice.
numImages = numel(buildingScene.Files);
tforms(numImages) = projective2d(eye(3));
% Initialize variable to hold image sizes.
imageSize = zeros(numImages,2);
imageSize(1,:) = size(grayImage); % record the first image's size as well, so outputLimits gets real limits for tforms(1)
% Iterate over remaining image pairs
for n = 2:numImages
    % Store points and features for I(n-1).
    pointsPrevious = points;
    featuresPrevious = features;

    % Read I(n).
    I = readimage(buildingScene, n);

    % Convert image to grayscale.
    grayImage = rgb2gray(I);
    %figure(3), imshow(grayImage);

    % Save image size.
    imageSize(n,:) = size(grayImage);

    % Detect and extract SURF features for I(n).
    points = detectSURFFeatures(grayImage);
    [features, points] = extractFeatures(grayImage, points);

    % Find correspondences between I(n) and I(n-1).
    indexPairs = matchFeatures(features, featuresPrevious, 'Unique', true);
    matchedPoints = points(indexPairs(:,1), :);
    matchedPointsPrev = pointsPrevious(indexPairs(:,2), :);

    % Estimate the transformation between I(n) and I(n-1).
    % WHY IS IT CHANGING POINTS WHEN I INCLUDE THIS?
    tforms(n) = estimateGeometricTransform(matchedPoints, matchedPointsPrev, ...
        'projective', 'Confidence', 99.9, 'MaxNumTrials', 4000);

    % Compute T(n) * T(n-1) * ... * T(1)
    tforms(n).T = tforms(n).T * tforms(n-1).T;
end
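% One way to sanity-check the matches for a pair (just a sketch; Iprev would
% have to be kept from the previous iteration, it is not stored in this script):
%   [~, inlierPts, inlierPtsPrev] = estimateGeometricTransform( ...
%       matchedPoints, matchedPointsPrev, 'projective');
%   figure, showMatchedFeatures(I, Iprev, inlierPts, inlierPtsPrev);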
% Compute the output limits for each transform
for i = 1:numel(tforms)
    [xlim(i,:), ylim(i,:)] = outputLimits(tforms(i), [1 imageSize(i,2)], [1 imageSize(i,1)]);
end
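% Find the image that sits roughly in the middle of the scene by sorting the
% images by the average of their x output limits, and use it as the reference.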
avgXLim = mean(xlim, 2);
[~, idx] = sort(avgXLim);
centerIdx = floor((numel(tforms)+1)/2);
centerImageIdx = idx(centerIdx);
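% Invert the center image's transform and compose it with every transform,
% so the center image maps to itself and all others are warped relative to it.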
Tinv = invert(tforms(centerImageIdx));
for i = 1:numel(tforms)
    tforms(i).T = tforms(i).T * Tinv.T;
end
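% Recompute the output limits now that all transforms are relative to the center image.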
for i = 1:numel(tforms)
    [xlim(i,:), ylim(i,:)] = outputLimits(tforms(i), [1 imageSize(i,2)], [1 imageSize(i,1)]);
end
maxImageSize = max(imageSize);
% Find the minimum and maximum output limits
xMin = min([1; xlim(:)]);
xMax = max([maxImageSize(2); xlim(:)]);
yMin = min([1; ylim(:)]);
yMax = max([maxImageSize(1); ylim(:)]);
% Width and height of panorama.
width = round(xMax - xMin);
height = round(yMax - yMin);
% Initialize the "empty" panorama.
panorama = zeros([height width 3], 'like', I);
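% The AlphaBlender copies pixels from the warped image wherever the supplied
% binary mask is true and leaves the rest of the panorama untouched.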
blender = vision.AlphaBlender('Operation', 'Binary mask', 'MaskSource', 'Input port');
% Create a 2-D spatial reference object defining the size of the panorama.
xLimits = [xMin xMax];
yLimits = [yMin yMax];
panoramaView = imref2d([height width], xLimits, yLimits);
% Create the panorama.
for i = 1:numImages
    I = readimage(buildingScene, i);

    % Transform I into the panorama.
    warpedImage = imwarp(I, tforms(i), 'OutputView', panoramaView);

    % Generate a binary mask.
    mask = imwarp(true(size(I,1),size(I,2)), tforms(i), 'OutputView', panoramaView);

    % Overlay the warpedImage onto the panorama.
    panorama = step(blender, panorama, warpedImage, mask);
end
figure, imshow(panorama)
The stitched image looks right apart from the white border getting in the way where the images join, and I don't know how to get rid of it.
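A minimal sketch of one possible workaround, assuming the white frame is part of the source photos themselves rather than something the stitching adds (the threshold of 250 and the 20-pixel margin are guesses, not values taken from the images): either build each blending mask from the image content so near-white frame pixels never reach the blender, or crop a fixed margin off every image right after reading it.

% Option 1: inside the panorama loop, use a content-based mask instead of
% the all-true one, so near-white frame pixels are not blended in.
mask = all(I < 250, 3);                                  % assumed "near white" threshold
mask = imwarp(mask, tforms(i), 'OutputView', panoramaView);
panorama = step(blender, panorama, warpedImage, mask);

% Option 2: crop an assumed fixed margin off every image straight after
% readimage, in both the feature loop and the panorama loop.
border = 20;
I = I(border+1:end-border, border+1:end-border, :);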