source: proiecte/pmake3d/make3d_original/Make3dSingleImageStanford_version0.1/image3dstiching/match/DenseMatch.m @ 37

% *  This code was used in the following articles:
% *  [1] Learning 3-D Scene Structure from a Single Still Image,
% *      Ashutosh Saxena, Min Sun, Andrew Y. Ng,
% *      In ICCV workshop on 3D Representation for Recognition (3dRR-07), 2007.
% *      (best paper)
% *  [2] 3-D Reconstruction from Sparse Views using Monocular Vision,
% *      Ashutosh Saxena, Min Sun, Andrew Y. Ng,
% *      In ICCV workshop on Virtual Representations and Modeling
% *      of Large-scale environments (VRML), 2007.
% *  [3] 3-D Depth Reconstruction from a Single Still Image,
% *      Ashutosh Saxena, Sung H. Chung, Andrew Y. Ng.
% *      International Journal of Computer Vision (IJCV), Aug 2007.
% *  [6] Learning Depth from Single Monocular Images,
% *      Ashutosh Saxena, Sung H. Chung, Andrew Y. Ng.
% *      In Neural Information Processing Systems (NIPS) 18, 2005.
% *
% *  These articles are available at:
% *  http://make3d.stanford.edu/publications
% *
% *  We request that you cite the papers [1], [3] and [6] in any of
% *  your reports that use this code.
% *  Further, if you use the code in image3dstiching/ (multiple image version),
% *  then please cite [2].
% *
% *  If you use the code in third_party/, then PLEASE CITE and follow the
% *  LICENSE OF THE CORRESPONDING THIRD PARTY CODE.
% *
% *  Finally, this code is for non-commercial use only.  For further
% *  information and to obtain a copy of the license, see
% *
% *  http://make3d.stanford.edu/publications/code
% *
% *  Also, the software distributed under the License is distributed on an
% *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
% *  express or implied.  See the License for the specific language governing
% *  permissions and limitations under the License.
% *
% */
function [Pair]=DenseMatch(defaultPara, R, T, ImgInfo)

% This function searches for denser matches, given a reasonably accurate
% relative pose (R, T).
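%
% Inputs (as used in this file):
%   defaultPara - parameter struct (Fdir, Type, matching parameters, ...)
%   R, T        - relative camera rotation and translation (the "BA" R and T
%                 referred to below)
%   ImgInfo     - per-image struct array with ExifInfo.name and
%                 Model.Depth.FitDepth (the monocular depth estimate)
% Output:
%   Pair        - struct with the matched image coordinates (Xim = [f1; f2])
%                 and the pose R, T
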
Img1 = strrep(ImgInfo(1).ExifInfo.name,'.jpg','');
Img2 = strrep(ImgInfo(2).ExifInfo.name,'.jpg','');
I1=imreadbw([defaultPara.Fdir '/pgm/' Img1 '.pgm']); % function from sift
I2=imreadbw([defaultPara.Fdir '/pgm/' Img2 '.pgm']); % function from sift
[f1] = readSurf(Img1, defaultPara.Fdir, 'Dense'); % original features
[f2] = readSurf(Img2, defaultPara.Fdir, 'Dense'); % original features
[D1] = PorjPosi2Depth(size(I1), size(ImgInfo(1).Model.Depth.FitDepth), f1, ImgInfo(1).Model.Depth.FitDepth);
[D2] = PorjPosi2Depth(size(I2), size(ImgInfo(2).Model.Depth.FitDepth), f2, ImgInfo(2).Model.Depth.FitDepth);
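% D1, D2: the monocular depth (Model.Depth.FitDepth) sampled at the SURF
% feature positions, used below to define the match search constraint.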

% 1. Using BA's R and T and the scaled mono-depth to define the match search space constraint
% read in all SURF features
defaultPara.VertVar = 0.02;
defaultPara.MaxRatio = 5;
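% The pose is passed stacked in both directions: [R; R'] pairs R with its
% transpose (the inverse rotation) and [T; -R'*T] pairs T with the inverse
% translation, so search regions can be estimated in both images.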
[ Rc1, Rc2, ConS1, ConS2, ConSRough1, ConSRough2] = CalMatchSearchRegin(defaultPara, [R; R'], [T; -R'*T], I1, I2, f1, f2, D1, D2, 1, 0);
Vector2Ipoint([Rc1; ConS1],[defaultPara.Fdir '/surf/'],['RConS_' Img1]);
Vector2Ipoint([Rc2; ConS2],[defaultPara.Fdir '/surf/'],['RConS_' Img2]);
Vector2Ipoint([ConSRough1],[defaultPara.Fdir '/surf/'],['RConSRough_' Img1]);
Vector2Ipoint([ConSRough2],[defaultPara.Fdir '/surf/'],['RConSRough_' Img2]);

% 2. Do the match search over all combinations satisfying the constraint from 1), using a relative threshold
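% (the RConS/RConSRough files written above are presumably read back by
% surfMatchRConS.sh, which is given defaultPara.Fdir; the trailing
% '0.1 0.3' appear to be its match thresholds)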
tic
cd match
system(['./surfMatchRConS.sh ' defaultPara.Fdir ' ' Img1 ' ' Img2 ' Dense ' '0.1 0.3']);
cd ..
toc

[f1, f2, matches] = readSurfMatches(Img1, Img2, defaultPara.Fdir, [ defaultPara.Type 'Dense'], 1, 1);
figure; plotmatches(I1,I2,f1, f2,matches, 'Stacking', 'v', 'Interactive', 2);
% keep only the matched features, aligned so that f1(:,k) corresponds to f2(:,k)
f1 = f1(:,matches(1,:));
f2 = f2(:,matches(2,:));

% % 3. triangulation
% x_calib = [ inv(defaultPara.InrinsicK1)*[ f1; ones(1,length(f1))];...
%             inv(defaultPara.InrinsicK2)*[ f2; ones(1,length(f2))]];
% [ Pair.depth1 Pair.depth2] = triangulation( defaultPara, R, T, x_calib);
% X_obj_1 = x_calib(1:3,:).*repmat(Pair.depth1, 3, 1);
% X_obj_2 = R'*(x_calib(4:6,:).*repmat(Pair.depth2, 3, 1)) + repmat(-R'*T, 1, length(f1));
% Structure.X_obj = (X_obj_1+X_obj_2)/2;
Pair.Xim = [f1; f2];
Pair.R = R;
Pair.T = T;
return;
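
% -------------------------------------------------------------------------
% Hypothetical helper, added only as an illustration: the commented-out
% "3. triangulation" step above calls an external triangulation() routine
% and then averages the two back-projected points. The sketch below shows a
% minimal midpoint triangulation for a single correspondence under the same
% convention that block assumes (a camera-1 point X1 maps to camera 2 as
% X2 = R*X1 + T, and r1, r2 are the calibrated rays inv(K1)*[u1;v1;1] and
% inv(K2)*[u2;v2;1]). It is not the routine this package actually uses.
% Example usage (per correspondence k of the commented block's x_calib):
%   [d1, d2, X] = MidPointTriangulateSketch(R, T, x_calib(1:3,k), x_calib(4:6,k));
function [d1, d2, X] = MidPointTriangulateSketch(R, T, r1, r2)
% Camera-2 center and second ray direction expressed in camera-1 coordinates
C2  = -R'*T;
r2w =  R'*r2;
% Closest approach of the two rays: solve d1*r1 - d2*r2w = C2 in least squares
d   = [r1, -r2w] \ C2;
d1  = d(1);
d2  = d(2);
% Midpoint of the two closest points on the rays, in camera-1 coordinates
X   = ( d1*r1 + (C2 + d2*r2w) ) / 2;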