% * This code was used in the following articles:
% * [1] Learning 3-D Scene Structure from a Single Still Image,
% * Ashutosh Saxena, Min Sun, Andrew Y. Ng,
% * In ICCV workshop on 3D Representation for Recognition (3dRR-07), 2007.
% * (best paper)
% * [2] 3-D Reconstruction from Sparse Views using Monocular Vision,
% * Ashutosh Saxena, Min Sun, Andrew Y. Ng,
% * In ICCV workshop on Virtual Representations and Modeling
% * of Large-scale environments (VRML), 2007.
% * [3] 3-D Depth Reconstruction from a Single Still Image,
% * Ashutosh Saxena, Sung H. Chung, Andrew Y. Ng.
% * International Journal of Computer Vision (IJCV), Aug 2007.
% * [6] Learning Depth from Single Monocular Images,
% * Ashutosh Saxena, Sung H. Chung, Andrew Y. Ng.
% * In Neural Information Processing Systems (NIPS) 18, 2005.
% *
% * These articles are available at:
% * http://make3d.stanford.edu/publications
% *
% * We request that you cite the papers [1], [3] and [6] in any of
% * your reports that uses this code.
% * Further, if you use the code in image3dstiching/ (multiple image version),
% * then please cite [2].
% *
% * If you use the code in third_party/, then PLEASE CITE and follow the
% * LICENSE OF THE CORRESPONDING THIRD PARTY CODE.
% *
% * Finally, this code is for non-commercial use only. For further
% * information and to obtain a copy of the license, see
% *
% * http://make3d.stanford.edu/publications/code
% *
% * Also, the software distributed under the License is distributed on an
% * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
% * express or implied. See the License for the specific language governing
% * permissions and limitations under the License.
% *
% */
function [ Rc1_2, Rc2_1, ConS1_2, ConS2_1, RoughConS1_2, RoughConS2_1] = EffCalMatchSearchRegin(defaultPara, ScaleImg1, ScaleImg2, x1, x2, R, T, D1, D2, FlagDist)

% EffCalMatchSearchRegin  Calculate the constraint of the SurfMatch search space
% with different information given:
%  1) estimated Rotation and Translation matrices and depths
%  2) estimated Rotation and Translation matrices only (no depths)
%
% Inputs:
%  defaultPara - parameter struct; fields read here: InrinsicK1, InrinsicK2,
%                NegativeDepthTolerence, MaxRatio, VertVar, FarestDist, Closestdist
%  ScaleImg1/2 - image sizes of Img1 / Img2 (index 1 = x extent, index 2 = y extent)
%  x1, x2      - (2 x K) feature positions in Img1 / Img2
%  R           - (6 x 3) stacked rotations [R1_2; R2_1], may be empty
%  T           - (6 x 1) stacked translations [T1_2; T2_1], may be empty
%  D1, D2      - (1 x K) rough depth estimates, may be empty
%  FlagDist    - nonzero enables the debug display branch at the bottom
%
% Return:
%  Rc1_2 - (4 x length(x1)) : each column is a vectorized 2x2 rotation matrix;
%          constraint for Img1 as Target, Img2 as Field
%  Rc2_1 - (4 x length(x2)) : each column is a vectorized 2x2 rotation matrix;
%          constraint for Img2 as Target, Img1 as Field
%  ConS1_2 - (4 x length(x1)) constraint for Img1 as Target, Img2 as Field
%     ConS1_2([1 2],:) - reference corner for the constraint square (x y)
%     ConS1_2(3,:)     - square width along the epipolar line
%     ConS1_2(4,:)     - square height orthogonal to the epipolar line
%  ConS2_1 - (4 x length(x2)) constraint for Img2 as Target, Img1 as Field
%     (same row layout as ConS1_2)
%  RoughConS1_2 - (4 x length(x1)) axis-aligned constraint (rotation not allowed)
%     RoughConS1_2([1 2 3 4],:) = [xmax; xmin; ymax; ymin]
%  RoughConS2_1 - (4 x length(x2)) axis-aligned constraint (rotation not allowed)
%     RoughConS2_1([1 2 3 4],:) = [xmax; xmin; ymax; ymin]

% initialize parameters
NegativeDepthTolerence = defaultPara.NegativeDepthTolerence;
MaxRatio = defaultPara.MaxRatio; % e.g. 300
MinRatio = 1/MaxRatio;
HeightImg1 = defaultPara.VertVar*max( ScaleImg1);
HeightImg2 = defaultPara.VertVar*max( ScaleImg2);

K1 = size(x1,2);
K2 = size(x2,2);
x1 = [x1; ones(1,K1)]; % homogeneous image coordinates
x2 = [x2; ones(1,K2)];
% Hoist the intrinsic inverses: the original recomputed inv(...) many times.
invK1 = inv(defaultPara.InrinsicK1);
invK2 = inv(defaultPara.InrinsicK2);
% (Cleanup: the original computed x1_calib/x2_calib here but never used them.)

if isempty( R) || isempty( T)
	% No motion estimate available: the only constraint is the full image extent.
	Rc1_2 = zeros(4, K1);
	Rc1_2( [1 4],:) = 1; % identity 2x2 rotation for every column
	Rc2_1 = zeros(4, K2);
	Rc2_1( [1 4],:) = 1;
	ConS1_2(1,:) = zeros(1, K1);
	ConS1_2(2,:) = ScaleImg2(2)/2*ones(1, K1);
	ConS1_2(3,:) = ScaleImg2(1);
	ConS1_2(4,:) = ScaleImg2(2)/2;
	ConS2_1(1,:) = zeros(1, K2);
	ConS2_1(2,:) = ScaleImg1(2)/2*ones(1, K2);
	ConS2_1(3,:) = ScaleImg1(1);
	ConS2_1(4,:) = ScaleImg1(2)/2;
	RoughConS1_2(1,:) = ScaleImg2(1)*ones(1,K1);
	RoughConS1_2(2,:) = 0;
	RoughConS1_2(3,:) = ScaleImg2(2)*ones(1,K1);
	RoughConS1_2(4,:) = 0;
	RoughConS2_1(1,:) = ScaleImg1(1)*ones(1,K2);
	RoughConS2_1(2,:) = 0;
	RoughConS2_1(3,:) = ScaleImg1(2)*ones(1,K2);
	RoughConS2_1(4,:) = 0;
else
	R1_2 = R(1:3,:);
	R2_1 = R(4:6,:);
	T1_2 = T(1:3,:);
	T2_1 = T(4:6,:);

	% skew-symmetric (cross-product) matrix of T1_2
	T1_2_hat = [[0 -T1_2(3) T1_2(2)];...
	            [T1_2(3) 0 -T1_2(1)];...
	            [-T1_2(2) T1_2(1) 0]];
	E = T1_2_hat*R1_2; % essential matrix
	% Fundamental matrix F = K2^-T * E * K1^-1.
	% Bug fix: the original line had no semicolon and printed F on every call.
	F = inv(defaultPara.InrinsicK2')*E*invK1;

	% I1 project on I2 ==========================================
	% 1) calculate the closest and farthest depth that can be seen from Img2
	% find the two end points of each epipolar line on Img2
	[ EndPointsImg2 ] = EndPointsFromF(F, x1, ScaleImg2);
	[ EndPointsDepthImg1(1,:) dump Error] = triangulation( defaultPara, R1_2, T1_2, [x1; invK1*[EndPointsImg2(1:2,:); ones(1,K1)]]);
	% NOTE(review): both end points lie on Img2, yet the first is calibrated with
	% InrinsicK1 and the second with InrinsicK2 -- preserved as-is; confirm which
	% intrinsic matrix is intended (they only coincide if K1 == K2).
	[ EndPointsDepthImg1(2,:) dump Error] = triangulation( defaultPara, R1_2, T1_2, [x1; invK2*[EndPointsImg2(3:4,:); ones(1,K1)]]);
	EndPointsDepthImg1 = sort(EndPointsDepthImg1,1); % ascending order from top to bottom

	% 2) prune the depth range
	if ~isempty( D1 )
		MaxD1 = D1*MaxRatio;
		MinD1 = D1*MinRatio;
		% clamp into the interval spanned by the epipolar end-point depths
		MaxD1 = min(MaxD1, EndPointsDepthImg1(2,:));
		MaxD1 = max(MaxD1, EndPointsDepthImg1(1,:));
		MinD1 = max(MinD1, EndPointsDepthImg1(1,:));
		MinD1 = min(MinD1, EndPointsDepthImg1(2,:));
	else
		MaxD1 = EndPointsDepthImg1(2,:);
		MinD1 = EndPointsDepthImg1(1,:);
	end
	% prune by additional global bounds ======== optional
	MaxD1 = min(MaxD1, defaultPara.FarestDist);
	MinD1 = max(MinD1, defaultPara.Closestdist);
	% ==============================================

	% calculate the projected positions at the two depth extremes
	x1CaMax3D = invK1*(x1.*repmat(MaxD1,3,1)); % 3-D position in camera 1 coordinates (3 by n)
	x1CaMin3D = invK1*(x1.*repmat(MinD1,3,1)); % 3-D position in camera 1 coordinates (3 by n)
	x1CaMaxHomo = [ x1CaMax3D; ones(1,K1)]; % homogeneous coordinates (4 by n)
	x1CaMinHomo = [ x1CaMin3D; ones(1,K1)]; % homogeneous coordinates (4 by n)
	x1_2Max3D = [R1_2 T1_2]*x1CaMaxHomo; % 3-D position in camera 2 coordinates (3 by n)
	x1_2MaxHomo = defaultPara.InrinsicK2*x1_2Max3D; % homogeneous image coordinates in camera 2 (3 by n)
	x1_2Max = [ x1_2MaxHomo(1,:)./x1_2MaxHomo(3,:); x1_2MaxHomo(2,:)./x1_2MaxHomo(3,:)]; % image coordinates (2 by n)
	x1_2Min3D = [R1_2 T1_2]*x1CaMinHomo; % 3-D position in camera 2 coordinates (3 by n)
	x1_2MinHomo = defaultPara.InrinsicK2*x1_2Min3D; % homogeneous image coordinates in camera 2 (3 by n)
	x1_2Min = [ x1_2MinHomo(1,:)./x1_2MinHomo(3,:); x1_2MinHomo(2,:)./x1_2MinHomo(3,:)]; % image coordinates (2 by n)

	% expand the search space a little in case R and T are not accurate enough
	% (order matters: the updated x1_2Max feeds into the x1_2Min expansion)
	x1_2Max = x1_2Max + (x1_2Max - x1_2Min)*NegativeDepthTolerence; %Min529
	x1_2Min = x1_2Min + (x1_2Min - x1_2Max)*NegativeDepthTolerence; %Min529

	% define the constraint (simple rectangle)
	% NOTE(review): the region lives in Img2 but is sized with HeightImg1
	% (derived from ScaleImg1) -- preserved; confirm this is intentional.
	[ Rc1_2, ConS1_2, RoughConS1_2 ] = Points2SqareConstrain( [ x1_2Max; x1_2Min], HeightImg1);

	% ===========================================================

	% I2 project on I1 ==========================================
	% 1) calculate the closest and farthest depth that can be seen from Img1
	% find the two end points of each epipolar line on Img1
	[ EndPointsImg1 ] = EndPointsFromF(F', x2, ScaleImg1);
	% NOTE(review): same intrinsic-matrix asymmetry as above (K1 for the first
	% end point, K2 for the second, both on Img1) -- preserved; verify.
	[ EndPointsDepthImg2(1,:) dump Error] = triangulation( defaultPara, R2_1, T2_1, [x2; invK1*[EndPointsImg1(1:2,:); ones(1,K2)]]);
	[ EndPointsDepthImg2(2,:) dump Error] = triangulation( defaultPara, R2_1, T2_1, [x2; invK2*[EndPointsImg1(3:4,:); ones(1,K2)]]);
	EndPointsDepthImg2 = sort(EndPointsDepthImg2,1); % ascending order from top to bottom

	% 2) prune the depth range
	if ~isempty( D2)
		MaxD2 = D2*MaxRatio;
		MinD2 = D2*MinRatio;
		% clamp into the interval spanned by the epipolar end-point depths
		MaxD2 = min(MaxD2, EndPointsDepthImg2(2,:));
		MaxD2 = max(MaxD2, EndPointsDepthImg2(1,:));
		MinD2 = max(MinD2, EndPointsDepthImg2(1,:));
		MinD2 = min(MinD2, EndPointsDepthImg2(2,:));
	else
		MaxD2 = EndPointsDepthImg2(2,:);
		MinD2 = EndPointsDepthImg2(1,:);
	end
	% prune by additional global bounds ======== optional
	MaxD2 = min(MaxD2, defaultPara.FarestDist);
	MinD2 = max(MinD2, defaultPara.Closestdist);
	% ==============================================

	% calculate the projected positions at the two depth extremes
	x2CaMax3D = invK2*(x2.*repmat(MaxD2,3,1)); % 3-D position in camera 2 coordinates (3 by n)
	x2CaMin3D = invK2*(x2.*repmat(MinD2,3,1)); % 3-D position in camera 2 coordinates (3 by n)
	x2CaMaxHomo = [ x2CaMax3D; ones(1,K2)]; % homogeneous coordinates (4 by n)
	x2CaMinHomo = [ x2CaMin3D; ones(1,K2)]; % homogeneous coordinates (4 by n)
	x2_1Max3D = [R2_1 T2_1]*x2CaMaxHomo; % 3-D position in camera 1 coordinates (3 by n)
	% NOTE(review): this projects into camera 1 but multiplies by InrinsicK2
	% (likely a copy-paste of the block above) -- preserved; should probably be
	% InrinsicK1 if the two cameras have different intrinsics -- confirm.
	x2_1MaxHomo = defaultPara.InrinsicK2*x2_1Max3D; % homogeneous image coordinates (3 by n)
	x2_1Max = [ x2_1MaxHomo(1,:)./x2_1MaxHomo(3,:); x2_1MaxHomo(2,:)./x2_1MaxHomo(3,:)]; % image coordinates (2 by n)
	x2_1Min3D = [R2_1 T2_1]*x2CaMinHomo; % 3-D position in camera 1 coordinates (3 by n)
	x2_1MinHomo = defaultPara.InrinsicK2*x2_1Min3D; % homogeneous image coordinates (3 by n)
	x2_1Min = [ x2_1MinHomo(1,:)./x2_1MinHomo(3,:); x2_1MinHomo(2,:)./x2_1MinHomo(3,:)]; % image coordinates (2 by n)

	% expand the search space a little in case R and T are not accurate enough
	x2_1Max = x2_1Max + (x2_1Max - x2_1Min)*NegativeDepthTolerence; %Min529
	x2_1Min = x2_1Min + (x2_1Min - x2_1Max)*NegativeDepthTolerence; %Min529

	% define the constraint (simple rectangle)
	[ Rc2_1, ConS2_1, RoughConS2_1 ] = Points2SqareConstrain( [ x2_1Max; x2_1Min], HeightImg2);

	% ===========================================================
	% Debug display of the search regions.
	% Bug fix: the original tested undefined 'FlagDisp' (the parameter is
	% FlagDist) and passed undefined 'tempConSConS1_2'/'tempConSConS2_1'
	% (now ConS1_2/ConS2_1).
	if FlagDist
		%figure;
		%dispMatchSearchRegin(I1, I2, x1, x2, ConS1_2, ConS2_1, F, ...
		%x1_2Max, MaxD1, x1_2Min, MinD1, ...
		%x2_1Max, MaxD2, x2_1Min, MinD2, ...
		%FlagRotate, 'Stacking', 'h', 'Interactive', 0);
		figure;
		% NOTE(review): I1, I2 and FlagRotate are not defined anywhere in this
		% function; this debug branch errors if reached -- TODO: pass them in.
		dispMatchSearchRegin(I1, I2, x1, x2, ConS1_2, ConS2_1, F, FlagRotate, 'Stacking', 'v', 'Interactive', 0);
	end
end
end

% return