/*
 Copyright (C) 2008 Verena Kaynig.

 This program is free software; you can redistribute it and/or
 modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation (http://www.gnu.org/licenses/gpl.txt )

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with this program; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
/* ****************************************************************************
 * This ImageJ plugin estimates a non-linear lens distortion from the given
 * images and corrects all images with the same transformation.
 *
 * TODO:
 *   - show histogram of beta coefficients to evaluate kernel dimension needed
 *   - show SIFT matches before and after correction?
 *   - give numerical xcorr results in a better manner
 *   - show visual impression of the stitching?
 *   - do mask images properly to incorporate into TrakEM
 *
 * Author: Verena Kaynig
 * Contact: verena.kaynig@inf.ethz.ch
 *
 * SIFT implementation: Stephan Saalfeld
 * **************************************************************************** */
package lenscorrection;

import ij.IJ;
import ij.ImagePlus;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;
import ij.io.DirectoryChooser;
import ij.io.FileSaver;
import ij.io.Opener;

import mpi.fruitfly.general.MultiThreading;
import mpicbg.models.*;
import mpicbg.ij.SIFT;
import mpicbg.imagefeatures.*;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.awt.Color;
import java.awt.geom.AffineTransform;

import org.jfree.chart.*;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.data.category.DefaultCategoryDataset;

import Jama.Matrix;
public class Distortion_Correction implements PlugIn
{
	static public class BasicParam
	{
		final public FloatArray2DSIFT.Param sift = new FloatArray2DSIFT.Param();

		/**
		 * Closest/next closest neighbour distance ratio
		 */
		public float rod = 0.92f;

		/**
		 * Maximal allowed alignment error in px
		 */
		public float maxEpsilon = 32.0f;

		/**
		 * Inlier/candidates ratio
		 */
		public float minInlierRatio = 0.2f;

		/**
		 * Implemented transformation models for choice
		 */
		final static public String[] modelStrings = new String[]{ "Translation", "Rigid", "Similarity", "Affine" };
		public int expectedModelIndex = 1;

		/**
		 * Order of the polynomial kernel
		 */
		public int dimension = 5;

		/**
		 * Regularization factor
		 */
		public double lambda = 0.01;

		public BasicParam()
		{
			sift.maxOctaveSize = 1600;
			sift.minOctaveSize = 400;
		}
		public void addFields( final GenericDialog gd )
		{
			SIFT.addFields( gd, sift );
			gd.addNumericField( "closest/next_closest_ratio :", rod, 2 );
			gd.addMessage( "Geometric Consensus Filter :" );
			gd.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
			gd.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
			gd.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );
			gd.addMessage( "Lens Model :" );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
		}
		public boolean readFields( final GenericDialog gd )
		{
			SIFT.readFields( gd, sift );
			rod = ( float )gd.getNextNumber();
			maxEpsilon = ( float )gd.getNextNumber();
			minInlierRatio = ( float )gd.getNextNumber();
			expectedModelIndex = gd.getNextChoiceIndex();
			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();
			return !gd.invalidNumber();
		}
		public boolean setup( final String title )
		{
			final GenericDialog gd = new GenericDialog( title );
			addFields( gd );
			do
			{
				gd.showDialog();
				if ( gd.wasCanceled() ) return false;
			}
			while ( !readFields( gd ) );
			return true;
		}
		@Override
		public BasicParam clone()
		{
			final BasicParam p = new BasicParam();
			p.dimension = dimension;
			p.expectedModelIndex = expectedModelIndex;
			p.lambda = lambda;
			p.maxEpsilon = maxEpsilon;
			p.minInlierRatio = minInlierRatio;
			p.rod = rod;
			return p;
		}
	}
	static public class PluginParam extends BasicParam
	{
		public String source_dir = "";
		public String target_dir = "";
		public String saveFileName = "distCorr.txt";

		public boolean applyCorrection = true;
		public boolean visualizeResults = true;

		public int numberOfImages = 9;
		public int firstImageIndex = 0;

		/**
		 * Original and calibrated images are supposed to have the same names,
		 * but are in different directories.
		 */
		public String[] names;

		public int saveOrLoad = 0;
		@Override
		public void addFields( final GenericDialog gd )
		{
			super.addFields( gd );
		}

		@Override
		public boolean readFields( final GenericDialog gd )
		{
			super.readFields( gd );
			return !gd.invalidNumber();
		}
		/**
		 * Setup as a three step dialog.
		 */
		@Override
		public boolean setup( final String title )
		{
			while ( source_dir.isEmpty() )
			{
				final DirectoryChooser dc = new DirectoryChooser( "Calibration Images" );
				source_dir = dc.getDirectory();
				if ( null == source_dir ) return false;

				source_dir = source_dir.replace( '\\', '/' );
				if ( !source_dir.endsWith( "/" ) ) source_dir += "/";
			}

			final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
			names = new File( source_dir ).list(
					new FilenameFilter()
					{
						public boolean accept( File dir, String name )
						{
							int idot = name.lastIndexOf( '.' );
							if ( -1 == idot ) return false;
							return exts.contains( name.substring( idot ).toLowerCase() );
						}
					} );
			Arrays.sort( names );

			final GenericDialog gd = new GenericDialog( title );
			gd.addNumericField( "number_of_images :", 9, 0 );
			gd.addChoice( "first_image :", names, names[ 0 ] );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
			gd.addCheckbox( "apply_correction_to_images", applyCorrection );
			gd.addCheckbox( "visualize_results", visualizeResults );
			final String[] options = new String[]{ "save", "load" };
			gd.addChoice( "What to do? ", options, options[ saveOrLoad ] );
			gd.addStringField( "file_name: ", saveFileName );
			gd.showDialog();
			if ( gd.wasCanceled() ) return false;

			numberOfImages = ( int )gd.getNextNumber();
			firstImageIndex = gd.getNextChoiceIndex();
			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();
			applyCorrection = gd.getNextBoolean();
			visualizeResults = gd.getNextBoolean();
			saveOrLoad = gd.getNextChoiceIndex();
			saveFileName = gd.getNextString();

			if ( saveOrLoad == 0 || visualizeResults )
			{
				final GenericDialog gds = new GenericDialog( title );
				SIFT.addFields( gds, sift );
				gds.addNumericField( "closest/next_closest_ratio :", rod, 2 );
				gds.addMessage( "Geometric Consensus Filter:" );
				gds.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
				gds.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
				gds.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );
				gds.showDialog();
				if ( gds.wasCanceled() ) return false;

				SIFT.readFields( gds, sift );
				rod = ( float )gds.getNextNumber();
				maxEpsilon = ( float )gds.getNextNumber();
				minInlierRatio = ( float )gds.getNextNumber();
				expectedModelIndex = gds.getNextChoiceIndex();

				return !( gd.invalidNumber() || gds.invalidNumber() );
			}

			return !gd.invalidNumber();
		}
	}
	static final public BasicParam p = new BasicParam();
	static final public PluginParam sp = new PluginParam();

	NonLinearTransform nlt = new NonLinearTransform();
	AbstractAffineModel2D< ? >[] models;
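	/**
	 * Plugin entry point: extracts SIFT features from the calibration images,
	 * estimates pairwise correspondences and affine models, fits (or loads) the
	 * polynomial distortion field, optionally corrects all images and evaluates
	 * the result by cross correlation.
	 */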
	public void run( String arg )
	{
		if ( !sp.setup( "Lens Correction" ) ) return;

		IJ.log( sp.source_dir + sp.names[ 0 ] );
		final ImagePlus imgTmp = new Opener().openImage( sp.source_dir + sp.names[ 0 ] );
		final int imageWidth = imgTmp.getWidth(), imageHeight = imgTmp.getHeight();
		/* imgTmp was just needed to get width and height of the images */

		List< List< PointMatch > > inliers = null;
		List< Feature >[] siftFeatures = extractSIFTFeaturesThreaded( sp.numberOfImages, sp.source_dir, sp.names );

		List< PointMatch >[] inliersTmp = new ArrayList[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];
		models = new AbstractAffineModel2D[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];

		IJ.showStatus( "Estimating Correspondences" );
		for ( int i = 0; i < sp.numberOfImages; ++i )
		{
			IJ.log( "Estimating correspondences of image " + i );
			IJ.showProgress( ( i + 1 ), sp.numberOfImages );
			extractSIFTPointsThreaded( i, siftFeatures, inliersTmp, models );
		}

		int wholeCount = 0;
		inliers = new ArrayList< List< PointMatch > >();
		for ( int i = 0; i < inliersTmp.length; i++ )
		{
			// if the list at inliersTmp[i] contains only one null element,
			// its size is still 1
			if ( inliersTmp[i].size() > 10 )
			{
				wholeCount += inliersTmp[i].size();
			}
			// every list is added, otherwise the model indices would no longer
			// correspond to the inlier positions in the list
			inliers.add( inliersTmp[i] );
		}
		// this is really really ugly
		double[][] tp = new double[wholeCount][6];
		double[][] h1 = new double[wholeCount][2];
		double[][] h2 = new double[wholeCount][2];
		int count = 0;

		for ( int i = 0; i < inliers.size(); ++i )
		{
			if ( inliers.get(i).size() > 10 )
			{
				double[][] points1 = new double[inliers.get(i).size()][2];
				double[][] points2 = new double[inliers.get(i).size()][2];

				for ( int j = 0; j < inliers.get(i).size(); j++ )
				{
					float[] tmp1 = inliers.get(i).get(j).getP1().getL();
					float[] tmp2 = inliers.get(i).get(j).getP2().getL();

					points1[j][0] = (double) tmp1[0];
					points1[j][1] = (double) tmp1[1];
					points2[j][0] = (double) tmp2[0];
					points2[j][1] = (double) tmp2[1];

					h1[count] = new double[] { (double) tmp1[0], (double) tmp1[1] };
					h2[count] = new double[] { (double) tmp2[0], (double) tmp2[1] };

					models[i].createAffine().getMatrix( tp[count] );
					count++;
				}
			}
		}

		if ( sp.saveOrLoad == 0 )
		{
			nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
			nlt.visualizeSmall( sp.lambda );

			while ( true )
			{
				final GenericDialog gdl = new GenericDialog( "New lambda?" );
				gdl.addMessage( "If the distortion field shows a clear translation, \n it is likely that you need to increase lambda." );
				gdl.addNumericField( "lambda :", sp.lambda, 6 );
				gdl.showDialog();
				if ( gdl.wasCanceled() ) break;
				sp.lambda = gdl.getNextNumber();
				nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
				nlt.visualizeSmall( sp.lambda );
			}
			nlt.save( sp.source_dir + sp.saveFileName );
		}
		// after all preprocessing is done, estimate the distortion correction transform

		if ( sp.saveOrLoad == 1 )
		{
			nlt.load( sp.source_dir + sp.saveFileName );
		}

		if ( sp.applyCorrection || sp.visualizeResults )
		{
			sp.target_dir = correctImages();
		}

		if ( sp.visualizeResults )
		{
			IJ.log( "call nlt.visualize()" );
			nlt.visualize();
			IJ.log( "call evaluateCorrection(inliers)" );
			evaluateCorrection( inliers );
		}
		//System.out.println("FINISHED");
	}
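	/**
	 * Compares each calibration image with the transformed versions of its
	 * partner images before and after lens correction, using normalized cross
	 * correlation of the intensities and of the gradient magnitudes, plots the
	 * results as bar charts and writes them to xcorrData.log.
	 */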
	public void evaluateCorrection( List< List< PointMatch > > inliers )
	{
		IJ.showStatus( "Evaluating Distortion Correction" );
		double[][] original = new double[ sp.numberOfImages ][ 2 ];
		double[][] corrected = new double[ sp.numberOfImages ][ 2 ];

		for ( int i = sp.firstImageIndex; i < sp.numberOfImages; i++ )
		{
			original[ i ] = evaluateCorrectionXcorr( i, sp.source_dir );
			corrected[ i ] = evaluateCorrectionXcorr( i, sp.target_dir );
		}

		DefaultCategoryDataset dataset = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGain = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGrad = new DefaultCategoryDataset();

		for ( int i = 0; i < original.length; ++i )
		{
			dataset.setValue( Math.abs( original[i][0] ), "before", "image" + i );
			dataset.setValue( Math.abs( corrected[i][0] ), "after", "image" + i );
			datasetGrad.setValue( Math.abs( original[i][1] ), "before", "image" + i );
			datasetGrad.setValue( Math.abs( corrected[i][1] ), "after", "image" + i );

			datasetGain.setValue( Math.abs( corrected[i][0] ) - Math.abs( original[i][0] ), "gray", "image" + i );
			datasetGain.setValue( Math.abs( corrected[i][1] ) - Math.abs( original[i][1] ), "grad", "image" + i );
		}
		final JFreeChart chart = ChartFactory.createBarChart(
				"Xcorr before and after correction",
				"image",
				"xcorr",
				dataset,
				PlotOrientation.VERTICAL,
				false, true, false );
		final ImagePlus imp = new ImagePlus( "Plot", chart.createBufferedImage( 500, 300 ) );
		imp.show();

		final JFreeChart chartGrad = ChartFactory.createBarChart(
				"XcorrGradient before and after correction",
				"image",
				"xcorr",
				datasetGrad,
				PlotOrientation.VERTICAL,
				false, true, false );
		final ImagePlus impGrad = new ImagePlus( "Plot", chartGrad.createBufferedImage( 500, 300 ) );
		impGrad.show();

		final JFreeChart chartGain = ChartFactory.createBarChart(
				"Xcorr gain through correction",
				"image",
				"xcorr gain",
				datasetGain,
				PlotOrientation.VERTICAL,
				false, true, false );
		final ImagePlus impGain = new ImagePlus( "Plot", chartGain.createBufferedImage( 500, 300 ) );
		impGain.show();

		visualizePoints( inliers );

		// write xcorr data to file
		String original0 = "", original1 = "", corrected0 = "", corrected1 = "", gain0 = "", gain1 = "";
		for ( int i = 0; i < original.length; ++i )
		{
			original0 = original0 + Double.toString( original[i][0] ) + "; ";
			original1 = original1 + Double.toString( original[i][1] ) + "; ";
			corrected0 = corrected0 + Double.toString( corrected[i][0] ) + "; ";
			corrected1 = corrected1 + Double.toString( corrected[i][1] ) + "; ";
			gain0 = gain0 + Double.toString( Math.abs( corrected[i][0] ) - Math.abs( original[i][0] ) ) + "; ";
			gain1 = gain1 + Double.toString( Math.abs( corrected[i][1] ) - Math.abs( original[i][1] ) ) + "; ";
		}

		try
		{
			BufferedWriter out = new BufferedWriter( new FileWriter( sp.source_dir + "xcorrData.log" ) );

			out.write( original0 );
			out.newLine();
			out.write( original1 );
			out.newLine();
			out.write( corrected0 );
			out.newLine();
			out.write( corrected1 );
			out.newLine();
			out.write( gain0 );
			out.newLine();
			out.write( gain1 );
			out.newLine();
			out.close();
		}
		catch ( Exception e )
		{
			System.err.println( "Error: " + e.getMessage() );
		}
	}
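	/**
	 * Matches the SIFT features of image 'index' against those of all other
	 * images, filters the candidate matches with RANSAC under the chosen
	 * transformation model and collects the resulting inliers and models.
	 */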
	protected void extractSIFTPoints(
			final int index,
			List< Feature >[] siftFeatures,
			List< List< PointMatch > > inliers,
			List< AbstractAffineModel2D< ? > > models )
	{
		// save all matching candidates
		List< List< PointMatch > > candidates = new ArrayList< List< PointMatch > >();

		for ( int j = 0; j < siftFeatures.length; j++ )
		{
			if ( index == j ) continue;
			candidates.add( FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ j ], 1.5f, null, Float.MAX_VALUE, 0.5f ) );
		}

		// get rid of the outliers and save the transformations to match the inliers
		for ( int i = 0; i < candidates.size(); ++i )
		{
			List< PointMatch > tmpInliers = new ArrayList< PointMatch >();

			final AbstractAffineModel2D< ? > m;
			switch ( sp.expectedModelIndex )
			{
			case 0:
				m = new TranslationModel2D();
				break;
			case 1:
				m = new RigidModel2D();
				break;
			case 2:
				m = new SimilarityModel2D();
				break;
			case 3:
				m = new AffineModel2D();
				break;
			default:
				return;
			}

			try
			{
				m.filterRansac( candidates.get( i ), tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10 );
			}
			catch( NotEnoughDataPointsException e ) { e.printStackTrace(); }

			inliers.add( tmpInliers );
			models.add( m );
		}
	}
	/**
	 * Estimates a polynomial distortion model for a set of
	 * {@linkplain PointMatch corresponding points} and returns the inverse of
	 * this model which then may be used to undo the distortion.
	 *
	 * @param matches
	 * @param affines
	 *            a list of affines in the same order as matches that transfer a match
	 *            collection into a common image frame
	 * @param dimension the order of the polynomial model
	 * @param lambda regularization factor
	 * @param imageWidth
	 * @param imageHeight
	 *
	 * @return a polynomial distortion model to undo the distortion
	 */
	static public NonLinearTransform createInverseDistortionModel(
			final List< Collection< PointMatch > > matches,
			final List< AffineTransform > affines,
			final int dimension,
			final double lambda,
			final int imageWidth,
			final int imageHeight )
	{
		int wholeCount = 0;
		for ( Collection< PointMatch > l : matches )
		{
			wholeCount += l.size();
		}

		final double[][] tp = new double[wholeCount][6];
		final double[][] h1 = new double[wholeCount][2];
		final double[][] h2 = new double[wholeCount][2];
		int count = 0;

		for ( int i = 0; i < matches.size(); i++ )
		{
			if ( matches.get(i).size() > 10 )
			{
				final double[][] points1 = new double[matches.get(i).size()][2];
				final double[][] points2 = new double[matches.get(i).size()][2];

				int j = 0;
				for ( PointMatch match : matches.get( i ) )
				{
					final float[] tmp1 = match.getP1().getL();
					final float[] tmp2 = match.getP2().getL();

					points1[j][0] = (double) tmp1[0];
					points1[j][1] = (double) tmp1[1];
					points2[j][0] = (double) tmp2[0];
					points2[j][1] = (double) tmp2[1];

					h1[count] = new double[] { (double) tmp1[0], (double) tmp1[1] };
					h2[count] = new double[] { (double) tmp2[0], (double) tmp2[1] };

					affines.get( i ).getMatrix( tp[ count ] );
					count++;
					j++;
				}
			}
		}

		NonLinearTransform nlt = distortionCorrection( h1, h2, tp, dimension, lambda, imageWidth, imageHeight );
		nlt.inverseTransform( h1 );
		return nlt;
	}
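	/**
	 * Estimates the coefficients (beta) of the polynomial kernel expansion by
	 * solving a regularized linear least squares problem: the kernel-expanded
	 * coordinates of matching points, mapped through the given affine
	 * parameters, should agree with each other, while the lambda term pulls the
	 * solution towards the identity mapping. A small ridge (0.001 * I) keeps
	 * the normal matrix invertible.
	 */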
	static protected NonLinearTransform distortionCorrection( double[][] hack1, double[][] hack2, double[][] transformParams, int dimension, double lambda, int w, int h )
	{
		IJ.showStatus( "Getting the Distortion Field" );
		NonLinearTransform nlt = new NonLinearTransform( dimension, w, h );

		double[][] expandedX = nlt.kernelExpandMatrixNormalize( hack1 );
		double[][] expandedY = nlt.kernelExpandMatrix( hack2 );

		int s = expandedX[0].length;
		Matrix S1 = new Matrix( 2*s, 2*s );
		Matrix S2 = new Matrix( 2*s, 1 );

		for ( int i = 0; i < expandedX.length; i++ )
		{
			Matrix xk_ij = new Matrix( expandedX[i], 1 );
			Matrix xk_ji = new Matrix( expandedY[i], 1 );

			Matrix yk1a = xk_ij.minus( xk_ji.times( transformParams[i][0] ) );
			Matrix yk1b = xk_ij.times( 0.0 ).minus( xk_ji.times( -transformParams[i][2] ) );
			Matrix yk2a = xk_ij.times( 0.0 ).minus( xk_ji.times( -transformParams[i][1] ) );
			Matrix yk2b = xk_ij.minus( xk_ji.times( transformParams[i][3] ) );

			Matrix y = new Matrix( 2, 2*s );
			y.setMatrix( 0, 0, 0, s-1, yk1a );
			y.setMatrix( 0, 0, s, 2*s-1, yk1b );
			y.setMatrix( 1, 1, 0, s-1, yk2a );
			y.setMatrix( 1, 1, s, 2*s-1, yk2b );

			Matrix xk = new Matrix( 2, 2*expandedX[0].length );
			xk.setMatrix( 0, 0, 0, s-1, xk_ij );
			xk.setMatrix( 1, 1, s, 2*s-1, xk_ij );

			double[] vals = { hack1[i][0], hack1[i][1] };
			Matrix c = new Matrix( vals, 2 );

			Matrix X = xk.transpose().times( xk ).times( lambda );
			Matrix Y = y.transpose().times( y );

			S1 = S1.plus( Y.plus( X ) );

			double trans1 = ( transformParams[i][2] * transformParams[i][5] - transformParams[i][0] * transformParams[i][4] );
			double trans2 = ( transformParams[i][1] * transformParams[i][4] - transformParams[i][3] * transformParams[i][5] );
			double[] trans = { trans1, trans2 };

			Matrix translation = new Matrix( trans, 2 );
			Matrix YT = y.transpose().times( translation );
			Matrix XC = xk.transpose().times( c ).times( lambda );

			S2 = S2.plus( YT.plus( XC ) );
		}

		Matrix regularize = Matrix.identity( S1.getRowDimension(), S1.getColumnDimension() );
		Matrix beta = new Matrix( S1.plus( regularize.times( 0.001 ) ).inverse().times( S2 ).getColumnPackedCopy(), s );

		nlt.setBeta( beta.getArray() );
		nlt.inverseTransform( hack1 );
		return nlt;
	}
	protected String correctImages()
	{
		if ( !sp.applyCorrection )
		{
			sp.target_dir = System.getProperty( "user.dir" ).replace( '\\', '/' ) + "/distCorr_tmp/";
			System.out.println( "Tmp target directory: " + sp.target_dir );

			if ( new File( sp.target_dir ).exists() )
			{
				System.out.println( "removing old tmp directory!" );

				final String[] filesToDelete = new File( sp.target_dir ).list();
				for ( int i = 0; i < filesToDelete.length; i++ )
				{
					System.out.println( filesToDelete[ i ] );
					boolean deleted = new File( sp.target_dir + filesToDelete[ i ] ).delete();
					if ( !deleted ) IJ.log( "Error: Could not remove temporary directory!" );
				}
				new File( sp.target_dir ).delete();
			}
			try
			{
				// Create one directory
				boolean success = ( new File( sp.target_dir ) ).mkdir();
				if ( success ) new File( sp.target_dir ).deleteOnExit();
			}
			catch ( Exception e )
			{
				IJ.showMessage( "Error! Could not create temporary directory. " + e.getMessage() );
			}
		}
		if ( sp.target_dir.isEmpty() )
		{
			final DirectoryChooser dc = new DirectoryChooser( "Target Directory" );
			sp.target_dir = dc.getDirectory();
			if ( null == sp.target_dir ) return "";
			sp.target_dir = sp.target_dir.replace( '\\', '/' );
			if ( !sp.target_dir.endsWith( "/" ) ) sp.target_dir += "/";
		}

		final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
		final String[] namesTarget = new File( sp.target_dir ).list( new FilenameFilter()
		{
			public boolean accept( File dir, String name )
			{
				int idot = name.lastIndexOf( '.' );
				if ( -1 == idot ) return false;
				return exts.contains( name.substring( idot ).toLowerCase() );
			}
		} );

		if ( namesTarget.length > 0 ) IJ.showMessage( "Overwrite Message", "There are already images in that directory. These will be used for evaluation." );

		IJ.showStatus( "Correcting Images" );

		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( sp.applyCorrection ? 0 : sp.firstImageIndex );

		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				@Override
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );

					for ( int i = ai.getAndIncrement(); i < ( sp.applyCorrection ? sp.names.length : ( sp.firstImageIndex + sp.numberOfImages ) ); i = ai.getAndIncrement() )
					{
						IJ.log( "Correcting image " + sp.names[ i ] );
						final ImagePlus imps = new Opener().openImage( sp.source_dir + sp.names[ i ] );
						imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToShort( false ) );
						ImageProcessor[] transErg = nlt.transform( imps.getProcessor() );
						imps.setProcessor( imps.getTitle(), transErg[ 0 ] );
						if ( !sp.applyCorrection ) new File( sp.target_dir + sp.names[ i ] ).deleteOnExit();
						new FileSaver( imps ).saveAsTiff( sp.target_dir + sp.names[ i ] );
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		return sp.target_dir;
	}
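	/**
	 * For the image at 'index' in the given directory, transforms every other
	 * image into its frame using the stored affine models and collects the
	 * normalized cross correlation of the intensities and of the gradient
	 * magnitudes; the per-pair means (and medians) are computed and the means
	 * returned.
	 */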
	protected double[] evaluateCorrectionXcorr( int index, String directory )
	{
		ImagePlus im1 = new Opener().openImage( directory + sp.names[ index ] );
		im1.setProcessor( sp.names[ index ], im1.getProcessor().convertToShort( false ) );

		int count = 0;
		ArrayList< Double > xcorrVals = new ArrayList< Double >();
		ArrayList< Double > xcorrValsGrad = new ArrayList< Double >();

		for ( int i = 0; i < sp.numberOfImages; i++ )
		{
			if ( i == index ) continue;

			if ( models[ index * ( sp.numberOfImages - 1 ) + count ] == null )
			{
				count++;
				continue;
			}

			ImagePlus newImg = new Opener().openImage( directory + sp.names[ i + sp.firstImageIndex ] );
			newImg.setProcessor( newImg.getTitle(), newImg.getProcessor().convertToShort( false ) );

			newImg.setProcessor( sp.names[ i + sp.firstImageIndex ], applyTransformToImageInverse( models[ index * ( sp.numberOfImages - 1 ) + count ], newImg.getProcessor() ) );
			ImageProcessor testIp = im1.getProcessor().duplicate();

			// If you want to see the stitching improvement run this:
			// for ( int x = 0; x < testIp.getWidth(); x++ ){
			// 	for ( int y = 0; y < testIp.getHeight(); y++ ){
			// 		testIp.set( x, y, Math.abs( im1.getProcessor().get( x, y ) -
			// 			newImg.getProcessor().get( x, y ) ) );
			// 	}
			// }
			// ImagePlus testImg = new ImagePlus( sp.names[ index ] + " minus " +
			// 	sp.names[ i ], testIp );

			xcorrVals.add( getXcorrBlackOut( im1.getProcessor(), newImg.getProcessor() ) );
			xcorrValsGrad.add( getXcorrBlackOutGradient( im1.getProcessor(), newImg.getProcessor() ) );
			count++;
		}

		Collections.sort( xcorrVals );
		Collections.sort( xcorrValsGrad );

		double[] medians = { xcorrVals.get( xcorrVals.size() / 2 ), xcorrValsGrad.get( xcorrValsGrad.size() / 2 ) };

		double m1 = 0.0, m2 = 0.0;
		for ( int i = 0; i < xcorrVals.size(); i++ )
		{
			m1 += xcorrVals.get( i );
			m2 += xcorrValsGrad.get( i );
		}

		m1 /= xcorrVals.size();
		m2 /= xcorrVals.size();

		double[] means = { m1, m2 };
		return means;
	}
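	/**
	 * Maps an image through the inverse of the given affine model by pushing
	 * every source pixel to its inverse-transformed integer position (no
	 * interpolation, so gaps may remain).
	 */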
	ImageProcessor applyTransformToImageInverse( AbstractAffineModel2D< ? > a, ImageProcessor ip )
	{
		ImageProcessor newIp = ip.duplicate();

		for ( int x = 0; x < ip.getWidth(); x++ )
		{
			for ( int y = 0; y < ip.getHeight(); y++ )
			{
				float[] position = { (float)x, (float)y };
				// float[] newPosition = a.apply(position);
				float[] newPosition = { 0, 0 };
				try
				{
					newPosition = a.applyInverse( position );
				}
				catch ( NoninvertibleModelException e ) {}

				int xn = (int) newPosition[0];
				int yn = (int) newPosition[1];

				if ( ( xn >= 0 ) && ( yn >= 0 ) && ( xn < ip.getWidth() ) && ( yn < ip.getHeight() ) )
					newIp.set( xn, yn, ip.get( x, y ) );
			}
		}
		return newIp;
	}
	double getXcorrBlackOutGradient( ImageProcessor ip1, ImageProcessor ip2 )
	{
		ImageProcessor ip1g = getGradientSobel( ip1 );
		ImageProcessor ip2g = getGradientSobel( ip2 );

		return getXcorrBlackOut( ip1g, ip2g );
	}
	// Skips gradients that involve black pixels, so that the sharp border caused
	// by the nonlinear transformation does not disturb the gradient comparison.
	// FIXME: this should be handled by a mask image!
	ImageProcessor getGradientSobel( ImageProcessor ip )
	{
		ImageProcessor ipGrad = ip.duplicate();

		for ( int i = 1; i < ipGrad.getWidth() - 1; i++ )
		{
			for ( int j = 1; j < ipGrad.getHeight() - 1; j++ )
			{
				if ( ip.get(i-1,j-1)==0 || ip.get(i-1,j)==0 || ip.get(i-1,j+1)==0 ||
						ip.get(i,j-1)==0 || ip.get(i,j)==0 || ip.get(i,j+1)==0 ||
						ip.get(i+1,j-1)==0 || ip.get(i+1,j)==0 || ip.get(i+1,j+1)==0 )
					continue;

				double gradX = (double) -ip.get(i-1, j-1) - 2*ip.get(i-1,j) - ip.get(i-1,j+1)
						+ ip.get(i+1, j-1) + 2*ip.get(i+1,j) + ip.get(i+1,j+1);

				double gradY = (double) -ip.get(i-1, j-1) - 2*ip.get(i,j-1) - ip.get(i+1,j-1)
						+ ip.get(i-1, j+1) + 2*ip.get(i,j+1) + ip.get(i+1,j+1);

				double mag = Math.sqrt( gradX*gradX + gradY*gradY );
				ipGrad.setf( i, j, (float) mag );
			}
		}
		return ipGrad;
	}
	// Tested against a Matlab routine; the results agreed.
	static double getXcorrBlackOut( ImageProcessor ip1, ImageProcessor ip2 )
	{
		ip1 = ip1.convertToFloat();
		ip2 = ip2.convertToFloat();

		// If this is not done, the black area of the transformed image influences the xcorr result.
		// A better alternative would be to use mask images and only calculate the xcorr of
		// the region present in both images.
		for ( int i = 0; i < ip1.getWidth(); i++ )
		{
			for ( int j = 0; j < ip1.getHeight(); j++ )
			{
				if ( ip1.get( i, j ) == 0 )
					ip2.set( i, j, 0 );
				if ( ip2.get( i, j ) == 0 )
					ip1.set( i, j, 0 );
			}
		}

		// FloatProcessor ip1f = (FloatProcessor)ip1.convertToFloat();
		// FloatProcessor ip2f = (FloatProcessor)ip2.convertToFloat();

		float[] data1 = ( float[] )ip1.getPixels();
		float[] data2 = ( float[] )ip2.getPixels();

		double[] data1b = new double[data1.length];
		double[] data2b = new double[data2.length];

		int count = 0;
		double mean1 = 0.0, mean2 = 0.0;

		for ( int i = 0; i < data1.length; i++ )
		{
			//if ((data1[i] == 0) || (data2[i] == 0))
			//	continue;
			data1b[i] = data1[i];
			data2b[i] = data2[i];
			mean1 += data1[i];
			mean2 += data2[i];
			count++;
		}

		mean1 /= (double) count;
		mean2 /= (double) count;

		double L2_1 = 0.0, L2_2 = 0.0;
		for ( int i = 0; i < count; i++ )
		{
			L2_1 += ( data1b[i] - mean1 ) * ( data1b[i] - mean1 );
			L2_2 += ( data2b[i] - mean2 ) * ( data2b[i] - mean2 );
		}

		L2_1 = Math.sqrt( L2_1 );
		L2_2 = Math.sqrt( L2_2 );

		double xcorr = 0.0;
		for ( int i = 0; i < count; i++ )
		{
			xcorr += ( ( data1b[i] - mean1 ) / L2_1 ) * ( ( data2b[i] - mean2 ) / L2_2 );
		}

		//System.out.println("XcorrVal: " + xcorr);
		return xcorr;
	}
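	/**
	 * Draws the inlier correspondences onto a color image of the distortion
	 * field size: red dots mark the local coordinates of the second point of
	 * each match, blue dots the world coordinates of the first point.
	 */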
	void visualizePoints( List< List< PointMatch > > inliers )
	{
		ColorProcessor ip = new ColorProcessor( nlt.getWidth(), nlt.getHeight() );
		ip.setColor( Color.red );

		for ( int i = 0; i < inliers.size(); i++ )
		{
			for ( int j = 0; j < inliers.get(i).size(); j++ )
			{
				float[] tmp1 = inliers.get(i).get(j).getP1().getW();
				float[] tmp2 = inliers.get(i).get(j).getP2().getL();
				ip.setColor( Color.red );
				ip.drawDot( (int) tmp2[0], (int) tmp2[1] );
				ip.setColor( Color.blue );
				ip.drawDot( (int) tmp1[0], (int) tmp1[1] );
			}
		}

		ImagePlus points = new ImagePlus( "", ip );
		points.show();
	}
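	/**
	 * Least squares fit of an affine transformation that maps points1 onto
	 * points2, solved via the normal equations in homogeneous coordinates; the
	 * six parameters are written into every row of transformParams that is
	 * still marked with the placeholder value -10.
	 */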
	public void getTransform( double[][] points1, double[][] points2, double[][] transformParams )
	{
		double[][] p1 = new double[points1.length][3];
		double[][] p2 = new double[points2.length][3];

		for ( int i = 0; i < points1.length; i++ )
		{
			p1[i][0] = points1[i][0];
			p1[i][1] = points1[i][1];
			p1[i][2] = 1.0;

			p2[i][0] = points2[i][0];
			p2[i][1] = points2[i][1];
			p2[i][2] = 1.0;
		}

		Matrix s1 = new Matrix( p1 );
		Matrix s2 = new Matrix( p2 );
		Matrix t = ( s1.transpose().times( s1 ) ).inverse().times( s1.transpose() ).times( s2 );

		for ( int i = 0; i < transformParams.length; i++ )
		{
			if ( transformParams[i][0] == -10 )
			{
				transformParams[i][0] = t.get(0,0);
				transformParams[i][1] = t.get(0,1);
				transformParams[i][2] = t.get(1,0);
				transformParams[i][3] = t.get(1,1);
				transformParams[i][4] = t.get(2,0);
				transformParams[i][5] = t.get(2,1);
			}
		}
	}
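	/**
	 * Extracts SIFT features from all calibration images in parallel, one
	 * worker thread per available core, and returns one sorted feature list
	 * per image.
	 */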
	static List< Feature >[] extractSIFTFeaturesThreaded(
			final int numberOfImages, final String directory,
			final String[] names )
	{
		// extract all SIFT features
		final List< Feature >[] siftFeatures = new ArrayList[numberOfImages];
		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 ); // image index shared by the worker threads

		IJ.showStatus( "Extracting SIFT Features" );
		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ithread] = new Thread()
			{
				@Override
				public void run()
				{
					for ( int i = ai.getAndIncrement(); i < numberOfImages; i = ai.getAndIncrement() )
					{
						final ArrayList< Feature > fs = new ArrayList< Feature >();
						ImagePlus imps = new Opener().openImage( directory + names[i + sp.firstImageIndex] );
						imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToFloat() );

						FloatArray2DSIFT sift = new FloatArray2DSIFT( sp.sift.clone() );
						SIFT ijSIFT = new SIFT( sift );

						ijSIFT.extractFeatures( imps.getProcessor(), fs );

						Collections.sort( fs );
						IJ.log( "Extracting SIFT of image: " + i );
						siftFeatures[i] = fs;
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		return siftFeatures;
	}
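	/**
	 * Threaded correspondence extraction: matches the features of image
	 * 'index' against all other images in parallel, then filters each
	 * candidate set with RANSAC and stores the inliers and the fitted model at
	 * position index * (numberOfImages - 1) + i.
	 */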
	static protected void extractSIFTPointsThreaded(
			final int index,
			final List< Feature >[] siftFeatures,
			final List< PointMatch >[] inliers,
			final AbstractAffineModel2D< ? >[] models )
	{
		// save all matching candidates
		final List< PointMatch >[] candidates = new List[ siftFeatures.length - 1 ];

		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 );

		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				@Override
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );

					for ( int j = ai.getAndIncrement(); j < candidates.length; j = ai.getAndIncrement() )
					{
						int i = ( j < index ? j : j + 1 );
						candidates[ j ] = FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ i ], 1.5f, null, Float.MAX_VALUE, 0.5f );
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		// get rid of the outliers and save the rigid transformations to match the inliers
		final AtomicInteger ai2 = new AtomicInteger( 0 );
		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				@Override
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );
					for ( int i = ai2.getAndIncrement(); i < candidates.length; i = ai2.getAndIncrement() )
					{
						List< PointMatch > tmpInliers = new ArrayList< PointMatch >();

						// RigidModel2D.estimateBestModel(candidates.get(i),
						// tmpInliers, sp.min_epsilon, sp.max_epsilon,
						// sp.min_inlier_ratio);

						final AbstractAffineModel2D< ? > m;
						switch ( sp.expectedModelIndex )
						{
						case 0:
							m = new TranslationModel2D();
							break;
						case 1:
							m = new RigidModel2D();
							break;
						case 2:
							m = new SimilarityModel2D();
							break;
						case 3:
							m = new AffineModel2D();
							break;
						default:
							return;
						}

						boolean modelFound = false;
						try
						{
							modelFound = m.filterRansac( candidates[ i ], tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10 );
						}
						catch ( NotEnoughDataPointsException e )
						{
							modelFound = false;
						}

						if ( modelFound )
							IJ.log( "Model found:\n " + candidates[ i ].size() + " candidates\n " + tmpInliers.size() + " inliers\n " + String.format( "%.2f", m.getCost() ) + "px average displacement" );
						else
							IJ.log( "No Model found." );

						inliers[ index * ( sp.numberOfImages - 1 ) + i ] = tmpInliers;
						models[ index * ( sp.numberOfImages - 1 ) + i ] = m;
						// System.out.println("**** MODEL ADDED: " +
						// (index*(sp.numberOfImages-1)+i));
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );
	}
}