diff landmark_registration.py @ 2:4e089a0983b1 draft
"planemo upload for repository https://github.com/BMCV/galaxy-image-analysis/tools/landmark_registration/ commit 927b78d47c31714776ccdf3d16f26c3779298abb"
author | imgteam |
---|---|
date | Sun, 20 Feb 2022 15:46:58 +0000 |
parents | b0503eec7bd6 |
children | aee73493bf53 |
--- a/landmark_registration.py	Fri Feb 22 19:04:47 2019 -0500
+++ b/landmark_registration.py	Sun Feb 20 15:46:58 2022 +0000
@@ -1,27 +1,65 @@
-from skimage.measure import ransac
-from skimage.transform import AffineTransform
-import pandas as pd
-import numpy as np
+"""
+Copyright 2017-2022 Biomedical Computer Vision Group, Heidelberg University.
+
+Distributed under the MIT license.
+See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+
+"""
+
 import argparse
 
-def landmark_registration(points_file1, points_file2, out_file, residual_threshold=2, max_trials=100, delimiter="\t"):
-    points1 = pd.read_csv(points_file1, delimiter=delimiter)
-    points2 = pd.read_csv(points_file2, delimiter=delimiter)
+import numpy as np
+import pandas as pd
+from scipy.linalg import lstsq
+from skimage.measure import ransac
+from skimage.transform import AffineTransform
+
+
+def landmark_registration(pts_f1, pts_f2, out_f, res_th=None, max_ite=None, delimiter="\t"):
+
+    points1 = pd.read_csv(pts_f1, delimiter=delimiter)
+    points2 = pd.read_csv(pts_f2, delimiter=delimiter)
+
+    src = np.concatenate([np.array(points1['x']).reshape([-1, 1]),
+                          np.array(points1['y']).reshape([-1, 1])],
+                         axis=-1)
+    dst = np.concatenate([np.array(points2['x']).reshape([-1, 1]),
+                          np.array(points2['y']).reshape([-1, 1])],
+                         axis=-1)
 
-    src = np.concatenate([np.array(points1['x']).reshape([-1,1]), np.array(points1['y']).reshape([-1,1])], axis=-1)
-    dst = np.concatenate([np.array(points2['x']).reshape([-1,1]), np.array(points2['y']).reshape([-1,1])], axis=-1)
+    if res_th is not None and max_ite is not None:
+        model_robust, inliers = ransac((src, dst), AffineTransform, min_samples=3, residual_threshold=res_th, max_trials=max_ite)
+        pd.DataFrame(model_robust.params).to_csv(out_f, header=None, index=False, sep="\t")
 
-    model = AffineTransform()
-    model_robust, inliers = ransac((src, dst), AffineTransform, min_samples=3,
-                                   residual_threshold=residual_threshold, max_trials=max_trials)
-    pd.DataFrame(model_robust.params).to_csv(out_file, header=None, index=False, sep="\t")
+    else:
+        A = np.zeros((src.size, 6))
+        A[0:src.shape[0], [0, 1]] = src
+        A[0:src.shape[0], 2] = 1
+        A[src.shape[0]:, [3, 4]] = src
+        A[src.shape[0]:, 5] = 1
+        b = dst.T.flatten().astype('float64')
+        x = lstsq(A, b)
+
+        tmat = np.eye(3)
+        tmat[0, :] = x[0].take([0, 1, 2])
+        tmat[1, :] = x[0].take([3, 4, 5])
+        pd.DataFrame(tmat).to_csv(out_f, header=None, index=False, sep="\t")
+
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="Estimate transformation from points")
-    parser.add_argument("points_file1", help="Paste path to src points")
-    parser.add_argument("points_file2", help="Paste path to dst points")
-    parser.add_argument("warp_matrix", help="Paste path to warp_matrix.csv that should be used for transformation")
-    parser.add_argument("--residual_threshold", dest="residual_threshold", help="Maximum distance for a data point to be classified as an inlier.", type=float, default=2)
-    parser.add_argument("--max_trials", dest="max_trials", help="Maximum number of iterations for random sample selection.", type=int, default=100)
+    parser = argparse.ArgumentParser(description="Estimate affine transformation matrix based on landmark coordinates")
+    parser.add_argument("fn_pts1", help="Coordinates of SRC landmarks (tsv file)")
+    parser.add_argument("fn_pts2", help="Coordinates of DST landmarks (tsv file)")
+    parser.add_argument("fn_tmat", help="Path to the output (transformation matrix)")
+    parser.add_argument("--res_th", dest="res_th", type=float, help="Maximum distance for a data point to be classified as an inlier")
+    parser.add_argument("--max_ite", dest="max_ite", type=int, help="Maximum number of iterations for random sample selection")
     args = parser.parse_args()
-    landmark_registration(args.points_file1, args.points_file2, args.warp_matrix, residual_threshold=args.residual_threshold, max_trials=args.max_trials)
+
+    res_th = None
+    if args.res_th:
+        res_th = args.res_th
+    max_ite = None
+    if args.max_ite:
+        max_ite = args.max_ite
+
+    landmark_registration(args.fn_pts1, args.fn_pts2, args.fn_tmat, res_th=res_th, max_ite=max_ite)
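
For context, the revised script writes a 3×3 affine matrix (tab-separated, no header) estimated either robustly with RANSAC (when both `--res_th` and `--max_ite` are supplied) or by an ordinary least-squares fit otherwise. Below is a minimal sketch, not part of the changeset, of how such a matrix could be applied downstream with scikit-image; the file names `tmat.tsv`, `moving.png`, and `warped.png` are hypothetical placeholders.

```python
# Minimal sketch (not part of the changeset): load the matrix written by
# landmark_registration.py and warp the source image onto the destination frame.
# File names below are hypothetical placeholders.
import pandas as pd
from skimage.io import imread, imsave
from skimage.transform import AffineTransform, warp

# e.g. produced by (paths hypothetical):
#   python landmark_registration.py src_landmarks.tsv dst_landmarks.tsv tmat.tsv --res_th 2 --max_ite 100
tmat = pd.read_csv("tmat.tsv", delimiter="\t", header=None).values  # 3x3 affine matrix

# The matrix maps (x, y) source landmarks to (x, y) destination landmarks,
# matching scikit-image's (col, row) coordinate convention.
model = AffineTransform(matrix=tmat)

moving = imread("moving.png")
# warp() expects the inverse mapping (output -> input coordinates).
warped = warp(moving, model.inverse, preserve_range=True)
imsave("warped.png", warped.astype(moving.dtype))
```

On the design of the new `else` branch: it stacks the x- and y-equations of all landmark pairs into a single linear system over the six affine parameters and solves it with `scipy.linalg.lstsq`, which plays the same role as fitting an affine transform to all points when no outlier rejection (RANSAC) is requested.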