#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <math.h>
#include <float.h>
#include <limits.h>
#include <time.h>
#include <ctype.h>
/*
 * Build the 3x3 projective (perspective) transform mapping the unit square
 * (0,0),(1,0),(1,1),(0,1) onto the quadrilateral P1..P4, together with its
 * adjugate (the inverse up to a scale factor, which cancels in projective
 * coordinates).
 *
 * Returns a heap block of 18 doubles owned by the caller (release with
 * free()), or NULL on allocation failure:
 *   [0..8]   forward homography H, row-major, H[8] == 1
 *   [9..17]  adjugate of H
 *
 * NOTE(review): `z` is zero when the four points are degenerate (e.g. three
 * collinear), making g and h inf/nan — callers must supply a proper
 * quadrilateral; confirm whether a guard should be added.
 */
double *mygetPerspectiveTransform(CvPoint2D32f P1, CvPoint2D32f P2, CvPoint2D32f P3, CvPoint2D32f P4) {
    /* was `new double[18]` — C++ operator in a C file; use malloc and
     * check the result */
    double *H = malloc(18 * sizeof *H);
    if (H == NULL)
        return NULL;
    double *adj = H + 9;  /* adjugate stored right after H */

    /* Projective terms g, h solved from the fourth-point constraint. */
    double sx = (P1.x - P2.x) + (P3.x - P4.x);
    double sy = (P1.y - P2.y) + (P3.y - P4.y);
    double dx1 = P2.x - P3.x;
    double dx2 = P4.x - P3.x;
    double dy1 = P2.y - P3.y;
    double dy2 = P4.y - P3.y;
    double z = (dx1 * dy2) - (dy1 * dx2);   /* zero for degenerate quads */
    double g = ((sx * dy2) - (sy * dx2)) / z;
    double h = ((sy * dx1) - (sx * dy1)) / z;

    /* Forward transform matrix. */
    double a = H[0] = P2.x - P1.x + g * P2.x;
    double b = H[1] = P4.x - P1.x + h * P4.x;
    double c = H[2] = P1.x;
    double d = H[3] = P2.y - P1.y + g * P2.y;
    double e = H[4] = P4.y - P1.y + h * P4.y;
    double f = H[5] = P1.y;
    H[6] = g;
    H[7] = h;
    H[8] = 1;

    /* Inverse transform (adjugate matrix). */
    adj[0] = e - f * h;
    adj[1] = c * h - b;
    adj[2] = b * f - c * e;
    adj[3] = f * g - d;
    adj[4] = a - c * g;
    adj[5] = c * d - a * f;
    adj[6] = d * h - e * g;
    adj[7] = b * g - a * h;
    adj[8] = a * e - b * d;
    return H;
}
/*
 * Apply the 3x3 homography stored in `system` (row-major, 9 doubles with
 * system[8] implicitly 1) to the point (u, v).
 *
 * Returns a heap block of 2 doubles {x, y} owned by the caller (release
 * with free()), or NULL on allocation failure.
 *
 * NOTE(review): the projective denominator is zero on the horizon line of
 * the homography; callers must keep (u, v) away from it.
 */
double *invert(double u, double v, double *system) {
    /* was `new double[2]` — C++ operator in a C file; use malloc and
     * check the result */
    double *H = malloc(2 * sizeof *H);
    if (H == NULL)
        return NULL;
    /* Hoist the shared projective denominator (was computed twice). */
    double w = system[6] * u + system[7] * v + 1.0;
    H[0] = (system[0] * u + system[1] * v + system[2]) / w;
    H[1] = (system[3] * u + system[4] * v + system[5]) / w;
    return H;
}
/*
 * Warp a source image through the inverse of a perspective transform:
 * a homography is built from the four target points P1T..P4T, then every
 * destination pixel is reverse-mapped to a source pixel (reverse mapping
 * leaves no holes in the destination).
 *
 * Usage: prog <image-file>
 */
int main(int argc, char *argv[]) {
    if (argc < 2) {   /* argv[1] was used unchecked */
        fprintf(stderr, "usage: %s <image>\n", argv[0]);
        return 1;
    }
    IplImage *image_src = cvLoadImage(argv[1], 1);
    if (image_src == NULL) {   /* was dereferenced without a check */
        fprintf(stderr, "could not load image '%s'\n", argv[1]);
        return 1;
    }
    /* BUG FIX: the source is loaded in color (flag 1 -> 3 channels) and
     * cvSet2D below stores a color scalar, so the destination must also be
     * 3-channel — it was created with 1 channel.  Zero it so unmapped
     * pixels are black instead of uninitialized memory. */
    IplImage *image_dst = cvCreateImage(cvGetSize(image_src), 8, 3);
    cvZero(image_dst);

    /* Quadrilateral as observed in the source image.
     * NOTE(review): P1..P4 are printed but never used in the transform
     * below — the homography is built from P1T..P4T only.  Confirm this
     * is intentional. */
    CvPoint2D32f P1, P2, P3, P4;
    P1.x = 449.549;  P1.y = 362.789;
    P2.x = 591.633;  P2.y = 374.233;
    P3.x = 588.618;  P3.y = 60.3181;
    P4.x = 513.554;  P4.y = 53.2928;
    printf("P1.x = %f\n", P1.x);
    printf("P1.y = %f\n", P1.y);
    printf("P2.x = %f\n", P2.x);
    printf("P2.y = %f\n", P2.y);
    printf("P3.x = %f\n", P3.x);
    printf("P3.y = %f\n", P3.y);
    printf("P4.x = %f\n", P4.x);
    printf("P4.y = %f\n", P4.y);

    /* Target points defining the homography; also drawn on the source for
     * visual reference (blue, green, red, yellow). */
    CvPoint2D32f P1T, P2T, P3T, P4T;
    P1T.x = 400.0;  P1T.y = 253.0;
    P2T.x = 660.0;  P2T.y = 253.0;
    P3T.x = 510.0;  P3T.y = 106.0;
    P4T.x = 555.0;  P4T.y = 106.0;
    cvCircle(image_src, cvPointFrom32f(P1T), 9, CV_RGB(0, 0, 255), 3, 8, 0);
    cvCircle(image_src, cvPointFrom32f(P2T), 9, CV_RGB(0, 255, 0), 3, 8, 0);
    cvCircle(image_src, cvPointFrom32f(P3T), 9, CV_RGB(255, 0, 0), 3, 8, 0);
    cvCircle(image_src, cvPointFrom32f(P4T), 9, CV_RGB(255, 255, 0), 3, 8, 0);

    double *system = mygetPerspectiveTransform(P1T, P2T, P3T, P4T);
    printf("system[0] : %g\n", system[0]);
    printf("system[1] : %g\n", system[1]);
    printf("system[2] : %g\n", system[2]);
    printf("system[3] : %g\n", system[3]);
    printf("system[4] : %g\n", system[4]);
    printf("system[5] : %g\n", system[5]);
    printf("system[6] : %g\n", system[6]);
    printf("system[7] : %g\n", system[7]);

    double width = image_src->width;
    double height = image_src->height;
    /* Reverse mapping: for each destination pixel, locate its source pixel.
     * NOTE(review): as in the original, x runs over rows (height) and y
     * over columns (width), and the u/v normalization divides x by width
     * and y by height — the axes look swapped; confirm intent before
     * changing. */
    for (int y = 0; y < image_src->width; y++) {
        for (int x = 0; x < image_src->height; x++) {
            /* Normalize into the unit square [0,1]x[0,1]. */
            double u = (double)x / width;
            double v = (double)y / height;
            /* BUG FIX: apply the homography inline — the previous per-pixel
             * call to invert() heap-allocated 2 doubles that were never
             * freed, leaking width*height*16 bytes. */
            double w = system[6] * u + system[7] * v + 1.0;
            int sx = (int)round((system[0] * u + system[1] * v + system[2]) / w);
            int sy = (int)round((system[3] * u + system[4] * v + system[5]) / w);
            /* Skip source coordinates falling outside the image. */
            if (sy >= 0 && sy < image_src->width && sx >= 0 && sx < image_src->height) {
                CvScalar rgb = cvGet2D(image_src, sx, sy);  /* (row, col) */
                cvSet2D(image_dst, x, y, rgb);
            }
        }
    }

    cvNamedWindow("Source Image", 1);
    cvNamedWindow("Destination Image", 1);
    cvShowImage("Source Image", image_src);
    cvShowImage("Destination Image", image_dst);
    cvWaitKey(0);   /* explicit argument: the C API has no default args */

    /* Release GUI and image resources (they were leaked).  `system` is
     * heap memory from mygetPerspectiveTransform and lives until process
     * exit; free it with that function's matching deallocator if this ever
     * stops being the end of main. */
    cvDestroyAllWindows();
    cvReleaseImage(&image_src);
    cvReleaseImage(&image_dst);
    return 0;
}