改变图片强调效果:可修改系数 r,例如 s.val[i]*scale*r

来源:互联网 发布:大黄蜂视频加密软件 编辑:程序博客网 时间:2024/05/20 00:52
/************中心环形矢量场*马鞍矢量场*****卷积白噪声纹理***********/#include <math.h>#include <stdio.h>#include <stdlib.h>#include <malloc.h>#include <cv.h>#include <highgui.h>#include <iostream>#include <fstream>#include "netcdfcpp.h"using namespace std;#define  SQUARE_FLOW_FIELD_SZ400#define DISCRETE_FILTER_SIZE2048        //离散的滤波尺寸#define  LOWPASS_FILTR_LENGTH10.00000f//低通滤波长度#define LINE_SQUARE_CLIP_MAX100000.0f//线性平方夹#define VECTOR_COMPONENT_MIN   0.050000f //矢量分量最小值void     SyntheszSaddle(int  n_xres,  int     n_yres,  float*   pVectr);void NormalizVectrs(int  n_xres,  int     n_yres,  float*   pVectr,float* vecSize);void     GenBoxFiltrLUT(int  LUTsiz,  float*  p_LUT0,  float*   p_LUT1);void     MakeWhiteNoise(int  n_xres,  int     n_yres,  float*  pNoise);void FlowImagingLIC(int  n_xres,  int     n_yres,  float*   pVectr,   float*  pNoise,  float*  pImage,  float*  p_LUT0,  float*  p_LUT1,  float  krnlen);void  WriteImage2PPM(int  n_xres,  int     n_yres,  float*  pImage,char*  f_name);void  color(int n_xres, int n_yres,float *pImage,float* vecSize);double maxvecmag;voidmain(){intn_xres = 721;intn_yres = 281;// intn_xres = 1441;// intn_yres = 561;// intn_xres = 2881;// intn_yres = 1121;// // intn_xres = 5761;// intn_yres = 2241;// intn_xres = 11521;//  intn_yres = 2241;float*pVectr = (float*         ) malloc( sizeof(float        ) * n_xres * n_yres * 2 );float*p_LUT0 = (float* ) malloc( sizeof(float        ) * DISCRETE_FILTER_SIZE);float*p_LUT1 = (float* ) malloc( sizeof(float        ) * DISCRETE_FILTER_SIZE);float*pNoise = (float* ) malloc( sizeof(float) * n_xres * n_yres     );float*pImage = (float* ) malloc( sizeof(float) * n_xres * n_yres     );float*vecSize = (float* ) malloc( sizeof(float) * n_xres*n_yres );SyntheszSaddle(  n_xres,   n_yres,   pVectr);//CenterFiled(n_xres, n_yres, pVectr);//包含矢量归一化NormalizVectrs(n_xres, n_yres, pVectr,vecSize);//所以这就不用矢量归一化了,因为之前的马鞍矢量场生成函数里没有归一化,才有此步的MakeWhiteNoise(n_xres, n_yres, pNoise);GenBoxFiltrLUT(DISCRETE_FILTER_SIZE, p_LUT0, 
p_LUT1);FlowImagingLIC(n_xres, n_yres, pVectr, pNoise, pImage, p_LUT0, p_LUT1, LOWPASS_FILTR_LENGTH);color(n_xres, n_yres,pImage,vecSize);//WriteImage2PPM(n_xres, n_yres, pImage, "LIC.ppm");//WriteImage2PPM(n_xres, n_yres, pImage, "LIC_721_281.ppm");// WriteImage2PPM(n_xres, n_yres, pImage, "LIC1441_561.ppm");// WriteImage2PPM(n_xres, n_yres, pImage, "LIC2281_1121.ppm"); //WriteImage2PPM(n_xres, n_yres, pImage, "LIC5761_1121.ppm");//WriteImage2PPM(n_xres, n_yres, pImage, "LIC_11521_2241.ppm");//system("pause");free(pVectr);pVectr = NULL;free(p_LUT0);p_LUT0 = NULL;free(p_LUT1);p_LUT1 = NULL;free(pNoise);pNoise = NULL;free(pImage);pImage = NULL;}///中心环形矢量场图形synthesize a saddle-shaped vector field     ///voidSyntheszSaddle(int  n_xres,  int  n_yres,  float*  pVectr){   static const int LatNum = n_yres;static const int LonNum = n_xres;static const int Time = 1;static const int TMP = Time*LonNum*LatNum;NcFile dataReadFile("global_721_281.nc",NcFile::ReadOnly);// NcFile dataReadFile("global_1441_561.nc",NcFile::ReadOnly);// NcFile dataReadFile("global_2881_1121.nc",NcFile::ReadOnly); //NcFile dataReadFile("global_5761_2241.nc",NcFile::ReadOnly); //NcFile dataReadFile("global_11521_2241.nc",NcFile::ReadOnly);if (!dataReadFile.is_valid()){std::cout<<"couldn't open file!"<<std::endl;}double *Tmp_UX = new double[TMP];double *Tmp_VY = new double[TMP];double *Tmp_LAT = new double[TMP];double *Tmp_LON = new double[TMP];NcVar *dataTmp_LAT = dataReadFile.get_var("LAT");NcVar *dataTmp_LON = dataReadFile.get_var("LONN359_361");NcVar *dataTmp_UX = dataReadFile.get_var("UX");NcVar *dataTmp_VY = dataReadFile.get_var("VY");dataTmp_LAT->get(Tmp_LAT,LatNum,LatNum);dataTmp_LON->get(Tmp_LON,LonNum,LonNum);dataTmp_UX->get(Tmp_UX,Time,LatNum,LonNum);dataTmp_VY->get(Tmp_VY,Time,LatNum,LonNum);for(int  j = 0;  j < n_yres;  j ++)for(int  i = 0;  i < n_xres;  i ++){int index = (  (n_yres - 1 - j) * n_xres + i  )  <<  1;//int index = j*n_yres+i;pVectr[index    ] = Tmp_UX[j*LonNum+i];pVectr[index 
+1   ]= Tmp_VY[j*LonNum+i];}delete []Tmp_UX;delete []Tmp_VY;delete []Tmp_LAT;}///normalize the vector field     ///// void    NormalizVectrs(int  n_xres,  int  n_yres,  float*  pVectr)// {// // // for(int j = 0;  j < n_yres;  j ++)// for(int i = 0;  i < n_xres;  i ++)// {// intindex = (j * n_xres + i) << 1;// floatvcMag = float(  sqrt( double(pVectr[index] * pVectr[index] + pVectr[index + 1] * pVectr[index + 1]) )  );// // floatscale = (vcMag == 0.0f) ? 0.0f : 1.0f / vcMag;//矢量大小归一化后的矢量值// //pVectr[index    ] *= scale;// pVectr[index    ]=pVectr[index    ]*scale;// //cout<<"pVectr[index    ]="<<pVectr[index    ];// pVectr[index + 1] *= scale;// //cout<<"pVectr[index    ]="<<pVectr[index  +1  ];// // }// }void    NormalizVectrs(int n_xres,  int  n_yres,  float*  pVectr,float* vecSize){for(int j = 0;  j < n_yres;  j ++)for(int i = 0;  i < n_xres;  i ++){intindex = (j * n_xres + i) << 1;floatvcMag = float(  sqrt( double(pVectr[index] * pVectr[index] + pVectr[index + 1] * pVectr[index + 1]) )  );vecSize[j * n_xres + i]=vcMag;if (vcMag<10000&&vcMag>maxvecmag){maxvecmag=vcMag;}floatscale = (vcMag == 0.0f) ? 
0.0f : 1.0f / vcMag;pVectr[index    ] *= scale*5.5;//????????????????????????????????????????????????????????????原来问题出在这pVectr[index + 1] *= scale*5.5;// cout<<"pVectr["<<index<<"]=="<<pVectr[index]<<endl;// cout<<"pVectr["<<index+1<<"]=="<<pVectr[index+1]<<endl;// //fin>>index>>"=">>pVectr[index    ] >>index+1>>pVectr[index+1]>>"/n";}}///make white noise as the LIC input texture     ///// voidMakeWhiteNoise(int  n_xres,  int  n_yres,  float*  pNoise)// {// IplImage * NoiseImg=cvCreateImage(cvSize(n_xres,n_yres),IPL_DEPTH_8U,1);// CvScalar s;// // for(int  j = 0;   j < n_yres;  j ++)// {// for(int  i = 0;   i < n_xres;  i ++)// // // for(int  j = 0;   j < n_yres;  j=j +10)//产生稀疏白噪声// // {// // for(int  i = 0;   i < n_xres;  i=i+10)// // {// int  r = rand();// r = (  (r & 0xff) + ( (r & 0xff00) >> 8 )  ) & 0xff;// pNoise[j * n_xres + i] = (float) r;// s = cvGet2D(NoiseImg,i,j);// s.val[0]=r;// s.val[1]=r;// s.val[2]=r;// cvSet2D(NoiseImg,i,j,s);// }// }// // }voidMakeWhiteNoise(int  n_xres,  int  n_yres,  float*  pNoise){for(int  j = 0;   j < n_yres;  j ++)for(int  i = 0;   i < n_xres;  i ++){int  r = rand();r = (  (r & 0xff) + ( (r & 0xff00) >> 8 )  ) & 0xff;pNoise[j * n_xres + i] = (float) r;}}///generate box filter LUTs     ///void    GenBoxFiltrLUT(int  LUTsiz,  float*  p_LUT0,  float*  p_LUT1){  for(int  i = 0;  i < LUTsiz;  i ++)  p_LUT0[i] = p_LUT1[i] = i;}void color(int n_xres,int n_yres, float* pImage,float* vecSize){IplImage * licImage = cvCreateImage(cvSize(n_xres,n_yres),IPL_DEPTH_8U,3);IplImage* img = cvLoadImage("11.jpg",1);int k = 0;double magind;double mag;double newMag;double x = 0.1;//x为非线性映射因子,且x!=1CvScalar colorTable[500];CvScalar s,s1;for (int i = 0;i < img->width;i++){s = cvGet2D(img,1,i);colorTable[i] =s;}for (int j=0;j<n_yres;j++){for (int i= 0;i<n_xres;i++){if (k>=img->width){k=0;}doublescale= pImage[j * n_xres + i]/255.0f;////////////////////////////////////////////////////////////////////成功生成彩色图像的关键mag = vecSize[j * n_xres + 
i];//********矢量大小归一化******if (mag<1000){magind = (mag/maxvecmag);}//非线性颜色增强LICnewMag =(pow(x,magind)-1)/(x-1);s = cvGet2D(licImage,j,i);//渐变颜色映射表int k = int(newMag*446); s1.val[0]=colorTable[k].val[0]*(k+1-newMag*446)+colorTable[k+1].val[0]*(newMag*446-k);s1.val[1]=colorTable[k].val[1]*(k+1-newMag*446)+colorTable[k+1].val[1]*(newMag*446-k);s1.val[2]=colorTable[k].val[2]*(k+1-newMag*446)+colorTable[k+1].val[2]*(newMag*446-k); s1.val[0]*=scale*2.1;////////?????????????????????????????????????可改s1.val[1]*=scale*2.1; s1.val[2]*=scale*2.1;//cout<<"s1.val[3]="<<s1.val[1]<<endl;cvSet2D(licImage,j,i,s1);}}//Mat AlphaImage= imread("s.jpg");//cv::Mat AlphaImage = imread("licImage",1);cvNamedWindow("lic_three channles",0);cvShowImage("lic_three channles",licImage);cvWaitKey(0);system("pause");cvDestroyWindow("lic_three channles");cvReleaseImage(&licImage);}///write the LIC image to a PPM file     ///voidWriteImage2PPM(int  n_xres,  int  n_yres,  float*  pImage,  char*  f_name){FILE*o_file;if(   ( o_file = fopen(f_name, "w") )  ==  NULL   )  {  printf("Can't open output file\n");  return;  }fprintf(o_file, "P6\n%d %d\n255\n", n_xres, n_yres);for(int  j = 0;  j < n_yres;  j ++)for(int  i = 0;  i < n_xres;  i ++){unsigned  charunchar = pImage[j * n_xres + i];//某点像素的灰度纹理值fprintf(o_file, "%c%c%c", unchar, unchar, unchar);//}fclose (o_file);o_file = NULL;}///flow imaging (visualization) through Line Integral Convolution     ///voidFlowImagingLIC(int     n_xres,  int     n_yres,  float*  pVectr,  float*  pNoise,  float*  pImage,  float*  p_LUT0,  float*  p_LUT1,  float   krnlen){intvec_id;///ID in the VECtor buffer (for the input flow field)intadvDir;///ADVection DIRection (0: positive;  1: negative)intadvcts;///number of ADVeCTion stepS per direction (a step counter)intADVCTS = int(krnlen * 3);///MAXIMUM number of advection steps per direction to break dead loops//跳出死循环的条件floatvctr_x;///x-component  of the VeCToR at the forefront pointfloatvctr_y;///y-component  of the VeCToR at 
the forefront pointfloatclp0_x;///x-coordinate of CLiP point 0 (current)floatclp0_y;///y-coordinate of CLiP point 0(current)floatclp1_x;///x-coordinate of CLiP point 1 (next   )floatclp1_y;///y-coordinate of CLiP point 1 (next   )floatsamp_x;///x-coordinate of the SAMPle in the current pixelfloatsamp_y;///y-coordinate of the SAMPle in the current pixelfloattmpLen;///TeMPorary LENgth of a trial clipped-segmentfloatsegLen;///SEGment   LENgthfloatcurLen;///CURrent   LENgth of the streamlinefloatprvLen;///PReVious  LENgth of the streamlinefloatW_ACUM;///ACcuMulated Weight from the seed to the current streamline forefrontfloattexVal;///TEXture VALuefloatsmpWgt;///WeiGhT of the current SaMPlefloatt_acum[2];///two ACcUMulated composite Textures for the two directions, perspectively 两个方向的卷积和floatw_acum[2];///two ACcUMulated Weighting values   for the two directions, perspectively 两个方向的权重和float*wgtLUT = NULL;///WeiGhT Look Up Table pointing to the target filter LUT权重查找表floatlen2ID = (DISCRETE_FILTER_SIZE - 1) / krnlen;///map a curve LENgth TO an ID in the LUT///for each pixel in the 2D output LIC image///for(int  j = 0; j < n_yres;  j ++)for(int  i = 0; i < n_xres;  i ++){///init the composite texture accumulators and the weight accumulators///每一个像素点为起始点,初始化一次权重和卷积和t_acum[0] = t_acum[1] = w_acum[0] = w_acum[1] = 0.0f;//初始化正反方向卷积和及权重和///for either advection direction///分别计算正反方向的卷积和及权重和for(advDir = 0;  advDir < 2;  advDir ++){///init the step counter, curve-length measurer, and streamline seed/////初始化当前方向上前进的步数和当前流线的总长advcts = 0;//前进的步数curLen = 0.0f;clp0_x = i + 0.5f;clp0_y = j + 0.5f;///access the target filter LUT///LUT显示查找表wgtLUT = (advDir == 0) ? 
p_LUT0 : p_LUT1;///until the streamline is advected long enough or a tightly  spiralling center / focus is encountered///while( curLen < krnlen && advcts < ADVCTS ) //??????{///access the vector at the sample///vec_id = ( int(clp0_y) * n_xres + int(clp0_x) ) << 1;vctr_x = pVectr[vec_id    ];vctr_y = pVectr[vec_id + 1];///in case of a critical point///遇到零向量,结束循环if( vctr_x == 0.0f && vctr_y == 0.0f ){t_acum[advDir] = (advcts == 0) ? 0.0f : t_acum[advDir];   ///this line is indeed unnecessaryw_acum[advDir] = (advcts == 0) ? 1.0f : w_acum[advDir];break;}///negate the vector for the backward-advection case///相反的方向取相反的方向vctr_x = (advDir == 0) ? vctr_x : -vctr_x;vctr_y = (advDir == 0) ? vctr_y : -vctr_y;///clip the segment against the pixel boundaries --- find the shorter from the two clipped segments//////replace  all  if-statements  whenever  possible  as  they  might  affect the computational speed///segLen = LINE_SQUARE_CLIP_MAX;//cout<<"segLen="<<segLen<<endl;//cout<<"VECTOR_COMPONENT_MIN="<<LINE_SQUARE_CLIP_MAX<<endl;segLen = (vctr_x < -VECTOR_COMPONENT_MIN) ? ( int(     clp0_x         ) - clp0_x ) / vctr_x : segLen;//int(0.5)=0segLen = (vctr_x >  VECTOR_COMPONENT_MIN) ? ( int( int(clp0_x) + 1.5f ) - clp0_x ) / vctr_x : segLen;segLen = (vctr_y < -VECTOR_COMPONENT_MIN) ?(      (    (  tmpLen = ( int(     clp0_y)          - clp0_y ) / vctr_y  )  <  segLen    ) ? tmpLen : segLen      ) : segLen;segLen = (vctr_y >  VECTOR_COMPONENT_MIN) ?(      (    (  tmpLen = ( int( int(clp0_y) + 1.5f ) - clp0_y ) / vctr_y  )  <  segLen    ) ? tmpLen : segLen      ) : segLen;///update the curve-length measurers///prvLen = curLen;curLen+= segLen;segLen+= 0.0004f;///check if the filter has reached either end///segLen = (curLen > krnlen) ? 
( (curLen = krnlen) - prvLen ) : segLen;///obtain the next clip point///clp1_x = clp0_x + vctr_x * segLen;clp1_y = clp0_y + vctr_y * segLen;///obtain the middle point of the segment as the texture-contributing sample///samp_x = (clp0_x + clp1_x) * 0.5f;samp_y = (clp0_y + clp1_y) * 0.5f;///obtain the texture value of the sample///texVal = pNoise[ int(samp_y) * n_xres + int(samp_x) ];///update the accumulated weight and the accumulated composite texture (texture x weight)///W_ACUM = wgtLUT[ int(curLen * len2ID) ];smpWgt = W_ACUM - w_acum[advDir];w_acum[advDir]  = W_ACUM;t_acum[advDir] += texVal * smpWgt;///update the step counter and the "current" clip point///advcts ++;clp0_x = clp1_x;clp0_y = clp1_y;///check if the streamline has gone beyond the flow field///if( clp0_x < 0.0f || clp0_x >= n_xres || clp0_y < 0.0f || clp0_y >= n_yres)  break;}}///normalize the accumulated composite texture///texVal = (t_acum[0] + t_acum[1]) / (w_acum[0] + w_acum[1]);///clamp the texture value against the displayable intensity range [0, 255]texVal = (texVal <   0.0f) ?   0.0f : texVal;texVal = (texVal > 255.0f) ? 255.0f : texVal; pImage[j * n_xres + i] = (float) texVal;// if (j * n_xres + i>400000)// {// cout<<"pImage["<<j * n_xres + i<<"]="<<pImage[j * n_xres + i]<<endl;// }} }

0 0
原创粉丝点击