px4flow explained


Flow Algorithm

2017.7.25 by snow




Text excerpted from http://blog.csdn.net/zhaohui1995_yang/article/details/51346695

This post mainly analyzes the last function, compute_flow.

 

The optical flow algorithm in the original code is histogram (hist) based, and the code splits naturally into two parts: the first builds the histograms, the second computes the displacement vector from them. Since this is optical flow, i.e. the motion trend seen over time by the camera on the aircraft's underside, at least two frames of data are required, frame1 and frame2.

 

In the first step, the outer j/i for loops iterate over the sample points, and the inner jj/ii loops scan a small neighborhood around each one. Sample points are taken from frame1; for each, a small rectangle spanning -winmin to +winmin around the same coordinates in frame2 is searched for the most similar point. In short, this locates each sample point's corresponding position in the two frames.

The PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD parameter sets the quality bar for sampled feature points (the larger, the stricter) and decides whether the current point is usable for flow computation; the original function computes gradients and the like for this test.
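To make the feature test concrete, here is a minimal sketch of a gradient-based quality measure over an 8x8 patch. This illustrates the idea only; the helper name and patch size are assumptions, not the firmware's exact function:

#include <stdint.h>
#include <stdlib.h>

/* Sum of absolute horizontal and vertical gradients over an 8x8 patch;
 * a textured patch scores high, a flat one scores low. The caller keeps
 * the patch (plus one row/column of margin) inside the image, and
 * compares the score against PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD. */
static uint32_t patch_gradient_sum(const uint8_t *img, uint16_t row,
                                   uint16_t col, uint16_t width)
{
    uint32_t acc = 0;
    for (uint16_t r = row; r < row + 8; r++) {
        for (uint16_t c = col; c < col + 8; c++) {
            acc += abs((int)img[r * width + c + 1] - (int)img[r * width + c]);   /* dx */
            acc += abs((int)img[(r + 1) * width + c] - (int)img[r * width + c]); /* dy */
        }
    }
    return acc;
}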

Next comes the search for the most similar point in the second image, by computing the SAD (sum of absolute differences): the sum of the absolute per-pixel differences between the two patches, used as the similarity measure, where smaller means more similar. PARAM_BOTTOM_FLOW_VALUE_THRESHOLD is the threshold on this measure; since a match is only accepted when its SAD is small enough, a smaller threshold is stricter. If even the best match fails the threshold, the two images effectively have no corresponding point there, so that sample point is abandoned and the next one is tried.
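A minimal sketch of the SAD match and the brute-force search over the ±winmin window follows (the 8x8 patch size and variable names are assumptions; the firmware's loops are heavily optimized):

#include <stdint.h>
#include <stdlib.h>

/* sum of absolute differences between an 8x8 patch of frame1 at (r1, c1)
 * and one of frame2 at (r2, c2); smaller = more similar */
static uint32_t sad_8x8(const uint8_t *frame1, const uint8_t *frame2,
                        uint16_t r1, uint16_t c1, uint16_t r2, uint16_t c2,
                        uint16_t width)
{
    uint32_t acc = 0;
    for (uint8_t j = 0; j < 8; j++)
        for (uint8_t i = 0; i < 8; i++)
            acc += abs((int)frame1[(r1 + j) * width + c1 + i] -
                       (int)frame2[(r2 + j) * width + c2 + i]);
    return acc;
}

/* search frame2's [-winmin, +winmin] window around (r, c) for the best
 * match of frame1's patch at (r, c); returns the best SAD, which the
 * caller checks against PARAM_BOTTOM_FLOW_VALUE_THRESHOLD */
static uint32_t search_best_match(const uint8_t *frame1, const uint8_t *frame2,
                                  uint16_t r, uint16_t c, uint16_t width,
                                  int8_t winmin, int8_t *dx, int8_t *dy)
{
    uint32_t best = UINT32_MAX;
    for (int8_t jj = -winmin; jj <= winmin; jj++) {
        for (int8_t ii = -winmin; ii <= winmin; ii++) {
            uint32_t d = sad_8x8(frame1, frame2, r, c, r + jj, c + ii, width);
            if (d < best) { best = d; *dx = ii; *dy = jj; }
        }
    }
    return best;
}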

If we are lucky and a usable sample point is found in frame1 along with its corresponding point in frame2, the offsets in x and y can be computed, i.e. a displacement vector, one component per axis. The x and y displacements are then recorded in histogram arrays.

So in the end the x and y histogram arrays record, for every heroic point that survived both tests, the magnitude and direction of its motion.
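The histogram bookkeeping is then a single increment per accepted point; a sketch, where the bin layout and the winmin value are assumptions:

#include <stdint.h>

#define WINMIN 4 /* assumed search radius, giving displacements in [-4, +4] */

static uint16_t histx[2 * WINMIN + 1]; /* one bin per possible x displacement */
static uint16_t histy[2 * WINMIN + 1]; /* one bin per possible y displacement */

/* record one accepted sample point's displacement */
static void hist_add(int8_t dx, int8_t dy)
{
    histx[dx + WINMIN]++; /* shift so that -WINMIN lands in bin 0 */
    histy[dy + WINMIN]++;
}

/* the dominant displacement is the bin with the highest count */
static int8_t hist_argmax(const uint16_t *hist)
{
    uint8_t best = 0;
    for (uint8_t i = 1; i < 2 * WINMIN + 1; i++)
        if (hist[i] > hist[best])
            best = i;
    return (int8_t)(best - WINMIN);
}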

 

That covers how hist is produced. Besides hist there is also the per-point displacement data, i.e. the direction and magnitude of every sample point's motion, kept in dirs and subdirs: dirs records pixel-level displacement, subdirs records sub-pixel displacement, i.e. half-pixel steps.
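A common way to obtain the half-pixel estimate is to synthesize half-pixel samples by averaging two neighboring pixels and repeat the SAD comparison against these interpolated patches; a sketch of just the interpolation step (illustrative, not the firmware's exact routine):

#include <stdint.h>

/* intensity halfway between two neighboring pixels; SAD against patches
 * built from such values decides whether the true displacement is, say,
 * +0.5 px rather than 0 or +1 px */
static inline uint8_t half_pixel(uint8_t a, uint8_t b)
{
    return (uint8_t)(((uint16_t)a + (uint16_t)b) / 2);
}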

 

With hist and dirs in hand, their data stands in for the original images: frame1 and frame2 are no longer needed and can honorably retire, replaced by a more expressive representation, the histograms and the displacement vectors.

The first step here is a small parameter check: whether to filter. In my experience this barely matters, the results look about the same either way; I don't know whether others find the same.

If PARAM_BOTTOM_FLOW_HIST_FILTER is set, the result is computed from hist; otherwise it is computed from dirs and subdirs, which is straightforward: just sum them and divide by the number of valid sample points, as in the sketch below.
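A sketch of that non-histogram path, assuming the dirs sums are in whole pixels and the subdirs sums in half-pixel steps (all names are illustrative):

/* average the per-point displacements over the valid sample points;
 * subdirs contributes in half-pixel units */
static void mean_flow(float sum_dirs_x, float sum_dirs_y,
                      float sum_subdirs_x, float sum_subdirs_y,
                      int meancount, float *flow_x, float *flow_y)
{
    if (meancount > 0) {
        *flow_x = (sum_dirs_x + 0.5f * sum_subdirs_x) / (float)meancount;
        *flow_y = (sum_dirs_y + 0.5f * sum_subdirs_y) / (float)meancount;
    } else {
        *flow_x = *flow_y = 0.0f; /* no valid points: report zero flow */
    }
}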

Finally, the gyro information is fused in to adjust the result.
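The firmware's exact fusion scheme isn't shown here; as a general illustration only, the standard correction subtracts the rotation-induced apparent flow, since a body rotating at ω rad/s for dt seconds sees roughly ω·dt rad of flow even while hovering in place:

/* rotation compensation sketch (a standard technique, not necessarily
 * the firmware's exact formula): gyro_rate in rad/s, dt in seconds */
static float compensate_rotation(float flow_rad, float gyro_rate, float dt)
{
    return flow_rad - gyro_rate * dt;
}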


Algorithm walkthrough: original



PX4Flow:

1. Allocate a block of image buffer memory

/* fast image buffers for calculations; the .ccm section places them in
   the STM32F4's core-coupled memory for fast CPU access */
uint8_t image_buffer_8bit_1[FULL_IMAGE_SIZE] __attribute__((section(".ccm")));
uint8_t image_buffer_8bit_2[FULL_IMAGE_SIZE] __attribute__((section(".ccm")));


2. Initialize the default optical flow parameters

/* load settings and parameters */
global_data_reset_param_defaults();


3. Initialize image capture

/* enable image capturing */
enable_image_capture();

/**
 * @brief Initialize DCMI DMA and enable image capturing
 */
void enable_image_capture(void)
{
    dcmi_clock_init();
    dcmi_hw_init();
    dcmi_dma_init(global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]); // set the image size
    mt9v034_context_configuration();
    dcmi_dma_enable();
}



4. Initialize the image buffers, ready for image storage

/* init and clear fast image buffers */
for (int i = 0; i < global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]; i++)
{
    image_buffer_8bit_1[i] = 0;
    image_buffer_8bit_2[i] = 0;
}

5. Assign pointers for accessing the image data

uint8_t* current_image = image_buffer_8bit_1;

uint8_t* previous_image = image_buffer_8bit_2;


6. Initialize the sonar distance data, in preparation for the flow computation

/* sonar config */
float sonar_distance_filtered = 0.0f; // distance in meters
float sonar_distance_raw = 0.0f;      // distance in meters
bool distance_valid = false;
sonar_config();


7. Initialize the optical flow variables

/* bottom flow variables */
float pixel_flow_x = 0.0f;
float pixel_flow_y = 0.0f;
float pixel_flow_x_sum = 0.0f;
float pixel_flow_y_sum = 0.0f;
float velocity_x_sum = 0.0f;
float velocity_y_sum = 0.0f;
float velocity_x_lp = 0.0f;
float velocity_y_lp = 0.0f;
int valid_frame_count = 0;
int pixel_flow_count = 0;

static float accumulated_flow_x = 0;
static float accumulated_flow_y = 0;
static float accumulated_gyro_x = 0;
static float accumulated_gyro_y = 0;
static float accumulated_gyro_z = 0;
static uint16_t accumulated_framecount = 0;
static uint16_t accumulated_quality = 0;
static uint32_t integration_timespan = 0;
static uint32_t lasttime = 0;
uint32_t time_since_last_sonar_update = 0;


8. Check the PARAM_VIDEO_ONLY parameter; if it is set to 1, enter the image calibration path (a parameter configuration option)


/* calibration routine */
if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY]))
{
    while (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY]))
    {
        /**
         * @brief Calibration image collection routine restart
         */
        dcmi_restart_calibration_routine();

        /* waiting for first quarter of image */
        while (get_frame_counter() < 2) {}
        dma_copy_image_buffers(&current_image, &previous_image, FULL_IMAGE_SIZE, 1);

        /* waiting for second quarter of image */
        while (get_frame_counter() < 3) {}
        dma_copy_image_buffers(&current_image, &previous_image, FULL_IMAGE_SIZE, 1);

        /* waiting for all image parts */
        while (get_frame_counter() < 4) {}

        send_calibration_image(&previous_image, &current_image);

        if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_STATE]))
            communication_system_state_send();

        communication_receive_usb();
        debug_message_send_one();
        communication_parameter_send();

        LEDToggle(LED_COM);
    }

    dcmi_restart_calibration_routine();
    LEDOff(LED_COM);
}


9. Initialize the image size

uint16_t image_size = global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT];


10. Initialize the focal length

/* calculate focal_length in pixels */
const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 6.0f) * 1000.0f; // original focal length: 12 mm, pixel size: 6 um, binning 4 enabled
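To unpack the constant: the sensor has 6 µm pixels and 4× binning, so the effective pixel pitch is 4 × 6 µm = 24 µm = 0.024 mm, and dividing the focal length in millimeters by that pitch gives the focal length in pixels. A worked example for the 12 mm lens mentioned in the comment:

/* worked example: 12 mm lens, 6 um pixels, 4x binning
 * effective pitch = 4 * 6 um = 0.024 mm
 * focal_length_px = 12.0 / (4.0 * 6.0) * 1000.0 = 500 pixels */
const float example_focal_length_px = 12.0f / (4.0f * 6.0f) * 1000.0f; /* 500.0f */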


11. Get the sonar distance; returns 1 if the measurement is valid, 0 if not

/* get sonar data */
distance_valid = sonar_read(&sonar_distance_filtered, &sonar_distance_raw);


12. Start computing the optical flow

/* compute optical flow */
if (FLOAT_EQ_INT(global_data.param[PARAM_SENSOR_POSITION], BOTTOM))
{
    /* copy recent image to faster ram */
    dma_copy_image_buffers(&current_image, &previous_image, image_size, 1);

    /* compute optical flow */
    qual = compute_flow(previous_image, current_image, x_rate, y_rate, z_rate, &pixel_flow_x, &pixel_flow_y);

    /*
     * real point P (X, Y, Z), image plane projection p (x, y, z), focal length f, distance to scene Z
     * x / f = X / Z
     * y / f = Y / Z
     */
    float flow_compx = pixel_flow_x / focal_length_px / (get_time_between_images() / 1000000.0f);
    float flow_compy = pixel_flow_y / focal_length_px / (get_time_between_images() / 1000000.0f);

    /* integrate velocity and output values only if distance is valid */
    if (distance_valid)
    {
        /* calc velocity (negative of flow values scaled with distance) */
        float new_velocity_x = -flow_compx * sonar_distance_filtered;
        float new_velocity_y = -flow_compy * sonar_distance_filtered;

        time_since_last_sonar_update = (get_boot_time_us() - get_sonar_measure_time());

        if (qual > 0)
        {
            velocity_x_sum += new_velocity_x;
            velocity_y_sum += new_velocity_y;
            valid_frame_count++;

            uint32_t deltatime = (get_boot_time_us() - lasttime);
            integration_timespan += deltatime;
            accumulated_flow_x += pixel_flow_y / focal_length_px * 1.0f;  // rad; axes swapped to align x flow around y axis
            accumulated_flow_y += pixel_flow_x / focal_length_px * -1.0f; // rad
            accumulated_gyro_x += x_rate * deltatime / 1000000.0f; // rad
            accumulated_gyro_y += y_rate * deltatime / 1000000.0f; // rad
            accumulated_gyro_z += z_rate * deltatime / 1000000.0f; // rad
            accumulated_framecount++;
            accumulated_quality += qual;

            /* lowpass velocity output */
            velocity_x_lp = global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW] * new_velocity_x +
                            (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp;
            velocity_y_lp = global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW] * new_velocity_y +
                            (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp;
        }
        else
        {
            /* taking flow as zero */
            velocity_x_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp;
            velocity_y_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp;
        }
    }
    else
    {
        /* taking flow as zero */
        velocity_x_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp;
        velocity_y_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp;
    }

    // update lasttime
    lasttime = get_boot_time_us();

    pixel_flow_x_sum += pixel_flow_x;
    pixel_flow_y_sum += pixel_flow_y;
    pixel_flow_count++;
}
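Condensing what the block above does: by the pinhole relation x/f = X/Z, a pixel displacement Δp over Δt seconds at height Z corresponds to a ground velocity of (Δp / f_px) · Z / Δt, negated because the scene appears to move opposite to the camera; the velocity output is then smoothed with an exponential low-pass. A compact restatement of those two steps:

/* metric velocity from pixel flow via the pinhole model x/f = X/Z;
 * negative because apparent scene motion opposes camera motion */
static float flow_to_velocity(float pixel_flow, float focal_length_px,
                              float distance_m, float dt_s)
{
    return -(pixel_flow / focal_length_px) * distance_m / dt_s;
}

/* one step of the exponential low-pass applied to velocity_x_lp and
 * velocity_y_lp above, with weight = PARAM_BOTTOM_FLOW_WEIGHT_NEW */
static float lowpass_step(float lp_state, float new_value, float weight)
{
    return weight * new_value + (1.0f - weight) * lp_state;
}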


13. Transmit the data

void update_TX_buffer(float pixel_flow_x, float pixel_flow_y,
                      float flow_comp_m_x, float flow_comp_m_y, uint8_t qual,
                      float ground_distance, float gyro_x_rate, float gyro_y_rate,
                      float gyro_z_rate, int16_t gyro_temp, legacy_i2c_data_t *pd)
{
    static uint16_t frame_count = 0;

    i2c_frame f;
    i2c_integral_frame f_integral;

    f.frame_count = frame_count;
    f.pixel_flow_x_sum = pixel_flow_x * 10.0f;
    f.pixel_flow_y_sum = pixel_flow_y * 10.0f;
    f.flow_comp_m_x = flow_comp_m_x * 1000;
    f.flow_comp_m_y = flow_comp_m_y * 1000;
    f.qual = qual;
    f.ground_distance = ground_distance * 1000;

    f.gyro_x_rate = gyro_x_rate * getGyroScalingFactor();
    f.gyro_y_rate = gyro_y_rate * getGyroScalingFactor();
    f.gyro_z_rate = gyro_z_rate * getGyroScalingFactor();
    f.gyro_range = getGyroRange();

    uint32_t time_since_last_sonar_update;

    time_since_last_sonar_update = (get_boot_time_us() - get_sonar_measure_time());

    if (time_since_last_sonar_update < 255 * 1000) {
        f.sonar_timestamp = time_since_last_sonar_update / 1000; // convert to ms
    } else {
        f.sonar_timestamp = 255;
    }

    static float accumulated_flow_x = 0;
    static float accumulated_flow_y = 0;
    static uint16_t accumulated_framecount = 0;
    static uint16_t accumulated_quality = 0;
    static float accumulated_gyro_x = 0;
    static float accumulated_gyro_y = 0;
    static float accumulated_gyro_z = 0;
    static uint32_t integration_timespan = 0;
    static uint32_t lasttime = 0;

    /* calculate focal_length in pixels */
    const float focal_length_px = ((global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 6.0f) * 1000.0f); // original focal length: 12 mm, pixel size: 6 um, binning 4 enabled

    // reset if readout has been performed
    if (stop_accumulation == 1) {

        // debug output
        // mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(),
        //     global_data.param[PARAM_SENSOR_ID], accumulated_flow_x * 10.0f,
        //     accumulated_gyro_x * 10.0f, integration_timespan,
        //     accumulated_framecount, (uint8_t)(accumulated_quality / accumulated_framecount), ground_distance);

        integration_timespan = 0;
        accumulated_flow_x = 0; // mrad
        accumulated_flow_y = 0; // mrad
        accumulated_framecount = 0;
        accumulated_quality = 0;
        accumulated_gyro_x = 0; // mrad
        accumulated_gyro_y = 0; // mrad
        accumulated_gyro_z = 0; // mrad
        stop_accumulation = 0;
    }

    // accumulate flow and gyro values between successive I2C readings
    // update only if qual != 0
    if (qual > 0) {
        uint32_t deltatime = (get_boot_time_us() - lasttime);
        integration_timespan += deltatime;
        accumulated_flow_x += pixel_flow_y * 1000.0f / focal_length_px * 1.0f;  // mrad; axes swapped to align x flow around y axis
        accumulated_flow_y += pixel_flow_x * 1000.0f / focal_length_px * -1.0f; // mrad
        accumulated_framecount++;
        accumulated_quality += qual;
        accumulated_gyro_x += gyro_x_rate * deltatime * 0.001f; // mrad; same as gyro_x_rate * 1000.0f * deltatime / 1000000.0f
        accumulated_gyro_y += gyro_y_rate * deltatime * 0.001f; // mrad
        accumulated_gyro_z += gyro_z_rate * deltatime * 0.001f; // mrad
    }

    // update lasttime
    lasttime = get_boot_time_us();

    f_integral.frame_count_since_last_readout = accumulated_framecount;
    f_integral.gyro_x_rate_integral = accumulated_gyro_x * 10.0f;   // mrad*10
    f_integral.gyro_y_rate_integral = accumulated_gyro_y * 10.0f;   // mrad*10
    f_integral.gyro_z_rate_integral = accumulated_gyro_z * 10.0f;   // mrad*10
    f_integral.pixel_flow_x_integral = accumulated_flow_x * 10.0f;  // mrad*10
    f_integral.pixel_flow_y_integral = accumulated_flow_y * 10.0f;  // mrad*10
    f_integral.integration_timespan = integration_timespan;         // microseconds
    f_integral.ground_distance = ground_distance * 1000;            // millimeters
    f_integral.sonar_timestamp = time_since_last_sonar_update;      // microseconds
    f_integral.qual =
        (uint8_t)(accumulated_quality / accumulated_framecount);    // 0-255 linear quality measurement, 0 = bad, 255 = best
    f_integral.gyro_temperature = gyro_temp;                        // temperature * 100, in centi-degrees Celsius

    notpublishedIndexFrame1 = 1 - publishedIndexFrame1; // choose the frame1 buffer that is not currently published
    notpublishedIndexFrame2 = 1 - publishedIndexFrame2; // choose the frame2 buffer that is not currently published

    // HACK!! To get the data.
    // In a V2 hw build with CAN and UAVCAN these macros
    // are used to pass the data to UAVCAN.
    uavcan_export(&pd->frame, &f, I2C_FRAME_SIZE);
    uavcan_export(&pd->integral_frame, &f_integral, I2C_INTEGRAL_FRAME_SIZE);

    // fill I2C transmit buffer 1 with frame values
    memcpy(&(txDataFrame1[notpublishedIndexFrame1]), &f, I2C_FRAME_SIZE);

    // fill I2C transmit buffer 2 with integral frame values
    memcpy(&(txDataFrame2[notpublishedIndexFrame2]), &f_integral, I2C_INTEGRAL_FRAME_SIZE);

    // swap frame1 buffers if the I2C bus is idle
    if (readout_done_frame1) {
        publishedIndexFrame1 = 1 - publishedIndexFrame1;
    }

    // swap frame2 buffers if the I2C bus is idle
    if (readout_done_frame2) {
        publishedIndexFrame2 = 1 - publishedIndexFrame2;
    }

    frame_count++;
}



Data recap



f_integral.gyro_x_rate_integral = accumulated_gyro_x * 10.0f; // mrad*10

accumulated_gyro_x += gyro_x_rate * deltatime * 0.001f; // mrad; same as gyro_x_rate * 1000.0f * deltatime / 1000000.0f


gyro_x_rate comes from the gyroscope readout, after this coordinate transformation:

/* gyroscope coordinate transformation */
float x_rate = y_rate_sensor; // change x and y rates
float y_rate = -x_rate_sensor;
float z_rate = z_rate_sensor; // z is correct



f_integral.pixel_flow_x_integral = accumulated_flow_x * 10.0f; // mrad*10

accumulated_flow_x += pixel_flow_y * 1000.0f / focal_length_px * 1.0f; // mrad; axes swapped to align x flow around y axis

pixel_flow_y is computed by compute_flow().


f_integral.integration_timespan = integration_timespan; // accumulated time, microseconds

integration_timespan += deltatime;

uint32_t deltatime = (get_boot_time_us() - lasttime);

lasttime = get_boot_time_us();



f_integral.qual = (uint8_t)(accumulated_quality / accumulated_framecount); // averaged quality; the directly computed quality could also be used here




// how the flight controller's EKF2 consumes the optical flow data
flow.flowdata(0) = optical_flow.pixel_flow_x_integral;
flow.flowdata(1) = optical_flow.pixel_flow_y_integral;
flow.quality = optical_flow.quality;
flow.gyrodata(0) = optical_flow.gyro_x_rate_integral;
flow.gyrodata(1) = optical_flow.gyro_y_rate_integral;
flow.gyrodata(2) = optical_flow.gyro_z_rate_integral;
flow.dt = optical_flow.integration_timespan;
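For reference, undoing the scalings documented in the comments above on the consumer side looks like this (a sketch; it assumes the field scalings quoted above, mrad*10 / microseconds / millimeters, are accurate):

/* decode the integral frame back to SI units, per the scalings noted above */
float flow_x_rad = f_integral.pixel_flow_x_integral / 10000.0f; /* mrad*10 -> rad */
float gyro_x_rad = f_integral.gyro_x_rate_integral / 10000.0f;  /* mrad*10 -> rad */
float dt_s       = f_integral.integration_timespan / 1e6f;      /* us -> s */
float distance_m = f_integral.ground_distance / 1000.0f;        /* mm -> m */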