difference gradient
来源:互联网 发布:3dsmax 显卡优化驱动 编辑:程序博客网 时间:2024/05/20 06:55
#include <math.h>
#include <stdlib.h>
#include <stdio.h>

/* Print the n elements of vector A, tab-separated, followed by a newline. */
void show_vector(float *A, int n)
{
    for (int i = 0; i < n; i++) {
        printf("%2.5f \t", A[i]);
    }
    printf("\n");
}

/* Test function: f(x) = x0^3 + x1^2. */
float func(float *x)
{
    return x[0] * x[0] * x[0] + x[1] * x[1];
}

/*
 * Calculating the gradient of a function with different finite-difference
 * methods.  Each routine estimates the partial derivatives of func() at
 * x = (0, 0) and prints the result.
 */

/* Forward difference: df/dx_i ~ (f(x + h*e_i) - f(x)) / h.  O(h) error. */
void forward(void)
{
    int n = 2;
    float x[] = {0, 0};
    float pf[] = {0, 0};
    float dx = 0.001f;
    float f = func(x);
    for (int i = 0; i < n; i++) {
        x[i] += dx;
        float newF = func(x);
        pf[i] = (newF - f) / dx;
        x[i] -= dx; /* restore the perturbed coordinate */
    }
    printf("forward pf = \n");
    show_vector(pf, n);
}

/* Central (two-sided) difference:
 * df/dx_i ~ (f(x + h*e_i) - f(x - h*e_i)) / (2h).  O(h^2) error. */
void twoside(void)
{
    int n = 2;
    float x[] = {0, 0};
    float pf[] = {0, 0};
    float dx = 0.01f;
    for (int i = 0; i < n; i++) {
        x[i] += dx;
        float F_forward = func(x);
        x[i] -= 2 * dx;
        float F_backward = func(x);
        x[i] += dx; /* restore the perturbed coordinate */
        pf[i] = (F_forward - F_backward) / (2 * dx);
    }
    printf("two side pf = \n");
    show_vector(pf, n);
}

/* Fourth-order central difference:
 * df/dx_i ~ (-f(x+2h) + 8 f(x+h) - 8 f(x-h) + f(x-2h)) / (12h).
 * O(h^4) error.
 * NOTE: the original used coefficients (2, 16, -16, -2)/(4h), which is not
 * a valid derivative stencil; this is the standard 5-point formula. */
void fourthorder(void)
{
    int n = 2;
    float x[] = {0, 0};
    float pf[] = {0, 0};
    float dx = 0.01f;
    for (int i = 0; i < n; i++) {
        float x0 = x[i];
        x[i] = x0 + dx;
        float F_forward_1 = func(x);
        x[i] = x0 + 2 * dx;
        float F_forward_2 = func(x);
        x[i] = x0 - dx;
        float F_backward_1 = func(x);
        x[i] = x0 - 2 * dx;
        float F_backward_2 = func(x);
        x[i] = x0; /* restore the perturbed coordinate */
        float diff = -F_forward_2 + 8 * F_forward_1
                     - 8 * F_backward_1 + F_backward_2;
        pf[i] = diff / (12 * dx);
    }
    printf("fourth order pf = \n");
    show_vector(pf, n);
}

int main(void)
{
    forward();
    twoside();
    fourthorder();
    return 0;
}
阅读全文
0 0
- difference gradient
- gradient
- gradient
- gradient
- 09 gradient
- Gradient 渐变
- CSS3 Gradient
- linear-gradient
- CSS3 Gradient
- CSS3 Gradient
- gradient descent
- gradient descent
- Gradient Vectors
- css3 gradient
- Gradient Descent
- CSS3 Gradient
- css3 gradient
- Gradient Boosting
- Tomcat中JVM内存溢出及合理配置(转:http://blog.csdn.net/ye1992/article/details/9344807)
- 爱快安装说明
- 看完让你彻底搞懂Websocket原理
- HM代码总结一
- 用户停留网站无操作,一定时间自动退出
- difference gradient
- An internal error occurred during: "Updating Maven Project". Unsupported I问题解决备忘
- awk-sed数据处理
- ABAP操作EXECL详细
- Spring Boot (二) Spring Boot for JSP
- 这位新同事又矮又重,凭啥是阿里P8?
- 机器学习之逻辑回归 Logistic Regression(一)
- Android <activity-alias> 的用法
- java手动实现集合(数组形式)