fork download
#define _CRT_SECURE_NO_WARNINGS

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <time.h> // included to obtain a time-based seed (currently unused; fixed srand seeds are used instead)

// Forward declarations so main() can call these before their definitions appear.
void FileLoad_scan_number_of_training_sets();
void Normalize_data();
void Initial_the_weight();
void Initialize_the_sum();
void Compute_internal_state(int);
void Compute_epison(int);
void Updata_weight_and_bias(int);   // NOTE(review): name looks like a typo for "Update_"; kept for compatibility
void Move_weight_and_bias();

double function_of_hidden_layer(double);
double function_of_output_layer(double);
double first_direvative_of_function_of_hidden_layer(double);
double first_direvative_of_function_of_output_layer(double);


// User-tunable hyper-parameters.
double learning_rate = 0.05;
double mementum;                            // momentum coefficient (declared but never used)
int time_step = 1000;                       // number of training epochs
int input_dimension=2;
int output_dimension=2;
int number_of_neuron_hidden_layer = 10;
int number_of_training_sets = 0;            // set while scanning the data file (line count)

int i,j;                                    // shared loop indices reused by every function

double **data_input = 0;  //save input of training data
double **data_output = 0; //save target of training data (trainer pairs row i with target row i+1)

double zz; //save tmp calculation number

double *tmp_matrix; //save tmp matrix (scratch column copy) of all training data

double *max=0; //save the maximum value of the training data in each column
double *min=0; //save the minimum value of the training data in each column

double **weight_between_input_and_hidden; //weight of the connection between input layer and hidden layer
double **weight_between_hidden_and_output; //weight of the connection between hidden layer and output layer

double *bias_hidden; //bias in hidden layer
double *bias_output; //bias in output layer

double cc; //save tmp random number

double *internal_state_between_input_and_hidden; //internal state (pre-activation) in hidden layer
double *internal_state_between_hidden_and_output; //internal state (pre-activation) in output layer

double *activation_value_hidden; //activation value in hidden layer
double *y_value_output; //output value in output layer

double sum_of_weight=0; //accumulator for the weighted-sum in the forward pass
double *total_error; //per-dimension squared error in output layer
double *epison_hidden; //delta ("epison") of each hidden neuron
double *epison_output; //delta ("epison") of each output neuron
double sum_of_epison=0; //accumulator for back-propagated deltas

int idx = 0;   // index of the current training sample
int count = 0; // index of the current epoch

// Batch-gradient accumulators, summed over one epoch and applied by Move_weight_and_bias().
double **sum_of_weight_between_input_and_hidden;
double **sum_of_weight_between_hidden_and_output;

double *sum_of_bias_hidden=0;
double *sum_of_bias_output=0;

// Per-sample gradient ("modification") buffers.
double **modification_of_weight_between_input_and_hidden=0;
double **modification_of_weight_between_hidden_and_output=0;

double *modification_of_bias_hidden=0;
double *modification_of_bias_output=0;

double nn; // unused scratch variable
  82.  
  83. int main(){
  84.  
  85. FILE *Consq_error_first_dimen;
  86. FILE *Consq_error_second_dimen;
  87. FILE *Consq_output_first_dimen;
  88. FILE *Consq_output_second_dimen;
  89.  
  90. Consq_error_first_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_error_first_dimen.txt", "w+");
  91. Consq_error_second_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_error_second_dimen.txt","w+");
  92. Consq_output_first_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_output_first_dimen.txt", "w+");
  93. Consq_output_second_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_output_scond_dimen.txt", "w+");
  94.  
  95. FileLoad_scan_number_of_training_sets();
  96. Normalize_data();
  97. Initial_the_weight();
  98.  
  99. for(count = 0; count<time_step; count++)
  100. {
  101. Initialize_the_sum();
  102. for(idx=0;idx<(number_of_training_sets-1);idx++)
  103. {
  104. Compute_internal_state(idx);
  105. Compute_epison(idx);
  106. Updata_weight_and_bias(idx);
  107. for(i=0;i<output_dimension;i++)
  108. {
  109. if(i==0)
  110. {
  111. fprintf(Consq_error_first_dimen, "%f ", total_error[i]);
  112. fprintf(Consq_output_first_dimen, "%f ", y_value_output[i]);
  113. }
  114. if(i==1)
  115. {
  116. fprintf(Consq_error_second_dimen, "%f ", total_error[i]);
  117. fprintf(Consq_output_second_dimen, "%f ", y_value_output[i]);
  118. }
  119. }
  120. fprintf(Consq_error_first_dimen, "\n");
  121. fprintf(Consq_error_second_dimen, "\n");
  122. fprintf(Consq_output_first_dimen, "\n");
  123. fprintf(Consq_output_second_dimen, "\n");
  124. }
  125. Move_weight_and_bias();
  126. }
  127.  
  128. fclose(Consq_error_first_dimen);
  129. fclose(Consq_error_second_dimen);
  130. fclose(Consq_output_first_dimen);
  131. fclose(Consq_output_second_dimen);
  132.  
  133. printf("Build file successfully!");
  134. printf("\n");
  135. printf("\n");
  136. printf("\n");
  137. system("pause");
  138. return 0;
  139. }
  140.  
  141. void FileLoad_scan_number_of_training_sets()
  142. {
  143. char *char_of_load_training_data;
  144.  
  145. int length_of_first_column = 500; //每一行字源的長度,可隨意亂設
  146.  
  147. char first_column_information[500]; //建立一個字元長度為1000的字元矩陣
  148. char comma[] = ","; //以逗號當作分隔符號
  149.  
  150.  
  151. FILE *datafile;
  152. datafile = fopen("circle_fail.txt", "r");
  153. if(datafile == NULL) //判斷開檔是否成功
  154. {
  155. printf(" Fail to open file. "); //讀檔失敗的話
  156. }
  157. else
  158. {
  159. while (fgets(first_column_information, sizeof(first_column_information), datafile)) //一次讀取一行
  160. {
  161. number_of_training_sets = number_of_training_sets + 1; //先掃描一遍整體的行數
  162. }
  163. fseek(datafile,0,SEEK_SET); //將讀檔指標從SEEK_SET(文件一開始),指向位移0行的位置(可將指標指定欲達到的位置)
  164.  
  165. //生成input的二維動態陣列
  166. data_input = (double**)malloc(sizeof(double*)*number_of_training_sets);
  167. for(i=0; i<number_of_training_sets; i++)
  168. {
  169. data_input[i]=(double*)malloc(sizeof(double)*input_dimension);
  170. }
  171.  
  172. //生成output的二維動態陣列
  173. data_output = (double**)malloc(sizeof(double*)*number_of_training_sets);
  174. for(i=0; i<number_of_training_sets; i++)
  175. {
  176. data_output[i]=(double*)malloc(sizeof(double)*output_dimension);
  177. }
  178.  
  179. for(i=0;i<number_of_training_sets;i++)
  180. {
  181. if ((fgets(first_column_information, sizeof(first_column_information), datafile))!= NULL) //將data從頭再讀一遍
  182. {
  183. char_of_load_training_data = strtok(first_column_information,comma); //以comma符號為data分割
  184. for(j=0; j<input_dimension; j++)
  185. {
  186. if(char_of_load_training_data!=NULL) //當分割出來的字不是空的時候
  187. {
  188. zz = atof(char_of_load_training_data); //將字元轉換成double
  189. data_input[i][j]=zz; //將讀到的double存進input矩陣
  190. data_output[i][j]=zz; //將讀到的double存進output矩陣
  191. //printf("%f ", data_input[i][j]);
  192. char_of_load_training_data = strtok(NULL, comma); //繼續分割
  193. }
  194. }
  195. }
  196. //printf("\n");
  197. }
  198. }
  199. fclose(datafile); //把檔案關起來以避免錯誤
  200. }
  201.  
  202.  
  203. //標準化training data以使得其範圍落在0和1之間
  204. void Normalize_data()
  205. {
  206. tmp_matrix = (double*)malloc(sizeof(double)*number_of_training_sets);
  207. max = (double*)malloc(sizeof(double)*input_dimension);
  208. min = (double*)malloc(sizeof(double)*input_dimension);
  209.  
  210. for( i=0; i<input_dimension; i++)
  211. {
  212. max[i] = 0; //先把最大值設成一個任意的很小值
  213. min[i] = 9999; //先把最小值設成一個任意的很大值
  214. }
  215.  
  216. for( i=0; i<input_dimension; i++)
  217. {
  218. for( j=0; j<number_of_training_sets; j++)
  219. {
  220. tmp_matrix[j] = data_input[j][i];
  221. if(tmp_matrix[j]>max[i]) //抓出最大值
  222. {
  223. max[i] = tmp_matrix[j];
  224. }
  225. if(tmp_matrix[j]<min[i]) //抓出最小值
  226. {
  227. min[i] = tmp_matrix[j];
  228. }
  229. }
  230. }
  231.  
  232. for(i=0; i<input_dimension; i++)
  233. {
  234. for(j=0; j<number_of_training_sets; j++)
  235. {
  236. if(max[i] != min[i])
  237. {
  238. data_input[j][i] = ((data_input[j][i] - min[i]) / (max[i] - min[i]))*1.8-0.9; //normalize data
  239. data_output[j][i] = ((data_output[j][i] - min[i]) / (max[i] - min[i]))*1.8-0.9;
  240. }
  241. else
  242. {
  243. data_input[j][i] = 0; //當其最大值與最小值相等時
  244. data_output[j][i] = 0;
  245. }
  246. }
  247. }
  248.  
  249. //for(i=0;i<number_of_training_sets;i++)
  250. //{
  251. // for(j=0;j<input_dimension;j++)
  252. // {
  253. // printf("%f ", data_input[i][j]);
  254. // }
  255. // printf("\n");
  256. //}
  257. }
  258.  
  259. void Initial_the_weight()
  260. {
  261. weight_between_input_and_hidden = (double**)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  262. for(i=0;i<number_of_neuron_hidden_layer;i++)
  263. {
  264. weight_between_input_and_hidden[i] = (double*)malloc(sizeof(double*)*input_dimension);
  265. }
  266.  
  267. weight_between_hidden_and_output=(double**)malloc(sizeof(double*)*output_dimension);
  268. for(i=0;i<output_dimension;i++)
  269. {
  270. weight_between_hidden_and_output[i]=(double*)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  271. }
  272.  
  273. sum_of_weight_between_input_and_hidden = (double**)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  274. for(i=0;i<number_of_neuron_hidden_layer;i++)
  275. {
  276. sum_of_weight_between_input_and_hidden[i] = (double*)malloc(sizeof(double*)*input_dimension);
  277. }
  278.  
  279. sum_of_weight_between_hidden_and_output=(double**)malloc(sizeof(double*)*output_dimension);
  280. for(i=0;i<output_dimension;i++)
  281. {
  282. sum_of_weight_between_hidden_and_output[i]=(double*)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  283. }
  284.  
  285. modification_of_weight_between_input_and_hidden = (double**)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  286. for(i=0;i<number_of_neuron_hidden_layer;i++)
  287. {
  288. modification_of_weight_between_input_and_hidden[i] = (double*)malloc(sizeof(double*)*input_dimension);
  289. }
  290.  
  291. modification_of_weight_between_hidden_and_output=(double**)malloc(sizeof(double*)*output_dimension);
  292. for(i=0;i<output_dimension;i++)
  293. {
  294. modification_of_weight_between_hidden_and_output[i]=(double*)malloc(sizeof(double*)*number_of_neuron_hidden_layer);
  295. }
  296.  
  297.  
  298. bias_hidden = (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  299. bias_output = (double*)malloc(sizeof(double)*output_dimension);
  300.  
  301. sum_of_bias_hidden = (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  302. sum_of_bias_output = (double*)malloc(sizeof(double)*output_dimension);
  303.  
  304. modification_of_bias_hidden = (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  305. modification_of_bias_output = (double*)malloc(sizeof(double)*output_dimension);
  306.  
  307. srand(100);
  308. for(i=0;i<number_of_neuron_hidden_layer;i++)
  309. {
  310. for(j=0;j<input_dimension;j++)
  311. {
  312. //srand(rand()*100);
  313. cc=((double)rand()/(double)(RAND_MAX)); //利用除以最大的亂數來得到0到1之間的亂數浮點數
  314. weight_between_input_and_hidden[i][j]=cc;
  315. }
  316. }
  317.  
  318. srand(200);
  319. for(i=0;i<output_dimension;i++)
  320. {
  321. for(j=0;j<number_of_neuron_hidden_layer;j++)
  322. {
  323. cc=((double)rand()/(double)(RAND_MAX)); //利用除以最大的亂數來得到0到1之間的亂數浮點數
  324. weight_between_hidden_and_output[i][j]=cc;
  325. }
  326. }
  327.  
  328. srand(300);
  329. for(i=0;i<number_of_neuron_hidden_layer;i++)
  330. {
  331. //srand(rand()*100);
  332. bias_hidden[i] = ((double)rand()/(double)(RAND_MAX)); //利用除以最大的亂數來得到0到1之間的亂數浮點數
  333. }
  334.  
  335. srand(400);
  336. for(i=0;i<output_dimension;i++)
  337. {
  338. //srand(rand()*100);
  339. bias_output[i] = ((double)rand()/(double)(RAND_MAX)); //利用除以最大的亂數來得到0到1之間的亂數浮點數
  340. //printf("%f ", bias_output[i]);
  341. }
  342.  
  343.  
  344. internal_state_between_input_and_hidden = (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  345. internal_state_between_hidden_and_output = (double*)malloc(sizeof(double)*output_dimension);
  346. activation_value_hidden= (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  347. y_value_output = (double*)malloc(sizeof(double)*output_dimension);
  348.  
  349. epison_output = (double*)malloc(sizeof(double)*output_dimension);
  350. epison_hidden = (double*)malloc(sizeof(double)*number_of_neuron_hidden_layer);
  351.  
  352. total_error = (double*)malloc(sizeof(double)*output_dimension);
  353.  
  354. }
  355.  
  356. void Initialize_the_sum()
  357. {
  358. for(i=0;i<output_dimension;i++)
  359. {
  360. for(j=0;j<number_of_neuron_hidden_layer;j++)
  361. {
  362. sum_of_weight_between_hidden_and_output[i][j]=0;
  363. modification_of_weight_between_hidden_and_output[i][j]=0;
  364. }
  365. sum_of_bias_output[i] = 0;
  366. modification_of_bias_output[i]=0;
  367. }
  368.  
  369. for(i=0;i<number_of_neuron_hidden_layer;i++)
  370. {
  371. for(j=0;j<input_dimension;j++)
  372. {
  373. sum_of_weight_between_input_and_hidden[i][j]=0;
  374. modification_of_weight_between_input_and_hidden[i][j]=0;
  375. }
  376. sum_of_bias_hidden[i]=0;
  377. modification_of_bias_hidden[i]=0;
  378. }
  379.  
  380. }
  381.  
/*
 * Forward pass for training sample data_idx.
 *
 * Computes each hidden neuron's internal state (weighted input sum + bias)
 * and activation, then each output neuron's state and output, and finally
 * the per-dimension squared error against data_output[data_idx+1].  The
 * target is the NEXT sample (one-step-ahead prediction), which is why the
 * caller iterates only up to number_of_training_sets-1.
 *
 * Uses the global loop indices i/j and the global accumulator sum_of_weight.
 */
void Compute_internal_state(int data_idx)
{
    /* Hidden layer: state = W * x + bias, activation = transfer function. */
    for (i = 0; i < number_of_neuron_hidden_layer; i++)
    {
        sum_of_weight = 0;
        internal_state_between_input_and_hidden[i] = 0;
        for (j = 0; j < input_dimension; j++)
        {
            sum_of_weight = sum_of_weight + weight_between_input_and_hidden[i][j] * data_input[data_idx][j];
        }
        internal_state_between_input_and_hidden[i] = sum_of_weight + bias_hidden[i];
        activation_value_hidden[i] = function_of_hidden_layer(internal_state_between_input_and_hidden[i]);
    }

    sum_of_weight = 0;

    /* Output layer: state = W * hidden activations + bias. */
    for (i = 0; i < output_dimension; i++)
    {
        sum_of_weight = 0;
        internal_state_between_hidden_and_output[i] = 0;
        for (j = 0; j < number_of_neuron_hidden_layer; j++)
        {
            sum_of_weight = sum_of_weight + weight_between_hidden_and_output[i][j] * activation_value_hidden[j];
        }
        internal_state_between_hidden_and_output[i] = sum_of_weight + bias_output[i];
        y_value_output[i] = function_of_output_layer(internal_state_between_hidden_and_output[i]);
    }

    /* Squared-error loss per output dimension: (y - target)^2 / 2. */
    for (i = 0; i < output_dimension; i++)
    {
        total_error[i] = (pow((y_value_output[i] - data_output[data_idx+1][i]),2))/2;
        //printf("%f ", total_error[i]);
    }
}
  417.  
  418.  
  419. void Compute_epison(int data_idx)
  420. {
  421. for(i=0;i<output_dimension;i++)
  422. {
  423. epison_output[i] = first_direvative_of_function_of_output_layer(internal_state_between_hidden_and_output[i])*(y_value_output[i] - data_output[data_idx+1][i]);
  424. }
  425.  
  426. for(i=0;i<number_of_neuron_hidden_layer;i++)
  427. {
  428. for(j=0;j<output_dimension;j++)
  429. {
  430. sum_of_epison = sum_of_epison + weight_between_hidden_and_output[j][i] * epison_output[j];
  431. }
  432. epison_hidden[i] = (first_direvative_of_function_of_hidden_layer(internal_state_between_input_and_hidden[i]))*sum_of_epison;
  433. }
  434. }
  435.  
  436.  
  437. void Updata_weight_and_bias(int data_idx)
  438. {
  439. for(i=0; i<output_dimension; i++)
  440. {
  441. for(j=0;j<number_of_neuron_hidden_layer; j++)
  442. {
  443. modification_of_weight_between_hidden_and_output[i][j] = epison_output[i] * activation_value_hidden[j];
  444. }
  445. modification_of_bias_output[i] = epison_output[i];
  446. }
  447.  
  448. for(i=0;i<output_dimension;i++)
  449. {
  450. for(j=0;j<number_of_neuron_hidden_layer;j++)
  451. {
  452. sum_of_weight_between_hidden_and_output[i][j] = sum_of_weight_between_hidden_and_output[i][j] + modification_of_weight_between_hidden_and_output[i][j];
  453. }
  454. sum_of_bias_output[i] = sum_of_bias_output[i] + modification_of_bias_output[i];
  455. }
  456.  
  457. for(i=0;i<number_of_neuron_hidden_layer;i++)
  458. {
  459. for(j=0;j<input_dimension;j++)
  460. {
  461. modification_of_weight_between_input_and_hidden[i][j] = epison_hidden[i] * data_input[data_idx][j];
  462. }
  463. modification_of_bias_hidden[i] = epison_hidden[i];
  464. }
  465.  
  466. for(i=0; i<number_of_neuron_hidden_layer; i++)
  467. {
  468. for(j=0;j<input_dimension;j++)
  469. {
  470. sum_of_weight_between_input_and_hidden[i][j] = sum_of_weight_between_input_and_hidden[i][j] + modification_of_weight_between_input_and_hidden[i][j];
  471. }
  472. sum_of_bias_hidden[i] = sum_of_bias_hidden[i] + modification_of_bias_hidden[i];
  473. }
  474. }
  475.  
  476. void Move_weight_and_bias()
  477. {
  478. for(i=0;i<output_dimension;i++)
  479. {
  480. for(j=0;j<number_of_neuron_hidden_layer;j++)
  481. {
  482. weight_between_hidden_and_output[i][j] = weight_between_hidden_and_output[i][j] - (learning_rate * sum_of_weight_between_hidden_and_output[i][j]);
  483. }
  484. bias_output[i] = bias_output[i] - learning_rate * sum_of_bias_output[i];
  485. }
  486.  
  487. for(i=0; i<number_of_neuron_hidden_layer;i++)
  488. {
  489. for(j=0;j<input_dimension;j++)
  490. {
  491. weight_between_input_and_hidden[i][j] = weight_between_input_and_hidden[i][j] - (learning_rate * sum_of_weight_between_input_and_hidden[i][j]);
  492. }
  493. bias_hidden[i] = bias_hidden[i] - learning_rate * sum_of_bias_hidden[i];
  494. }
  495. }
  496.  
  497.  
  498. //give data in input layer
  499. double function_of_hidden_layer(double z)
  500. {
  501. return ((exp(z)-exp(-z))/(exp(z)+exp(-z))) ;
  502. }
  503.  
  504. // give data in hidden layer
  505. double function_of_output_layer(double z)
  506. {
  507. return ((exp(z)-exp(-z))/(exp(z)+exp(-z)));
  508. }
  509.  
  510. double first_direvative_of_function_of_hidden_layer(double z)
  511. {
  512. return (4/(pow((exp(z)+exp(-z)),2)));
  513. }
  514.  
  515. double first_direvative_of_function_of_output_layer(double z)
  516. {
  517. return (4/(pow((exp(z)+exp(-z)),2)));
  518. }
Compilation error #stdin compilation error #stdout 0s 0KB
stdin
Standard input is empty
compilation info
Main.java:1: error: illegal character: \35
#define _CRT_SECURE_NO_WARNINGS
^
Main.java:3: error: illegal character: \35
#include <stdio.h>
^
Main.java:4: error: illegal character: \35
#include <stdlib.h>
^
Main.java:5: error: illegal character: \35
#include <string.h>
^
Main.java:6: error: illegal character: \35
#include <math.h>
^
Main.java:7: error: illegal character: \35
#include <time.h>		//????????
^
Main.java:11: error: class, interface, or enum expected
void Normalize_data();
^
Main.java:12: error: class, interface, or enum expected
void Initial_the_weight();
^
Main.java:13: error: class, interface, or enum expected
void Initialize_the_sum();
^
Main.java:14: error: class, interface, or enum expected
void Compute_internal_state(int);
^
Main.java:15: error: class, interface, or enum expected
void Compute_epison(int);
^
Main.java:16: error: class, interface, or enum expected
void Updata_weight_and_bias(int);
^
Main.java:17: error: class, interface, or enum expected
void Move_weight_and_bias();
^
Main.java:19: error: class, interface, or enum expected
double function_of_hidden_layer(double);
^
Main.java:20: error: class, interface, or enum expected
double function_of_output_layer(double);
^
Main.java:21: error: class, interface, or enum expected
double first_direvative_of_function_of_hidden_layer(double);
^
Main.java:22: error: class, interface, or enum expected
double first_direvative_of_function_of_output_layer(double);
^
Main.java:26: error: class, interface, or enum expected
double learning_rate = 0.05;
^
Main.java:27: error: class, interface, or enum expected
double mementum;
^
Main.java:28: error: class, interface, or enum expected
int time_step = 1000;
^
Main.java:29: error: class, interface, or enum expected
int input_dimension=2;
^
Main.java:30: error: class, interface, or enum expected
int output_dimension=2;
^
Main.java:31: error: class, interface, or enum expected
int number_of_neuron_hidden_layer = 10;
^
Main.java:32: error: class, interface, or enum expected
int number_of_training_sets = 0;
^
Main.java:34: error: class, interface, or enum expected
int i,j;
^
Main.java:36: error: class, interface, or enum expected
double **data_input = 0;		//save input of training data
^
Main.java:37: error: class, interface, or enum expected
double **data_output = 0;			//save target of training daya
^
Main.java:39: error: class, interface, or enum expected
double zz;					//save tmp calculation number
^
Main.java:41: error: class, interface, or enum expected
double *tmp_matrix;				//save tmp matrix of all training data
^
Main.java:43: error: class, interface, or enum expected
double *max=0;				//save the maximum number of training data in each group
^
Main.java:44: error: class, interface, or enum expected
double *min=0;				//save the minimum number of training data in each group
^
Main.java:46: error: class, interface, or enum expected
double **weight_between_input_and_hidden;			//weight of the connection between input layer and hidden layer
^
Main.java:47: error: class, interface, or enum expected
double **weight_between_hidden_and_output;			//weight of the connection between hidden layer and output layer
^
Main.java:49: error: class, interface, or enum expected
double *bias_hidden;			//bias in hidden layer
^
Main.java:50: error: class, interface, or enum expected
double *bias_output;			//bias in output layer
^
Main.java:52: error: class, interface, or enum expected
double cc;					//save tmp random number
^
Main.java:54: error: class, interface, or enum expected
double *internal_state_between_input_and_hidden;			//internal state in hidden layer
^
Main.java:55: error: class, interface, or enum expected
double *internal_state_between_hidden_and_output;			//internal state in output layer
^
Main.java:57: error: class, interface, or enum expected
double *activation_value_hidden;			//activation value in hidden layer
^
Main.java:58: error: class, interface, or enum expected
double *y_value_output;						//output value in output layer
^
Main.java:60: error: class, interface, or enum expected
double sum_of_weight=0;			//for calculating the sum of weight
^
Main.java:61: error: class, interface, or enum expected
double *total_error;			//save total error in output layer
^
Main.java:62: error: class, interface, or enum expected
double *epison_hidden;			//epison number in hidden layer
^
Main.java:63: error: class, interface, or enum expected
double *epison_output;			//epison number in output layer
^
Main.java:64: error: class, interface, or enum expected
double sum_of_epison=0;			//for calculating the sum of epison
^
Main.java:66: error: class, interface, or enum expected
int idx = 0;
^
Main.java:67: error: class, interface, or enum expected
int count = 0;
^
Main.java:69: error: class, interface, or enum expected
double **sum_of_weight_between_input_and_hidden;
^
Main.java:70: error: class, interface, or enum expected
double **sum_of_weight_between_hidden_and_output;
^
Main.java:72: error: class, interface, or enum expected
double *sum_of_bias_hidden=0;
^
Main.java:73: error: class, interface, or enum expected
double *sum_of_bias_output=0;
^
Main.java:75: error: class, interface, or enum expected
double **modification_of_weight_between_input_and_hidden=0;
^
Main.java:76: error: class, interface, or enum expected
double **modification_of_weight_between_hidden_and_output=0;
^
Main.java:78: error: class, interface, or enum expected
double *modification_of_bias_hidden=0;
^
Main.java:79: error: class, interface, or enum expected
double *modification_of_bias_output=0;
^
Main.java:81: error: class, interface, or enum expected
double nn;
^
Main.java:83: error: class, interface, or enum expected
int main(){
^
Main.java:86: error: class, interface, or enum expected
	FILE *Consq_error_second_dimen;
	^
Main.java:87: error: class, interface, or enum expected
	FILE *Consq_output_first_dimen;
	^
Main.java:88: error: class, interface, or enum expected
	FILE *Consq_output_second_dimen;
	^
Main.java:90: error: class, interface, or enum expected
	Consq_error_first_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_error_first_dimen.txt", "w+");
	^
Main.java:91: error: class, interface, or enum expected
	Consq_error_second_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_error_second_dimen.txt","w+");
	^
Main.java:92: error: class, interface, or enum expected
	Consq_output_first_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_output_first_dimen.txt", "w+");
	^
Main.java:93: error: class, interface, or enum expected
	Consq_output_second_dimen = fopen("C:\\Users\\Wen\\Desktop\\Consq_output_scond_dimen.txt", "w+");
	^
Main.java:95: error: class, interface, or enum expected
	FileLoad_scan_number_of_training_sets();
	^
Main.java:96: error: class, interface, or enum expected
	Normalize_data();
	^
Main.java:97: error: class, interface, or enum expected
	Initial_the_weight();
	^
Main.java:99: error: class, interface, or enum expected
	for(count = 0; count<time_step; count++)
	^
Main.java:99: error: class, interface, or enum expected
	for(count = 0; count<time_step; count++)
	               ^
Main.java:99: error: class, interface, or enum expected
	for(count = 0; count<time_step; count++)
	                                ^
Main.java:102: error: class, interface, or enum expected
		for(idx=0;idx<(number_of_training_sets-1);idx++)
		^
Main.java:102: error: class, interface, or enum expected
		for(idx=0;idx<(number_of_training_sets-1);idx++)
		          ^
Main.java:102: error: class, interface, or enum expected
		for(idx=0;idx<(number_of_training_sets-1);idx++)
		                                          ^
Main.java:105: error: class, interface, or enum expected
			Compute_epison(idx);
			^
Main.java:106: error: class, interface, or enum expected
			Updata_weight_and_bias(idx);
			^
Main.java:107: error: class, interface, or enum expected
			for(i=0;i<output_dimension;i++)
			^
Main.java:107: error: class, interface, or enum expected
			for(i=0;i<output_dimension;i++)
			        ^
Main.java:107: error: class, interface, or enum expected
			for(i=0;i<output_dimension;i++)
			                           ^
Main.java:112: error: class, interface, or enum expected
					fprintf(Consq_output_first_dimen, "%f  ", y_value_output[i]);
					^
Main.java:113: error: class, interface, or enum expected
				}
				^
Main.java:117: error: class, interface, or enum expected
					fprintf(Consq_output_second_dimen, "%f  ", y_value_output[i]);
					^
Main.java:118: error: class, interface, or enum expected
				}
				^
Main.java:121: error: class, interface, or enum expected
			fprintf(Consq_error_second_dimen, "\n");
			^
Main.java:122: error: class, interface, or enum expected
			fprintf(Consq_output_first_dimen, "\n");
			^
Main.java:123: error: class, interface, or enum expected
			fprintf(Consq_output_second_dimen, "\n");
			^
Main.java:124: error: class, interface, or enum expected
		}
		^
Main.java:126: error: class, interface, or enum expected
	}
	^
Main.java:129: error: class, interface, or enum expected
	fclose(Consq_error_second_dimen);
	^
Main.java:130: error: class, interface, or enum expected
	fclose(Consq_output_first_dimen);
	^
Main.java:131: error: class, interface, or enum expected
	fclose(Consq_output_second_dimen);
	^
Main.java:133: error: class, interface, or enum expected
	printf("Build file successfully!");
	^
Main.java:134: error: class, interface, or enum expected
	printf("\n");
	^
Main.java:135: error: class, interface, or enum expected
	printf("\n");
	^
Main.java:136: error: class, interface, or enum expected
	printf("\n");
	^
Main.java:137: error: class, interface, or enum expected
	system("pause");
	^
Main.java:138: error: class, interface, or enum expected
	return 0;
	^
Main.java:139: error: class, interface, or enum expected
}
^
Main.java:145: error: class, interface, or enum expected
	int length_of_first_column = 500;		//????????,?????
	^
Main.java:147: error: class, interface, or enum expected
	char first_column_information[500];		//?????????1000?????
	^
Main.java:148: error: class, interface, or enum expected
	char comma[] = ",";						//?????????
	^
100 errors
stdout
Standard output is empty