用C語言構建了一個非常簡單的神經網絡,用來理解NN工作的基本原理。
1. 模塊說明:
NVCELL: 單個神經元結構,包含輸入向量和權重向量,並指定了對應的傳遞/激活函數。
NVLAYER: 單層神經網絡,包含若干個NVCELL結構
各結構對應的方法函數包括:NVCELL和NVLAYER的產生/銷燬函數,向前/向後傳導函數,權重和偏置係數的隨機產生函數等。
2. 應用例子:
構建一個3層神經網絡,通過一組數據來學習三數邏輯:當3個數字之和大於等於2時輸出爲1,否則輸出爲0。
具體代碼見附。
將https://github.com/midaszhou/nnc代碼clone下來,然後執行make.
3. 參考資料:
《機器學習與深度學習 通過C語言模擬》 小高知宏 著
應用例子程序如下:
/*------------------------ test_nnc.c -----------------------------------
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
Midas Zhou
[email protected]
-----------------------------------------------------------------------*/
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <math.h>
#include <string.h>
#include "nnc.h"
#define ERR_LIMIT 0.0001 /* 允許誤差值 */
/* Program entry point.
 *
 * Builds a 3-layer neural network (input -> hidden -> output) from the
 * nnc library, trains it by batch learning on 8 samples of a 3-input
 * rule (teacher value is 1 when the sum of the three binary inputs is
 * >= 2, else 0), prints the trained model's output for every sample,
 * then frees all network resources.
 *
 * Returns 0 on completion.
 */
int main(void)
{
	int i, j;
	int count = 0;          /* number of completed batch-learning passes */

	int wi_cellnum = 3;     /* input layer cell number */
	int wm_cellnum = 3;     /* middle (hidden) layer cell number */
	int wo_cellnum = 1;     /* output layer cell number */

	int wi_inpnum = 3;      /* number of inputs for each input-layer nvcell */
	int wm_inpnum = 3;      /* number of inputs for each middle-layer nvcell */
	int wo_inpnum = 3;      /* number of inputs for each output-layer nvcell */

	double err;             /* accumulated squared error for one batch */

	enum { NCOL = 4 };      /* per-sample stride in pin[]: 3 inputs + 1 teacher value */
	int ns = 8;             /* number of training samples */
	double pin[8 * NCOL] =  /* each row: 3 inputs followed by 1 teacher value */
	{
		1,1,1,1,
		1,1,0,1,
		1,0,1,1,
		1,0,0,0,
		0,1,1,1,
		0,1,0,0,
		0,0,1,0,
		0,0,0,0,
	};

	double data_input[3];   /* current sample's inputs; input layer reads from here */

	/* <<<<<<<<<<<<<<<<< Create Neuron Net >>>>>>>>>>>>> */
	/* 1. Create the input layer: build a template cell that reads from
	 *    data_input[], then replicate it wi_cellnum times. */
	NVCELL *wi_tempcell = new_nvcell(wi_inpnum, NULL, data_input, NULL, 0, func_sigmoid);
	nvcell_rand_dwv(wi_tempcell);   /* randomize initial weights and bias */
	NVLAYER *wi_layer = new_nvlayer(wi_cellnum, wi_tempcell);

	/* 2. Create the middle (hidden) layer, fed by the input layer's cells. */
	NVCELL *wm_tempcell = new_nvcell(wm_inpnum, wi_layer->nvcells, NULL, NULL, 0, func_sigmoid);
	nvcell_rand_dwv(wm_tempcell);
	NVLAYER *wm_layer = new_nvlayer(wm_cellnum, wm_tempcell);

	/* 3. Create the output layer, fed by the middle layer's cells. */
	NVCELL *wo_tempcell = new_nvcell(wo_inpnum, wm_layer->nvcells, NULL, NULL, 0, func_sigmoid);
	nvcell_rand_dwv(wo_tempcell);
	NVLAYER *wo_layer = new_nvlayer(wo_cellnum, wo_tempcell);

	/* <<<<<<<<<<<<<<<<< NNC Learning Process >>>>>>>>>>>>> */
	nnc_set_param(2.0);     /* set learning rate */

	err = 10;               /* any value > ERR_LIMIT, to enter the while() loop */

	printf("NN model starts learning ...\n");
	while (err > ERR_LIMIT)
	{
		/* reset batch error accumulator */
		err = 0.0;

		/* batch learning: one forward + backward pass per sample */
		for (i = 0; i < ns; i++)
		{
			/* 1. load current sample's inputs into data_input[] */
			memcpy(data_input, pin + NCOL * i, wi_inpnum * sizeof(double));

			/* 2. feed forward through wi -> wm -> wo layers */
			nvlayer_feed_forward(wi_layer);
			nvlayer_feed_forward(wm_layer);
			nvlayer_feed_forward(wo_layer);

			/* 3. accumulate squared error against the teacher value
			 *    (column wi_inpnum of row i) */
			err += (wo_layer->nvcells[0]->dout - pin[wi_inpnum + i * NCOL])
			     * (wo_layer->nvcells[0]->dout - pin[wi_inpnum + i * NCOL]);

			/* 4. back-propagate wo -> wm -> wi; each layer updates
			 *    its own weights/biases internally */
			nvlayer_feed_backward(wo_layer, pin + (wi_inpnum + i * NCOL));
			nvlayer_feed_backward(wm_layer, NULL);
			nvlayer_feed_backward(wi_layer, NULL);
		}

		count++;
		if ((count & 255) == 0)    /* progress report every 256 passes */
			printf(" %dth learning, err=%0.8f \n", count, err);
	}
	printf(" %dth learning, err=%0.8f \n", count, err);
	printf("Finish %d times batch learning!. \n\n", count);

	/* <<<<<<<<<<<<<<<<< Test Learned NN Model >>>>>>>>>>>>> */
	printf("----------- Test learned NN Model -----------\n");
	for (i = 0; i < ns; i++)
	{
		/* load sample inputs */
		memcpy(data_input, pin + NCOL * i, wi_inpnum * sizeof(double));

		/* feed forward through wi -> wm -> wo layers */
		nvlayer_feed_forward(wi_layer);
		nvlayer_feed_forward(wm_layer);
		nvlayer_feed_forward(wo_layer);

		/* print inputs and the model's output */
		printf("Input: ");
		for (j = 0; j < wi_inpnum; j++)
			printf("%lf ", data_input[j]);
		printf("\n");
		printf("output: %lf \n", wo_layer->nvcells[0]->dout);
	}

	/* <<<<<<<<<<<<<<<<< Destroy NN Model >>>>>>>>>>>>> */
	/* NOTE(review): template cells are freed separately from the layers;
	 * assumes new_nvlayer() copies the template — confirm in nnc.h */
	free_nvcell(wi_tempcell);
	free_nvcell(wm_tempcell);
	free_nvcell(wo_tempcell);
	free_nvlayer(wi_layer);
	free_nvlayer(wm_layer);
	free_nvlayer(wo_layer);

	usleep(100000);         /* brief pause before exit */

	return 0;
}