資源簡介
本代碼利用numpy實現了深度前饋網絡;代碼包括了網絡深度,每層的單元數和每層的激活函數可以自選,BP算法;最後跟了一個多項式擬合的例子。該代碼適合深度學習初學者。
代碼片段和文件信息
# =============================================================================
# 深度前饋網絡框架
#     多項式(正弦)擬合任務,需要numpy包和matplotlib包;
#                                                       Written By Wang Chunlei
# =============================================================================
#------------------ 定義深度前饋網絡 -------------------------------------
import numpy as np


class MyDfn:
    """Deep feed-forward network implemented with plain numpy.

    The depth, the number of units per layer and the activation function
    of each layer are user-selectable; training uses backpropagation
    with stochastic gradient descent.
    """
    __WtInitVar = 0.01  # weights drawn from a standard normal; this scales the std-dev
    __BsInitAmp = 0.01  # biases drawn uniformly; this bounds their range
    __miu = 0.02        # SGD learning rate
????
????#?網(wǎng)絡(luò)初始化函數(shù)
????def?__init__(self?InputLen=1?layerNum=0?UnitNum=[]?ActiveFcs=[]):
????????self.layerNum?=?layerNum?#網(wǎng)絡(luò)層數(shù)(int)
????????self.InputLen?=?InputLen?#網(wǎng)絡(luò)輸入數(shù)據(jù)長度(int)
????????self.WeightMts?=[]?#網(wǎng)絡(luò)各層權(quán)值list內(nèi)容為numpy矩陣
????????self.BiasVc?=?[]?#網(wǎng)絡(luò)各層閾值list內(nèi)容為numpy矩陣
????????self.ActiveFcs?=[]?#網(wǎng)絡(luò)各層的激活函數(shù)list內(nèi)容為函數(shù)指針
????????self.UnitNum?=?[]?#網(wǎng)絡(luò)各層的單元數(shù)numpy數(shù)組?????
????????#?如果網(wǎng)絡(luò)層數(shù)等于0
????????if(self.layerNum?==?0):
????????????return
????????#?每層網(wǎng)絡(luò)的單元數(shù)目
????????if(UnitNum.size?==?layerNum):
????????????self.UnitNum?=?UnitNum
????????else:
????????????print(“UnitNum長度和layerNum不等“)
????????????return
????????#?每層網(wǎng)絡(luò)的激活函數(shù)和導數(shù)對應(yīng)的函數(shù)指針
????????if(len(ActiveFcs)?!=?self.layerNum):
????????????print(“ActiveFcs維度有誤“)
????????????return
????????else:
????????????self.ActiveFcs?=?ActiveFcs
????????#?初始化網(wǎng)絡(luò)????????
????????self.WeightMts.append(?self.__WtInitVar*np.random.randn(UnitNum[0]?InputLen)?)
????????self.BiasVc.append(?self.__BsInitAmp*np.random.rand(UnitNum[0]1)?)
????????for?idi?in?range(1?self.layerNum):
????????????self.WeightMts.append(self.__WtInitVar*np.random.randn(UnitNum[idi]?UnitNum[idi-1]))
????????????self.BiasVc.append(self.__BsInitAmp*np.random.rand(UnitNum[idi]1))
????
????#?顯示網(wǎng)絡(luò)結(jié)構(gòu)函數(shù)????
????def?PrintNetworkInfo(self):???????????
????????print(“網(wǎng)絡(luò)層數(shù)=%d“?%?self.layerNum)
????????if(self.layerNum?>=?1):
????????????print(“第1層:輸入數(shù)據(jù)長度=%d,該層單元數(shù)=%d“?%?(self.InputLen?self.UnitNum[0]))
????????????for?idi?in?range(1?self.layerNum):
????????????????print(“第%d層:輸入數(shù)據(jù)長度=%d,該層單元數(shù)=%d“?%?(idi+1?self.UnitNum[idi-1]?self.UnitNum[idi]))
????
????#?前饋函數(shù)(Input為numpy列向量)
????def?Forward(self?Input):
????????if(Input.shape?!=?(self.InputLen?1)):
????????????print(“輸入數(shù)據(jù)維度和網(wǎng)絡(luò)不符“)
????????????return?0.0
????????#?self.LyVals是一個長度為(self.layerNum+1)的列表
????????#?第一個元素是網(wǎng)絡(luò)輸入值,后面依次是各層輸出值
????????#?self.LyDris是一個長度為self.layerNum的列表,每個元素都是對應(yīng)層輸出的導數(shù)
????????self.LyVals?=?[Input]
????????self.LyDris?=?[]
????????for?idi?in?range(self.layerNum):
????????????ZVal?=?np.dot(self.WeightMts[idi]?self.LyVals[idi])?+?self.BiasVc[idi]
????????????ValTmp?DriTmp?=?self.ActiveFcs[
評論
共有 條評論