

Building a Deep Neural Network (DNN) in Python


This post is a summary of working through the free online book Neural Networks and Deep Learning: building a neural network in Python that recognizes handwritten digits.

The code consists of three parts:

1) data loading and preprocessing

2) building the network class and its methods

3) a test script (a sketch of such a driver is given after the network code below)

1) Data loading:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017-03-12 15:11
# @Author  : CC
# @File    : net_load_data.py
# @Software: PyCharm Community Edition

import numpy as np
import cPickle


def load_data():
    """Load the unpacked MNIST pickle and return the raw data tuples."""
    with open('data/mnist_pkl/mnist.pkl', 'rb') as f:
        try:
            train_data, validation_data, test_data = cPickle.load(f)
            print "the file opened successfully"
            # train_data[0].shape == (50000, 784), train_data[1].shape == (50000,)
            return (train_data, validation_data, test_data)
        except EOFError:
            print 'the file open error'
            return None


def data_transform():
    """Convert the raw data into the format used by the network."""
    t_d, va_d, te_d = load_data()
    # t_d[0]: (50000, 784), va_d[0]: (10000, 784), te_d[0]: (10000, 784)
    n = [np.reshape(x, (784, 1)) for x in t_d[0]]   # reshape each training image into a (784, 1) column
    m = [vectors(y) for y in t_d[1]]                # one-hot encode each training label as a (10, 1) column
    train_data = zip(n, m)                          # pair each image with its label as a tuple

    n = [np.reshape(x, (784, 1)) for x in va_d[0]]  # reshape each validation image into (784, 1)
    validation_data = zip(n, va_d[1])               # validation labels are kept as plain integers

    n = [np.reshape(x, (784, 1)) for x in te_d[0]]  # reshape each test image into (784, 1)
    test_data = zip(n, te_d[1])                     # test labels are kept as plain integers
    return (train_data, validation_data, test_data)


def vectors(y):
    """Return a (10, 1) one-hot column vector with a 1.0 in position y."""
    label = np.zeros((10, 1))
    label[y] = 1.0  # use a float so later arithmetic stays floating point
    return label
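Before wiring up the network, it is worth sanity-checking the loader. The snippet below is a minimal sketch (not part of the original post); it assumes the module above is saved as net_load_data.py and that the pickle sits at data/mnist_pkl/mnist.pkl, and it simply reprints the shapes noted in the comments above.

# shape check for the loader (Python 2, matching the code above)
from net_load_data import data_transform

train_data, validation_data, test_data = data_transform()
print len(train_data)          # 50000 (image, label) pairs
print train_data[0][0].shape   # (784, 1) input column
print train_data[0][1].shape   # (10, 1) one-hot label
print test_data[0][1]          # plain integer label, e.g. 7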

2) Network construction:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017-03-12 16:07
# @Author  : CC
# @File    : net_network.py

import numpy as np
import random


class Network(object):  # new-style class: isinstance(net, object) is True

    def __init__(self, sizes):
        self.num_layers = len(sizes)
        self.sizes = sizes
        # one weight matrix per pair of adjacent layers, e.g. weight[0].shape == (sizes[1], sizes[0])
        self.weight = [np.random.randn(a1, a2) for (a1, a2) in zip(sizes[1:], sizes[:-1])]
        self.bias = [np.random.randn(a3, 1) for a3 in sizes[1:]]

    def SGD(self, train_data, min_batch_size, epoches, eta, test_data=None):
        """1) shuffle the samples and split the training data into mini-batches
           2) compute the back-propagation gradients for each mini-batch
           3) apply the weight updates"""
        if test_data: n_test = len(test_data)
        n = len(train_data)  # 50000 training pairs
        for k in xrange(0, epoches):  # each epoch starts from the updated weights
            random.shuffle(train_data)  # reshuffle, then rebuild the mini-batches
            min_batches = [train_data[j:j + min_batch_size] for j in xrange(0, n, min_batch_size)]
            for min_batch in min_batches:  # one update per mini-batch (simple but not very efficient)
                self.updata_parameter(min_batch, eta)
            if test_data:
                num = self.evaluate(test_data)
                print "epoch {0}: {1} / {2}".format(k, num, n_test)
            else:
                print "epoch {0} completed".format(k)

    def forward(self, x):
        """Feed x forward through every layer and return the output activation."""
        for w, b in zip(self.weight, self.bias):
            x = sigmoid(np.dot(w, x) + b)
        return x

    def updata_parameter(self, min_batch, eta):
        """1) back-propagate to get the gradient of each sample
           2) accumulate the gradients over the mini-batch
           3) update the weights and biases"""
        ndeltab = [np.zeros(b.shape) for b in self.bias]
        ndeltaw = [np.zeros(w.shape) for w in self.weight]
        for x, y in min_batch:
            deltab, deltaw = self.backprop(x, y)
            ndeltab = [nb + db for nb, db in zip(ndeltab, deltab)]
            ndeltaw = [nw + dw for nw, dw in zip(ndeltaw, deltaw)]
        self.bias = [b - eta * ndb / len(min_batch) for ndb, b in zip(ndeltab, self.bias)]
        self.weight = [w - eta * ndw / len(min_batch) for ndw, w in zip(ndeltaw, self.weight)]

    def backprop(self, x, y):
        """Run the forward pass, then back-propagate; return (deltab, deltaw)."""
        activation = x
        activations = [x]  # activations of every layer, input included
        zs = []            # weighted inputs z, kept to evaluate the sigmoid derivative
        for w, b in zip(self.weight, self.bias):
            z = np.dot(w, activation) + b
            zs.append(z)
            activation = sigmoid(z)
            activations.append(activation)
        # delta of the output layer: elementwise product of arrays with the same shape
        delta = self.top_subtract(activations[-1], y) * dsigmoid(zs[-1])
        deltaw = [np.zeros(w1.shape) for w1 in self.weight]  # gradient placeholders, one array per layer
        deltab = [np.zeros(b1.shape) for b1 in self.bias]
        deltab[-1] = delta
        deltaw[-1] = np.dot(delta, activations[-2].transpose())
        for k in xrange(2, self.num_layers):  # propagate the error backwards through the hidden layers
            delta = np.dot(self.weight[-k + 1].transpose(), delta) * dsigmoid(zs[-k])
            deltab[-k] = delta
            deltaw[-k] = np.dot(delta, activations[-k - 1].transpose())
        return (deltab, deltaw)

    def evaluate(self, test_data):
        """Accuracy on the validation/test set, whose labels are plain integers."""
        results = [(np.argmax(self.forward(x)), y) for x, y in test_data]
        return sum(int(a == b) for a, b in results)

    def top_subtract(self, x, y):
        """Derivative of the quadratic cost with respect to the output activation."""
        return (x - y)


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def dsigmoid(x):
    z = sigmoid(x)
    return z * (1 - z)
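The outline at the top lists a third part, a test script, which does not appear in the post itself. The driver below is a minimal sketch of what such a script could look like, assuming the two modules are saved as net_load_data.py and net_network.py. The hidden-layer size (30) and the hyper-parameters (mini-batch size 10, 30 epochs, learning rate 3.0) are typical values from the book, not values given in this post; only the 784 input and 10 output sizes are fixed by the data.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# net_test.py -- hypothetical driver tying the two modules together (Python 2)
from net_load_data import data_transform
from net_network import Network

# load and reshape the MNIST data
train_data, validation_data, test_data = data_transform()

# 784 input pixels, one hidden layer of 30 neurons (assumed), 10 output classes
net = Network([784, 30, 10])

# mini-batch size 10, 30 epochs, learning rate 3.0; report test accuracy each epoch
net.SGD(train_data, 10, 30, 3.0, test_data=test_data)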