You can try the following with the scikit-learn digits dataset:
from sklearn.neural_network import BernoulliRBM
from sklearn import datasets
import numpy as np
import matplotlib.pyplot as plt

def show_digits(im, title):
    plt.figure(figsize=(5, 5))
    plt.gray()
    for i in range(im.shape[0]):
        plt.subplot(10, 10, i + 1)
        plt.imshow(np.reshape(im[i, :], (8, 8)))
        plt.axis('off')
    plt.suptitle(title)
    plt.show()

def rbm():
    digits = datasets.load_digits()
    bindigit_trn = np.asarray(digits.data, 'float32')
    # scale each image to [0, 1] so the pixels can be read as Bernoulli probabilities
    for i in range(len(bindigit_trn)):
        bindigit_trn[i, :] = bindigit_trn[i, :] / np.max(bindigit_trn[i, :])
    print(bindigit_trn.shape)
    # (1797, 64) => 1797 8x8 digit images
    digits = bindigit_trn[:100, :]
    print(digits.shape)
    # (100, 64) => 100 images
    show_digits(digits, 'original digits')
    rbm = BernoulliRBM(n_iter=10, learning_rate=0.1, n_components=10, random_state=0, verbose=True)
    rbm.fit(bindigit_trn)
    print(rbm.components_.shape)
    # (10, 64) => one 8x8 weight image per hidden unit
    digits_new = digits.copy()  # rbm.components_.copy()
    # Gibbs sampling: each call to rbm.gibbs does one visible -> hidden -> visible step
    for j in range(10000):
        for i in range(100):
            digits_new[i, :] = rbm.gibbs(digits_new[i, :])
    print(digits_new.shape)
    # (100, 64)
    show_digits(digits_new, 'sampled digits')
    weights = rbm.components_
    return weights

weights = rbm()
show_digits(weights, 'weights')
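
To see what each rbm.gibbs call is doing, one Gibbs step can be written out by hand from the fitted parameters (components_, intercept_hidden_ and intercept_visible_). The sketch below is only an illustration of the standard Bernoulli RBM update, not code taken from scikit-learn; the manual_gibbs_step helper and its rng argument are names invented for the example:

import numpy as np
from scipy.special import expit  # logistic sigmoid

def manual_gibbs_step(rbm, v, rng):
    # hypothetical helper: one v -> h -> v' step for a fitted BernoulliRBM
    # p(h = 1 | v) = sigmoid(W v + b_hidden), with W = rbm.components_
    p_h = expit(v @ rbm.components_.T + rbm.intercept_hidden_)
    h = (rng.random(p_h.shape) < p_h).astype(np.float32)   # sample hidden units
    # p(v = 1 | h) = sigmoid(W^T h + b_visible)
    p_v = expit(h @ rbm.components_ + rbm.intercept_visible_)
    return (rng.random(p_v.shape) < p_v).astype(np.float32)  # sample visible units

# usage (assuming a fitted rbm and a normalized 64-pixel image v from above):
# rng = np.random.default_rng(0)
# v = manual_gibbs_step(rbm, v, rng)

Repeating this step many times, as the loop above does with rbm.gibbs, moves the chain away from the starting digits toward samples from the model. For reference, the verbose output of rbm.fit in the run above was: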
#[BernoulliRBM] Iteration 1, pseudo-likelihood = -25.85, time = 0.02s
#[BernoulliRBM] Iteration 2, pseudo-likelihood = -25.67, time = 0.02s
#[BernoulliRBM] Iteration 3, pseudo-likelihood = -25.45, time = 0.03s
#[BernoulliRBM] Iteration 4, pseudo-likelihood = -24.34, time = 0.02s
#[BernoulliRBM] Iteration 5, pseudo-likelihood = -23.41, time = 0.02s
#[BernoulliRBM] Iteration 6, pseudo-likelihood = -22.33, time = 0.02s
#[BernoulliRBM] Iteration 7, pseudo-likelihood = -21.88, time = 0.02s
#[BernoulliRBM] Iteration 8, pseudo-likelihood = -21.66, time = 0.02s
#[BernoulliRBM] Iteration 9, pseudo-likelihood = -21.74, time = 0.02s
#[BernoulliRBM] Iteration 10, pseudo-likelihood = -21.04, time = 0.02s
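
The pseudo-likelihood printed at each iteration is scikit-learn's proxy for the (intractable) log-likelihood, and it should generally climb as training progresses, as it does here. If you want the same quantity for individual images after fitting, score_samples returns it per sample; a minimal sketch, assuming the fitted rbm and the normalized bindigit_trn array are in scope (in the code above they live inside the rbm() function):

# mean pseudo-likelihood over the last 100 images (an arbitrary hold-out slice)
print(rbm.score_samples(bindigit_trn[-100:, :]).mean())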