0

## 有可能將以下兩段程式碼合併嗎?(求大神解)

```python
import numpy
import scipy.special
%matplotlib

def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
    """Initialise the network: node counts, weight matrices, learning rate.

    Link weights are drawn from a normal distribution centred on 0.0 with
    standard deviation 1/sqrt(nodes in the receiving layer).
    (Original paste had all indentation stripped and would not run.)
    """
    self.inodes = inputnodes
    self.hnodes = hiddennodes
    self.onodes = outputnodes

    # Weight matrices: wih (input -> hidden) and who (hidden -> output).
    self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5),
                                   (self.hnodes, self.inodes))
    self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5),
                                   (self.onodes, self.hnodes))

    self.lr = learningrate

    # Activation function: logistic sigmoid.
    self.activation_function = lambda x: scipy.special.expit(x)

def train(self, inputs_list, targets_list):
    """Train the network on one sample with a single backpropagation step.

    Original had undefined names ("target", "actual", "ouput_errors" vs
    "outputs_errors") and stray bare "layers" statements (truncated
    comments), plus stripped indentation.
    """
    # Convert input/target lists into 2-D column vectors.
    inputs = numpy.array(inputs_list, ndmin=2).T
    targets = numpy.array(targets_list, ndmin=2).T

    # Forward pass: input -> hidden -> output.
    hidden_inputs = numpy.dot(self.wih, inputs)
    hidden_outputs = self.activation_function(hidden_inputs)
    final_inputs = numpy.dot(self.who, hidden_outputs)
    final_outputs = self.activation_function(final_inputs)

    # Output-layer error is (target - actual).
    output_errors = targets - final_outputs
    # Hidden-layer error: output errors split by the hidden->output weights.
    hidden_errors = numpy.dot(self.who.T, output_errors)

    # Update the weights for the links between the hidden and output layers.
    self.who += self.lr * numpy.dot(
        (output_errors * final_outputs * (1.0 - final_outputs)),
        numpy.transpose(hidden_outputs))

    # Update the weights for the links between the input and hidden layers.
    self.wih += self.lr * numpy.dot(
        (hidden_errors * hidden_outputs * (1.0 - hidden_outputs)),
        numpy.transpose(inputs))
def query(self, inputs_list):
    """Run a forward pass and return the output-layer activations.

    Original had a parameter typo ("inouts_list" while the body used
    "inputs_list" -> NameError) and "self.ativation_function".
    """
    # Convert the input list into a 2-D column vector.
    inputs = numpy.array(inputs_list, ndmin=2).T

    # Input -> hidden.
    hidden_inputs = numpy.dot(self.wih, inputs)
    hidden_outputs = self.activation_function(hidden_inputs)

    # Hidden -> output.
    final_inputs = numpy.dot(self.who, hidden_outputs)
    final_outputs = self.activation_function(final_inputs)

    return final_outputs

def neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate):
    """Placeholder factory for the network.

    NOTE(review): this returns 0, so the later n.train()/n.query() calls
    cannot work — presumably this should be the class that owns __init__,
    train and query above; confirm against the original source.
    """
    return 0

# Network dimensions for 28x28 MNIST images and 10 digit classes.
input_nodes = 784
hidden_nodes = 200
output_nodes = 10

learning_rate = 1.0

n = neuralNetwork(input_nodes, hidden_nodes, output_nodes,
                  learning_rate)

# Read the training records before closing the file (the original closed
# the file without ever reading it, so training_data_list was undefined).
training_data_file = open("mnist_dataset/mnist_train.csv", 'r')
training_data_list = training_data_file.readlines()
training_data_file.close()

epochs = 5

for e in range(epochs):
    for record in training_data_list:
        all_values = record.split(',')
        # Scale pixel strings into 0.01..1.0. The original divided a Python
        # list by 255.0 (TypeError — paren misplaced) and used a stray
        # 0.01151 offset instead of 0.01.
        inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
        # Target vector: 0.01 everywhere except 0.99 at the correct label.
        targets = numpy.zeros(output_nodes) + 0.01
        targets[int(all_values[0])] = 0.99
        n.train(inputs, targets)

# Load the test records (original closed the file without reading it,
# leaving test_data_list undefined).
test_data_file = open("mnist_dataset/mnist_test.csv", 'r')
test_data_list = test_data_file.readlines()
test_data_file.close()

scorecard = []  # original never initialised this list

for record in test_data_list:
    all_values = record.split(',')

    # First value of each record is the correct digit label.
    correct_label = int(all_values[0])

    # Scale pixels into 0.01..1.0 (original divided a list by 255.0 and
    # added 1.0 instead of 0.01).
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01

    outputs = n.query(inputs)
    # Index of the highest output is the network's answer.
    label = numpy.argmax(outputs)

    scorecard.append(1 if label == correct_label else 0)

scorecard_array = numpy.asarray(scorecard)
# Original read from undefined "secordcard_array" and printed "performmance".
print("performance = ", scorecard_array.sum() / scorecard_array.size)
```
```python
import keras

from keras.datasets import minst
from keras.models import Sequential
from keras.layer.core import Dense, Dropout, Activation
from keras.optimizers import RMSprop
from keras.callbacks import EailyStopping, CSVLogger
%matplotlib inline
import matplotlib.pyplot as plt

# Training hyper-parameters.
batch_size = 128
num_classes = 10
epochs = 20

# Original called "minst.load_data()" — the dataset module is "mnist".
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Preview the first 10 training digits (indentation restored; the paste
# had flattened the loop body).
for i in range(10):
    plt.subplot(2, 5, i + 1)
    plt.title("M_%d" % i)
    plt.axis("off")
    plt.imshow(x_train[i].reshape(28, 28), cmap=None)
plt.show()

# Flatten 28x28 images to 784-vectors and scale pixels into 0..1.
x_train = x_train.reshape(60000, 784).astype('float32')
x_test = x_test.reshape(10000, 784).astype('float32')
x_train /= 255
x_test /= 255
# One-hot encode the labels.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

print(x_train.shape)
print(y_train.shape)
print(x_test.shape)
print(y_test.shape)

model = Sequential()
# The original built an empty Sequential model (no layers), which cannot
# learn anything. This is the standard Keras MNIST MLP matching the
# surrounding code (784 inputs, 10 softmax classes) — NOTE(review):
# confirm layer sizes against the intended original.
model.add(Dense(512, activation='relu', input_shape=(784,)))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(num_classes, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

# Stop early when validation loss stalls for 2 epochs; log each epoch.
es = EarlyStopping(monitor='val_loss', patience=2)
csv_logger = CSVLogger('training.log')
hist = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
                 verbose=1, validation_split=0.1, callbacks=[es, csv_logger])

# Original evaluated undefined names "x_text"/"y_text".
score = model.evaluate(x_test, y_test, verbose=0)
print('test loss:', score[0])
print('test acc:', score[1])

# Plot training vs validation loss per epoch.
loss = hist.history['loss']
val_loss = hist.history['val_loss']
epochs = len(loss)
plt.plot(range(epochs), loss, marker='.', label='loss(training data)')
plt.plot(range(epochs), val_loss, marker='.', label='val_loss(evaluate data)')
plt.legend(loc='best')
plt.grid()
plt.xlabel('epochs')
plt.ylabel('loss')
plt.show()
```

### 4 個回答

0
yabee5566
iT邦新手 5 級 ‧ 2020-06-22 07:31:46

0

「我啊災」!!

0

iT邦新手 5 級 ‧ 2020-06-22 10:21:53

fillano iT邦超人 1 級 ‧ 2020-06-22 14:35:14 檢舉

0

iT邦大神 1 級 ‧ 2020-06-22 15:57:58

```python
import numpy
import scipy.special
import keras

from keras.datasets import minst
from keras.models import Sequential
from keras.layer.core import Dense, Dropout, Activation
from keras.optimizers import RMSprop
from keras.callbacks import EailyStopping, CSVLogger
%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt

def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
    """Initialise the network: node counts, weight matrices, learning rate.

    Link weights are drawn from a normal distribution centred on 0.0 with
    standard deviation 1/sqrt(nodes in the receiving layer).
    (Original paste had all indentation stripped and would not run.)
    """
    self.inodes = inputnodes
    self.hnodes = hiddennodes
    self.onodes = outputnodes

    # Weight matrices: wih (input -> hidden) and who (hidden -> output).
    self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5),
                                   (self.hnodes, self.inodes))
    self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5),
                                   (self.onodes, self.hnodes))

    self.lr = learningrate

    # Activation function: logistic sigmoid.
    self.activation_function = lambda x: scipy.special.expit(x)

def train(self, inputs_list, targets_list):
    """Train the network on one sample with a single backpropagation step.

    Original had undefined names ("target", "actual", "ouput_errors" vs
    "outputs_errors") and stray bare "layers" statements (truncated
    comments), plus stripped indentation.
    """
    # Convert input/target lists into 2-D column vectors.
    inputs = numpy.array(inputs_list, ndmin=2).T
    targets = numpy.array(targets_list, ndmin=2).T

    # Forward pass: input -> hidden -> output.
    hidden_inputs = numpy.dot(self.wih, inputs)
    hidden_outputs = self.activation_function(hidden_inputs)
    final_inputs = numpy.dot(self.who, hidden_outputs)
    final_outputs = self.activation_function(final_inputs)

    # Output-layer error is (target - actual).
    output_errors = targets - final_outputs
    # Hidden-layer error: output errors split by the hidden->output weights.
    hidden_errors = numpy.dot(self.who.T, output_errors)

    # Update the weights for the links between the hidden and output layers.
    self.who += self.lr * numpy.dot(
        (output_errors * final_outputs * (1.0 - final_outputs)),
        numpy.transpose(hidden_outputs))

    # Update the weights for the links between the input and hidden layers.
    self.wih += self.lr * numpy.dot(
        (hidden_errors * hidden_outputs * (1.0 - hidden_outputs)),
        numpy.transpose(inputs))
def query(self, inputs_list):
    """Run a forward pass and return the output-layer activations.

    Original had a parameter typo ("inouts_list" while the body used
    "inputs_list" -> NameError) and "self.ativation_function".
    """
    # Convert the input list into a 2-D column vector.
    inputs = numpy.array(inputs_list, ndmin=2).T

    # Input -> hidden.
    hidden_inputs = numpy.dot(self.wih, inputs)
    hidden_outputs = self.activation_function(hidden_inputs)

    # Hidden -> output.
    final_inputs = numpy.dot(self.who, hidden_outputs)
    final_outputs = self.activation_function(final_inputs)

    return final_outputs

def neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate):
    """Placeholder factory for the network.

    NOTE(review): this returns 0, so the later n.train()/n.query() calls
    cannot work — presumably this should be the class that owns __init__,
    train and query above; confirm against the original source.
    """
    return 0

# Network dimensions for 28x28 MNIST images and 10 digit classes.
input_nodes = 784
hidden_nodes = 200
output_nodes = 10

learning_rate = 1.0

n = neuralNetwork(input_nodes, hidden_nodes, output_nodes,
                  learning_rate)

# Read the training records before closing the file (the original closed
# the file without ever reading it, so training_data_list was undefined).
training_data_file = open("mnist_dataset/mnist_train.csv", 'r')
training_data_list = training_data_file.readlines()
training_data_file.close()

epochs = 5

for e in range(epochs):
    for record in training_data_list:
        all_values = record.split(',')
        # Scale pixel strings into 0.01..1.0. The original divided a Python
        # list by 255.0 (TypeError — paren misplaced) and used a stray
        # 0.01151 offset instead of 0.01.
        inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
        # Target vector: 0.01 everywhere except 0.99 at the correct label.
        targets = numpy.zeros(output_nodes) + 0.01
        targets[int(all_values[0])] = 0.99
        n.train(inputs, targets)

# Load the test records (original closed the file without reading it,
# leaving test_data_list undefined).
test_data_file = open("mnist_dataset/mnist_test.csv", 'r')
test_data_list = test_data_file.readlines()
test_data_file.close()

scorecard = []  # original never initialised this list

for record in test_data_list:
    all_values = record.split(',')

    # First value of each record is the correct digit label.
    correct_label = int(all_values[0])

    # Scale pixels into 0.01..1.0 (original divided a list by 255.0 and
    # added 1.0 instead of 0.01).
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01

    outputs = n.query(inputs)
    # Index of the highest output is the network's answer.
    label = numpy.argmax(outputs)

    scorecard.append(1 if label == correct_label else 0)

scorecard_array = numpy.asarray(scorecard)
# Original read from undefined "secordcard_array" and printed "performmance".
print("performance = ", scorecard_array.sum() / scorecard_array.size)

# Training hyper-parameters.
batch_size = 128
num_classes = 10
epochs = 20

# Original called "minst.load_data()" — the dataset module is "mnist".
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Preview the first 10 training digits (indentation restored; the paste
# had flattened the loop body).
for i in range(10):
    plt.subplot(2, 5, i + 1)
    plt.title("M_%d" % i)
    plt.axis("off")
    plt.imshow(x_train[i].reshape(28, 28), cmap=None)
plt.show()

# Flatten 28x28 images to 784-vectors and scale pixels into 0..1.
x_train = x_train.reshape(60000, 784).astype('float32')
x_test = x_test.reshape(10000, 784).astype('float32')
x_train /= 255
x_test /= 255
# One-hot encode the labels.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

print(x_train.shape)
print(y_train.shape)
print(x_test.shape)
print(y_test.shape)

model = Sequential()
# The original built an empty Sequential model (no layers), which cannot
# learn anything. This is the standard Keras MNIST MLP matching the
# surrounding code (784 inputs, 10 softmax classes) — NOTE(review):
# confirm layer sizes against the intended original.
model.add(Dense(512, activation='relu', input_shape=(784,)))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(num_classes, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

# Stop early when validation loss stalls for 2 epochs; log each epoch.
es = EarlyStopping(monitor='val_loss', patience=2)
csv_logger = CSVLogger('training.log')
hist = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
                 verbose=1, validation_split=0.1, callbacks=[es, csv_logger])

# Original evaluated undefined names "x_text"/"y_text".
score = model.evaluate(x_test, y_test, verbose=0)
print('test loss:', score[0])
print('test acc:', score[1])

# Plot training vs validation loss per epoch.
loss = hist.history['loss']
val_loss = hist.history['val_loss']
epochs = len(loss)
plt.plot(range(epochs), loss, marker='.', label='loss(training data)')
plt.plot(range(epochs), val_loss, marker='.', label='val_loss(evaluate data)')
plt.legend(loc='best')
plt.grid()
plt.xlabel('epochs')
plt.ylabel('loss')
plt.show()
```
WQ iT邦新手 3 級 ‧ 2020-06-23 16:18:48 檢舉

yorkc99 iT邦新手 5 級 ‧ 2020-07-11 10:49:33 檢舉