
# Perceptron (1957)

The classic AND gate:

x1 | x2 | y
0  | 0  | 0
0  | 1  | 0
1  | 0  | 0
1  | 1  | 1

A perceptron combines its inputs as x1*w1 + x2*w2, where
x1, x2 are the inputs and
w1, w2 are the weights.

If the output is 0 when this weighted sum is at most a threshold b and 1 when it exceeds b, we get the AND gate:

y = 0 : if x1*w1 + x2*w2 <= b
y = 1 : if x1*w1 + x2*w2 > b

Moving the threshold to the left side turns it into a bias term, which is the form the code below uses: y = 1 if b + x1*w1 + x2*w2 > 0.

 

# AND gate

import numpy as np
def AND (x1, x2) :
    x = np.array([x1, x2])
    w = np.array([0.5, 0.5])
    b = -0.5 # -0.8 works just as well; any bias with -1.0 < b <= -0.5 realizes AND
    tmp = np.sum(w*x) + b
    if tmp <= 0 :
        return 0
    else : 
        return 1
# run the perceptron on all four input combinations
for xs in [(0,0),(1,0),(0,1),(1,1)] :
    y = AND(xs[0], xs[1])
    print(str(xs) + "=>" + str(y))
# learning is the process of finding weights and a bias like these
# i.e. finding the optimal weights and bias over all the data

(0, 0)=>0
(1, 0)=>0
(0, 1)=>0
(1, 1)=>1

 

# OR gate

import numpy as np
def OR (x1, x2) :
    x = np.array([x1, x2])
    w = np.array([0.5, 0.5])
    b = -0.2 # any bias with -0.5 < b <= 0 realizes OR
    tmp = np.sum(w*x) + b
    if tmp <= 0 :
        return 0
    else : 
        return 1
# run the perceptron on all four input combinations
for xs in [(0,0),(1,0),(0,1),(1,1)] :
    y = OR(xs[0], xs[1])
    print(str(xs) + "=>" + str(y))
# only the bias changed, from -0.5 to -0.2 => the same perceptron now computes OR

(0, 0)=>0
(1, 0)=>1
(0, 1)=>1
(1, 1)=>1

 

# NAND gate

import numpy as np
def NAND (x1, x2) :
    x = np.array([x1, x2])
    w = np.array([-0.5, -0.5])
    b = 0.8 # sign-flipped AND parameters give NAND; any bias with 0.5 < b <= 1.0 works
    tmp = np.sum(w*x) + b
    if tmp <= 0 :
        return 0
    else : 
        return 1
# run the perceptron on all four input combinations
for xs in [(0,0),(1,0),(0,1),(1,1)] :
    y = NAND(xs[0], xs[1])
    print(str(xs) + "=>" + str(y))
    

(0, 0)=>1
(1, 0)=>1
(0, 1)=>1
(1, 1)=>0

 

# XOR gate

A multi-layer network: XOR is built by combining the single-layer gates defined above.
import numpy as np
def XOR (x1, x2) :
    s1 = NAND(x1, x2)
    s2 = OR(x1, x2)
    y = AND(s1, s2) # two-layer structure: NAND and OR feed into AND
    return y
# run the perceptron on all four input combinations
for xs in [(0,0),(1,0),(0,1),(1,1)] :
    y = XOR(xs[0], xs[1])
    print(str(xs) + "=>" + str(y))


(0, 0)=>0
(1, 0)=>1
(0, 1)=>1
(1, 1)=>0

# XOR returns 1 when the inputs differ and 0 when they are the same.
# No single straight line can separate (1,0) and (0,1) from (0,0) and (1,1),
# so XOR is not linearly separable and a single-layer perceptron cannot express it.
# Stacking perceptrons (a multi-layer perceptron) overcomes this; the sketch below checks it by brute force.
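
A minimal sketch (my own check, not from the original code) that searches a grid of weights and biases: single-layer solutions exist for AND, but none exist for XOR.

import numpy as np

def fits(table, w1, w2, b):
    # does the single perceptron (w1, w2, b) reproduce the truth table?
    return all((1 if w1*x1 + w2*x2 + b > 0 else 0) == t
               for (x1, x2), t in table.items())

XOR_TABLE = {(0,0):0, (1,0):1, (0,1):1, (1,1):0}
AND_TABLE = {(0,0):0, (1,0):0, (0,1):0, (1,1):1}
grid = np.arange(-1.0, 1.01, 0.1)

for name, table in [("XOR", XOR_TABLE), ("AND", AND_TABLE)]:
    hits = sum(fits(table, w1, w2, b)
               for w1 in grid for w2 in grid for b in grid)
    print(name, "solutions on this grid:", hits)  # XOR: 0, AND: many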

 

# Activation functions: introduce non-linearity into the output

# Step function: returns 0 or 1

import matplotlib.pyplot as plt
def step_function (x):
    return np.array(x > 0, dtype=int)
    # 1 where x > 0, else 0 (np.int is removed in recent NumPy; plain int works)
x = np.arange(-5.0, 5.0, 0.1)
y = step_function(x)
plt.plot(x, y)
plt.ylim(-0.1, 1.1)
plt.show()

# Sigmoid function: outputs values between 0 and 1

def sigmoid(x) :
    return 1 / (1 + np.exp(-x)) # commonly used for classification
x = np.arange(-5.0, 5.0, 0.1)
y = sigmoid(x)
plt.plot(x,y)
plt.ylim(-0.1, 1.1)
plt.show()

# ReLU: returns 0 for negative inputs, x otherwise

def relu(x) :
    return np.maximum(0,x) # often used in regression settings
x = np.arange(-5.0, 5.0, 0.1)
y = relu(x)
plt.plot(x, y)
plt.show()

# cost function (loss function): how do we move toward the optimal values?
# follow the gradient downhill - the minimum is where the derivative vanishes

 

batch GD - the gradient is computed over the whole data set at once

stochastic GD - one randomly chosen sample at a time

mini-batch GD - small random batches, the usual compromise (sketched below)
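
A minimal mini-batch gradient descent sketch (my own illustration, not from the original post): fitting a line with MSE loss.

import numpy as np

rng = np.random.default_rng(0)
X = rng.uniform(-1, 1, size=200)            # toy data: y = 2x + 1 plus noise
Y = 2 * X + 1 + rng.normal(0, 0.1, size=200)

w, b = 0.0, 0.0                             # parameters to learn
lr, batch_size = 0.1, 16                    # arbitrary hyperparameters

for epoch in range(100):
    idx = rng.permutation(len(X))           # reshuffle each epoch
    for start in range(0, len(X), batch_size):
        i = idx[start:start + batch_size]
        err = (w * X[i] + b) - Y[i]         # prediction error on the batch
        w -= lr * 2 * np.mean(err * X[i])   # gradient of MSE w.r.t. w
        b -= lr * 2 * np.mean(err)          # gradient of MSE w.r.t. b

print(w, b)  # approaches 2 and 1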

 

# Backpropagation

Rather than estimating each gradient numerically, the chain rule computes them through formulas:
compare the prediction with the label via the loss function, then send the error
backwards through the network to update the weights => repeat until the loss is small.
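
A minimal single-neuron sketch of the idea (my own illustration, assuming a sigmoid output and cross-entropy loss, whose combined gradient simplifies to prediction minus target):

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

X = np.array([[0,0],[1,0],[0,1],[1,1]], dtype=float)
T = np.array([0., 1., 1., 1.])        # OR-gate targets
w = np.zeros(2); b = 0.0; lr = 0.5

for _ in range(5000):
    y = sigmoid(X @ w + b)            # forward pass
    delta = (y - T) / len(T)          # backward: dLoss/dz for sigmoid + cross-entropy
    w -= lr * X.T @ delta             # chain rule: dLoss/dw = x * delta
    b -= lr * delta.sum()             # dLoss/db = delta

print((sigmoid(X @ w + b) > 0.5).astype(int))  # -> [0 1 1 1]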

 

import numpy as np
import tensorflow as tf
print(tf.__version__)

2.4.1

 

 

a = tf.constant(2) # declare a scalar as a tensor
b = tf.constant([1, 2]) # declare a vector as a tensor
c = tf.constant([[1, 2],[3, 4]]) # declare a matrix as a tensor
# rank : the number of dimensions of a tensor
print(tf.rank(a))
print(tf.rank(b))
print(tf.rank(c))

tf.Tensor(0, shape=(), dtype=int32)
tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor(2, shape=(), dtype=int32)

 

# addition
a = tf.constant(3)
b = tf.constant(2)
print(tf.add(a,b))

# tf.Tensor(5, shape=(), dtype=int32)


# subtraction
print(tf.subtract(a,b))
# multiplication
print(tf.multiply(a,b))
# division
print(tf.divide(a,b))
# .numpy() pulls the result out as a plain NumPy value
print(tf.divide(a,b).numpy())
print(tf.multiply(a,b).numpy())

tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor(6, shape=(), dtype=int32)
tf.Tensor(1.5, shape=(), dtype=float64)
1.5
6

 

# .numpy() also lets NumPy functions consume TensorFlow results directly
c_square = np.square(tf.add(a,b).numpy(), dtype=np.float32)
c_square

# 25.0

 

# convert_to_tensor brings a NumPy value back into TensorFlow
c_tensor = tf.convert_to_tensor(c_square)
c_tensor

# <tf.Tensor: shape=(), dtype=float32, numpy=25.0>

 

@tf.function # compiles the Python function into a TensorFlow graph function
def square_pos1(x) :
    if x > 0 :
        x = x*x
    else :
        x = x*-1
    return x
print(square_pos1(tf.constant(2)))
print(square_pos1.__class__)

# tf.Tensor(4, shape=(), dtype=int32)
# <class 'tensorflow.python.eager.def_function.Function'>

 

# the same logic without @tf.function remains a plain Python function
def square_pos2(x) :
    if x > 0 :
        x = x*x
    else :
        x = x*-1
    return x
print(square_pos2(tf.constant(2)))
print(square_pos2.__class__)

# tf.Tensor(4, shape=(), dtype=int32)
# <class 'function'>

 

# An OR gate with TensorFlow
import tensorflow as tf
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.losses import mse
tf.random.set_seed(777)
# data
data = np.array([[0,0],[1,0],[0,1],[1,1]])
# labels
label = np.array([[0],[1],[1],[1]])
model = Sequential()
model.add(Dense(1, input_shape = (2,), activation = 'linear')) # a single perceptron
model.compile(optimizer = SGD(), loss = mse, metrics = ['acc'])
# with epochs = 100 training stopped before converging, so 200 is used
model.fit(data, label, epochs = 200)
# the weights and bias the model found
model.get_weights()
# in classic machine learning we assign the values by hand; in deep learning we just
# supply the data and labels and the model finds the weights and bias on its own

Epoch 1/200
1/1 [==============================] - 1s 700ms/step - loss: 1.4290 - acc: 0.5000
Epoch 2/200
1/1 [==============================] - 0s 2ms/step - loss: 1.3602 - acc: 0.5000
...
Epoch 31/200
1/1 [==============================] - 0s 2ms/step - loss: 0.4525 - acc: 0.7500
...
Epoch 192/200
1/1 [==============================] - 0s 2ms/step - loss: 0.1174 - acc: 1.0000
...
Epoch 200/200
1/1 [==============================] - 0s 3ms/step - loss: 0.1136 - acc: 1.0000
[array([[0.5995085 ],
        [0.06513146]], dtype=float32),
 array([0.4472612], dtype=float32)]
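
As a quick check (my addition, using the model object trained above): thresholding the linear outputs at 0.5 recovers the OR truth table.

preds = model.predict(data)               # raw linear outputs
print((preds > 0.5).astype(int).ravel())  # -> [0 1 1 1] with the weights above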

 

 
