Gradient stuff

pull/1/head
E. Almqvist 4 years ago
parent 72928d2217
commit 5391205808
  1. 39
      rgbAI/lib/func.py
  2. 19
      rgbAI/main.py

@ -39,7 +39,11 @@ class AIlib:
if( layerIndex < maxLayer ): if( layerIndex < maxLayer ):
return AIlib.think( layer, weights, bias, layerIndex + 1 ) return AIlib.think( layer, weights, bias, layerIndex + 1 )
else: else:
return np.squeeze(np.asarray(layer)) out = np.squeeze(np.asarray(layer))
print("-Result-")
print(out)
print("\n")
return out
except (ValueError, IndexError) as err: except (ValueError, IndexError) as err:
print("\n---------") print("\n---------")
@ -47,9 +51,32 @@ class AIlib:
print( "Layer index: " + str(layerIndex) ) print( "Layer index: " + str(layerIndex) )
print( "Max layer index: " + str(maxLayer) ) print( "Max layer index: " + str(maxLayer) )
def gradient( dCost:float, prop:list ):
    """Approximate the gradient of the cost w.r.t. each entry of `prop`.

    Args:
        dCost: difference in cost between two forward passes (cost2 - cost1).
        prop: list of parameters (weight matrices or bias vectors) to
              differentiate against.

    Returns:
        list: dCost divided element-wise by each entry of `prop`.
    """
    propLen = len(prop)
    print("PropLEN: ", propLen)
    print(prop)
    print("\n")
    # Fix: the original preallocated `gradient = [None] * propLen` and filled it
    # by index, shadowing the function's own name with the local list; a
    # comprehension avoids both the shadowing and the manual index loop.
    # NOTE(review): dividing by prop[i] raises ZeroDivisionError (or yields
    # inf/nan for numpy arrays) when a parameter is zero — TODO confirm
    # intended semantics.
    return [dCost / p for p in prop]
def learn( inp:np.array, weights:list, bias:list, theta:float ):
    """Estimate cost gradients by a crude finite-difference of the whole input.

    Runs the network twice — once on `inp` and once on `inp + theta` — and
    feeds the scalar cost difference to AIlib.gradient for both the weights
    and the biases.

    NOTE(review): this perturbs the *input*, not the weights/biases, so the
    resulting "derivatives" are dCost/param ratios rather than true partial
    derivatives — presumably an early prototype; confirm intent.
    NOTE(review): `biasDer` is computed but not used in the visible lines, and
    no value is returned here — the function may continue past this diff hunk.
    """
    # Calculate the derivative for:
    # Cost in respect to weights
    # Cost in respect to biases
    res1 = AIlib.think( inp, weights, bias ) # Think the first result
    cost1 = AIlib.calcCost( inp, res1 ) # Calculate the cost of the thought result
    inp2 = np.asarray( inp + theta ) # make the new input with `theta` as diff
    res2 = AIlib.think( inp2, weights, bias ) # Think the second result
    cost2 = AIlib.calcCost( inp2, res2 ) # Calculate the cost
    dCost = cost2 - cost1 # get the difference
    weightDer = AIlib.gradient( dCost, weights )
    biasDer = AIlib.gradient( dCost, bias )
    print(weightDer, len(weightDer))

@ -25,8 +25,8 @@ class rgb(object):
# Cost needs to get to 0, we can figure out this with backpropagation # Cost needs to get to 0, we can figure out this with backpropagation
return cost return cost
def learn( self, inp:np.array, theta:float ):
    # Delegate one training step to the library, passing this bot's own
    # weights and biases along with the finite-difference step `theta`.
    # NOTE(review): `ai` is presumably the imported AIlib module/alias —
    # confirm against the file's import block (not visible in this diff).
    ai.learn( inp, self.weights, self.bias, theta )
def think(self, inp:np.array): def think(self, inp:np.array):
print("-----Gen " + str(self.generation) + "------") print("-----Gen " + str(self.generation) + "------")
def init(): # init
    """Entry point: build a bot and run a single training call on a demo input.

    This commit replaced the previous two-pass think/calcError/gradient demo
    with one call to bot.learn.
    """
    bot = rgb()
    inpArr = np.array( [0.2, 0.4, 0.8] ) # demo RGB input, components in [0, 1]
    bot.learn( inpArr, 0.1 ) # 0.1 is the finite-difference step `theta`

init()

Loading…
Cancel
Save