Gradient stuff

Branch: pull/1/head
Author: E. Almqvist, 4 years ago
Parent: 2cf3a89e70
Commit: 93694b9da9
Changed files:
    rgbAI/lib/func.py  (9 lines changed)
    rgbAI/main.py      (11 lines changed)

rgbAI/lib/func.py

@@ -24,11 +24,8 @@ class AIlib:
         return mat

     def think( inp:np.array, weights:list, bias:list, layerIndex: int=0 ): # recursive thinking, hehe
-        # the length of weights and bias should be the same
-        # if not then the neural net is flawed/incorrect
-        maxLayer = len(weights) - 1
         try:
+            maxLayer = len(weights) - 1
             weightedInput = np.dot( inp, weights[layerIndex] ) # dot multiply the input and the weights
             layer = AIlib.sigmoid( np.add(weightedInput, bias[layerIndex]) ) # add the biases
@@ -49,3 +46,7 @@ class AIlib:
             print( "Error: " + str(err) )
             print( "Layer index: " + str(layerIndex) )
             print( "Max layer index: " + str(maxLayer) )
+
+    def gradient( cost1:float, cost2:float, inp1:np.array, inp2:np.array ):
+        return (cost2 - cost1) / (inp2 - inp1)
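
Note on the added gradient(): it is a two-point secant (finite-difference) estimate of how the scalar cost changes with the input. The single cost delta is divided elementwise by the vector of input deltas, so NumPy broadcasting yields one slope per input component, and any component where inp1 and inp2 coincide divides by zero. A per-component central difference avoids that failure mode. This is a minimal sketch, not part of the commit; cost_fn is a hypothetical stand-in for the think-then-calcError round trip:

    import numpy as np

    def finite_difference_gradient( cost_fn, inp: np.ndarray, h: float = 1e-4 ) -> np.ndarray:
        # Hypothetical helper, not in the repo: estimate dCost/dInput one
        # component at a time with a central difference of step size h.
        grad = np.zeros_like( inp, dtype=float )
        for i in range( inp.size ):
            step = np.zeros_like( inp, dtype=float )
            step[i] = h
            grad[i] = ( cost_fn(inp + step) - cost_fn(inp - step) ) / (2.0 * h)
        return grad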

rgbAI/main.py

@@ -48,7 +48,16 @@ def init(): # init
     res = bot.think( inpArr )
     cost = bot.calcError(inpArr, res)
     print("Cost: " + str(cost))
+    inpArr2 = np.array([0.3, 0.5, 0.9])
+    res2 = bot.think(inpArr2)
+    cost2 = bot.calcError(inpArr2, res2)
+    print("Cost: " + str(cost2))
+
+    print("\n----")
+    print("Gradient\n")
+    gradient = ai.gradient( cost, cost2, inpArr, inpArr2 )
+    print(gradient)

 init()
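
For a quick sense of what the new driver code prints: gradient() returns one slope per input component, because the scalar cost difference is broadcast over the elementwise input difference. A self-contained example with made-up numbers (the real inpArr is defined earlier in main.py, outside this hunk, and its cost depends on the network's weights):

    import numpy as np

    def gradient( cost1, cost2, inp1, inp2 ):  # same formula as the commit
        return (cost2 - cost1) / (inp2 - inp1)

    inp1 = np.array([0.2, 0.4, 0.8])  # hypothetical first probe point
    inp2 = np.array([0.3, 0.5, 0.9])  # second probe point, as in the diff
    print( gradient(0.50, 0.44, inp1, inp2) )  # hypothetical scalar costs
    # [-0.6 -0.6 -0.6]  (one cost delta spread across each input delta)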
