pull/1/head
E. Almqvist 4 years ago
parent bff87e11a4
commit bce25a5a44
  1. 15
      rgbAI/lib/func.py
  2. 9
      rgbAI/main.py

@ -82,11 +82,14 @@ class AIlib:
else: else:
return grads, res1, cost1 return grads, res1, cost1
def mutateProps( inpObj, maxLen:int, gradient:list ):
    """Return a copy of inpObj stepped once along the gradient (gradient descent).

    inpObj:   network object exposing .weights, .bias (indexable per layer)
              and .learningrate — left unmodified.
    maxLen:   number of layers to update (indices 0 .. maxLen-1).
    gradient: per-layer dicts with "weight" and "bias" partial derivatives.
    Returns the mutated copy.
    """
    # BUG FIX: a shallow copy(inpObj) shared the .weights/.bias containers with
    # the input object, so the in-place '-=' below mutated the caller's object
    # anyway and the copy accomplished nothing. deepcopy gives a真 independent copy.
    from copy import deepcopy
    obj = deepcopy(inpObj)
    for i in range(maxLen):
        obj.weights[i] -= obj.learningrate * gradient[i]["weight"]  # descend along dC/dW
        obj.bias[i] -= obj.learningrate * gradient[i]["bias"]       # descend along dC/db
    return obj
def learn( inputNum:int, targetCost:float, obj, theta:float, curCost: float=None ):
    """Train obj by repeated gradient descent until the cost reaches targetCost.

    inputNum:   size of the random input sample to train against.
    targetCost: stop once the cost is <= this value.
    obj:        network object with .weights and .bias (replaced each round by
                the copy returned from AIlib.mutateProps).
    theta:      finite-difference step used by AIlib.gradient.
    curCost:    cost carried in from a previous round; None on the first call.
    """
    # Calculate the derivative for:
    # Cost in respect to weights
    # i.e. : W' = W - lr * gradient (respect to W in layer i) = W - lr*[ dC / dW[i] ... ]
    # So if we change all the weights with i.e. 0.01 = theta, then we can derive the gradient with math and stuff
    inp = np.asarray(np.random.rand( 1, inputNum ))[0] # create a random learning sample
    # BUG FIX: 'not curCost' was also true for curCost == 0.0, so a perfect fit
    # (cost exactly zero) would loop forever; test for None explicitly instead.
    while curCost is None or curCost > targetCost: # targetCost is the target for the cost function
        maxLen = len(obj.bias)
        grads, res, curCost = AIlib.gradient( inp, obj, theta, maxLen - 1 )
        obj = AIlib.mutateProps( obj, maxLen, grads ) # mutate the props for next round
        print("Cost:", curCost, "|", inp, res)
    print("DONE\n")
    print(obj.weights)
    print(obj.bias)

@ -32,20 +32,23 @@ class rgb(object):
ai.learn( 3, 0.001, self, 0.001 ) ai.learn( 3, 0.001, self, 0.001 )
def think( self, inp:np.array ):
    """Log the input, run it through the AI library, then log and return the result."""
    print(f"-----Gen {self.generation}------")  # banner for the current generation
    print("\n-Input-")
    print(inp)
    print("\n")
    result = ai.think( inp, self )  # forward pass through the network
    print("\n-Output-")
    print(result)
    print("\n----------------\n\n")
    return result
def init():
    """Entry point: train a fresh rgb bot, run one prediction and report its error."""
    bot = rgb()
    bot.learn()
    sample = np.asarray([1.0, 1.0, 1.0])
    prediction = bot.think( sample )
    print(bot.calcError( sample, prediction ))

init()

Loading…
Cancel
Save