Added ai that works i think

pull/1/head
E. Almqvist 4 years ago
parent 750760e1f6
commit bc2e219108
  1. rgbAI/cost.txt (100000 changes)
  2. rgbAI/lib/func.py (24 changes)
  3. rgbAI/main.py (19 changes)

rgbAI/cost.txt: file diff suppressed because it is too large.

rgbAI/lib/func.py

@@ -6,7 +6,7 @@ class AIlib:
         return 1/(1 + np.exp(-x))
 
     def correctFunc(inp:np.array): # generates the correct answer for the AI
-        return np.array( [inp[2], inp[1], inp[0]] ) # basically invert the rgb values
+        return np.asarray( [1.0 - inp[0], 1.0 - inp[1], 1.0 - inp[2]] ) # basically invert the rgb values
 
     def calcCost( predicted:np.array, correct:np.array ): # cost function, lower -> good, higher -> bad, bad bot, bad
         costSum = 0
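Note: the change above swaps the target function from reversing the channel order to inverting each channel value. A minimal stand-alone sketch of the new behaviour follows; the calcCost body here is an assumption based on its "lower -> good" comment, since only its signature appears in this diff:

import numpy as np

def correctFunc(inp: np.array):  # generates the correct answer for the AI
    # new behaviour: invert each channel value (1 - c) instead of
    # reversing the channel order like the old code did
    return np.asarray([1.0 - inp[0], 1.0 - inp[1], 1.0 - inp[2]])

def calcCost(predicted: np.array, correct: np.array):
    # assumed squared-error style cost: lower -> good, higher -> bad
    return float(np.sum((predicted - correct) ** 2))

rgb = np.asarray([0.2, 0.5, 0.9])
print(correctFunc(rgb))                                   # [0.8 0.5 0.1]
print(calcCost(np.asarray([0.7, 0.4, 0.2]), correctFunc(rgb)))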
@@ -87,15 +87,27 @@ class AIlib:
             obj.weights[i] -= obj.learningrate * gradient[i]["weight"] # mutate the weights
             obj.bias[i] -= obj.learningrate * gradient[i]["bias"]
 
-    def learn( inp:np.array, obj, theta:float ):
+    def learn( inputNum:int, targetCost:float, obj, curCost: float=None ):
         # Calculate the derivative for:
         # Cost in respect to weights
         # Cost in respect to biases
         # i.e. : W' = W - lr * gradient (respect to W in layer i) = W - lr*[ dC / dW[i] ... ]
         # So if we change all the weights with i.e. 0.01 = theta, then we can derive the gradient with math and stuff
 
-        maxLen = len(obj.bias)
-        grads, res, cost = AIlib.gradient( inp, obj, theta, maxLen - 1 )
-        AIlib.mutateProps( obj, maxLen, grads ) # mutate the props for next round
-        print("Cost:", cost, "|", inp, res)
+        if( not curCost or curCost > targetCost ): # targetCost is the target for the cost function
+            inp = np.asarray(np.random.rand( 1, inputNum ))[0]
+
+            maxLen = len(obj.bias)
+            grads, res, curCost = AIlib.gradient( inp, obj, theta, maxLen - 1 )
+            AIlib.mutateProps( obj, maxLen, grads ) # mutate the props for next round
+
+            print("Cost:", curCost, "|", inp, res)
+            return AIlib.learn( inputNum, targetCost, obj, theta, curCost )
+        else:
+            print("DONE\n")
+            print(obj.weights)
+            print(obj.bias)
+            return
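Note: the rewritten learn() keeps drawing random inputs and recursing until the cost drops below targetCost, using the finite-difference idea from the comments above (perturb each parameter by theta, then apply W' = W - lr * dC/dW). As committed it still references theta, which no longer appears in its parameter list (unless theta is defined elsewhere in func.py), and the recursive call appears to pass five arguments to the four-parameter learn(). Below is a self-contained sketch of the same idea with made-up helper names, not the repo's AIlib API; a capped loop stands in for the recursion so the demo always terminates:

import numpy as np

def correct(inp):                                # target: invert the rgb values
    return 1.0 - inp

def cost(w, b, inp):                             # one sigmoid layer + squared-error cost
    out = 1 / (1 + np.exp(-(inp @ w + b)))
    return np.sum((out - correct(inp)) ** 2)

def learn(inputNum, targetCost, w, b, lr=0.01, theta=0.01):
    # Train on fresh random inputs until the cost drops below targetCost.
    # The commit does this with recursion; a capped loop avoids Python's
    # recursion limit and guarantees that this demo terminates.
    for step in range(20000):
        inp = np.random.rand(inputNum)
        curCost = cost(w, b, inp)
        if curCost <= targetCost:
            break

        # finite-difference gradient: nudge each parameter by theta
        gw, gb = np.zeros_like(w), np.zeros_like(b)
        for idx in np.ndindex(w.shape):
            wp = w.copy(); wp[idx] += theta
            gw[idx] = (cost(wp, b, inp) - curCost) / theta
        for idx in np.ndindex(b.shape):
            bp = b.copy(); bp[idx] += theta
            gb[idx] = (cost(w, bp, inp) - curCost) / theta

        w -= lr * gw                             # W' = W - lr * dC/dW[i]
        b -= lr * gb
        if step % 1000 == 0:
            print("Cost:", curCost, "|", inp)
    print("DONE")
    return w, b

learn(3, 0.001, np.random.rand(3, 3), np.random.rand(3))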

rgbAI/main.py

@@ -14,9 +14,7 @@ class rgb(object):
         self.bias = [ ai.genRandomMatrix(1, 4), ai.genRandomMatrix(1, 4), ai.genRandomMatrix(1, 3) ]
         # This doesn't look very good, but it works so...
 
-        self.generation = 0
-        self.learningrate = 0.1 # the learning rate of this ai
+        self.learningrate = 0.01 # the learning rate of this ai
 
         print( self.weights )
         print( self.bias )
@@ -30,8 +28,8 @@ class rgb(object):
         # Cost needs to get to 0, we can figure out this with backpropagation
         return cost
 
-    def learn( self, inp:np.array, theta:float ):
-        ai.learn( inp, self, theta )
+    def learn( self ):
+        ai.learn( 3, 0.001, self, 0.0001 )
 
     def think( self, inp:np.array ):
         print("-----Gen " + str(self.generation) + "------")
@@ -46,17 +44,8 @@ class rgb(object):
         print("\n----------------\n\n")
         return res
 
-    def train( self ):
-        for i in range(self.traintimes):
-            inpArr = np.asarray(np.random.rand( 1, 3 ))[0]
-            self.generation = i
-            self.learn( inpArr, 0.1 )
-
 def init():
     bot = rgb()
-    bot.traintimes = 10000
-    bot.train()
+    bot.learn()
 
 init()
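Note: with the explicit train() loop removed, init() now just calls bot.learn(), which forwards to ai.learn( 3, 0.001, self, 0.0001 ). The fourth positional argument lands in curCost, and 0.0001 is already below the 0.001 target, so as written the training branch would be skipped on the first call. A tiny runnable sketch of that control flow (the body is illustrative, not the repo's):

def learn(inputNum, targetCost, obj, curCost=None):
    # mirrors the committed condition: only keep training while curCost > targetCost
    if not curCost or curCost > targetCost:
        print("training...")        # would draw a random input and mutate obj here
    else:
        print("DONE")

learn(3, 0.001, None)               # curCost left unset       -> "training..."
learn(3, 0.001, None, 0.0001)       # curCost already < target -> "DONE"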
