Optimized stuff

master
E. Almqvist 4 years ago
parent 4a4b3b26bb
commit c0a536c49a
  1. 4
      rgbAI/lib/ailib/ai.py
  2. 6
      rgbAI/main.py

@ -1,5 +1,6 @@
import numpy as np
from copy import deepcopy as copy
import os
def sigmoid(x):
    """Logistic sigmoid, applied elementwise: 1 / (1 + e^(-x))."""
    exp_neg = np.exp(-x)
    return 1.0 / (1.0 + exp_neg)
@ -137,7 +138,7 @@ def mutateProps( inpObj, curCost:float, maxLayer:int, gradient:list ):
for layer in range(maxLayer):
lr = getLearningRate( curCost, gradient[layer], maxLayer )
print(lr)
# print(lr)
obj.weights[layer] -= lr["weight"] * gradient[layer]["weight"] # mutate the weights
obj.bias[layer] -= lr["bias"] * gradient[layer]["bias"]
@ -162,6 +163,7 @@ def learn( inputNum:int, targetCost:float, obj, theta:float, curCost: float=None
grads, costW, costB, curCost = gradient( inp, obj, theta, maxLen - 1 )
obj = mutateProps( obj, curCost, maxLen, grads ) # mutate the props for next round
os.system("clear")
print(f"Cost: {curCost}")

@ -7,11 +7,11 @@ class rgb(object):
if( not loadedWeights or not loadedBias ): # if one is null (None) then just generate new ones
print("Generating weights and biases...")
self.weights = [ ai.genRandomMatrix(3, 8), ai.genRandomMatrix(8, 8), ai.genRandomMatrix(8, 3) ] # array of matrices of weights
self.weights = [ ai.genRandomMatrix(3, 8), ai.genRandomMatrix(8, 8), ai.genRandomMatrix(8, 8), ai.genRandomMatrix(8, 3) ] # array of matrices of weights
# 3 input neurons -> 8 hidden neurons -> 8 hidden neurons -> 3 output neurons
# Generate the biases
self.bias = [ ai.genRandomMatrix(1, 8), ai.genRandomMatrix(1, 8), ai.genRandomMatrix(1, 3) ]
self.bias = [ ai.genRandomMatrix(1, 8), ai.genRandomMatrix(1, 8), ai.genRandomMatrix(1, 8), ai.genRandomMatrix(1, 3) ]
# This doesn't look very good, but it works so...
self.learningrate = 0.01 # the learning rate of this ai
@ -29,7 +29,7 @@ class rgb(object):
return cost
def learn( self ):
ai.learn( 3, 0.0001, self, 0.001 )
ai.learn( 3, 0.0001, self, 0.000001 )
def think( self, inp:np.array ):
print("\n-Input-")

Loading…
Cancel
Save