Huge tweaks to the network

master
E. Almqvist 4 years ago
parent eab52714a4
commit 56c765d8bb
  1. 2
      rgbAI/lib/ailib/ai.py
  2. 4
      rgbAI/main.py

@@ -186,7 +186,7 @@ def learn( inputNum:int, targetCost:float, obj, theta:float, curCost: float=None
# So if we change all the weights with i.e. 0.01 = theta, then we can derive the gradient with math and stuff
count = 0
while( count <= 1000 ): # targetCost is the target for the cost function
while( count <= 10000 ): # targetCost is the target for the cost function
count += 1
inp = np.asarray(np.random.rand( 1, inputNum ))[0] # create a random learning sample
# inp = np.asarray([1.0, 1.0, 1.0])

@@ -7,11 +7,11 @@ class rgb(object):
if( not loadedWeights or not loadedBias ): # if one is null (None) then just generate new ones
print("Generating weights and biases...")
self.weights = [ ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3) ] # array of matrices of weights
self.weights = [ ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3) ] # array of matrices of weights
# 3 input neurons -> 3 hidden neurons -> 3 hidden neurons -> 3 output neurons
# Generate the biases
self.bias = [ ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3) ]
self.bias = [ ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3) ]
# This doesn't look very good, but it works so...
print( self.weights )

Loading…
Cancel
Save