diff --git a/rgbAI/lib/ailib/ai.py b/rgbAI/lib/ailib/ai.py
index d280f89..cc7e138 100644
--- a/rgbAI/lib/ailib/ai.py
+++ b/rgbAI/lib/ailib/ai.py
@@ -186,7 +186,7 @@ def learn( inputNum:int, targetCost:float, obj, theta:float, curCost: float=None
         # So if we change all the weights with i.e. 0.01 = theta, then we can derive the gradient with math and stuff

         count = 0
-        while( count <= 1000 ): # targetCost is the target for the cost function
+        while( count <= 10000 ): # targetCost is the target for the cost function
             count += 1
             inp = np.asarray(np.random.rand( 1, inputNum ))[0] # create a random learning sample
             # inp = np.asarray([1.0, 1.0, 1.0])
diff --git a/rgbAI/main.py b/rgbAI/main.py
index 7965936..7368cb4 100755
--- a/rgbAI/main.py
+++ b/rgbAI/main.py
@@ -7,11 +7,11 @@ class rgb(object):
         if( not loadedWeights or not loadedBias ): # if one is null (None) then just generate new ones
             print("Generating weights and biases...")
-            self.weights = [ ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3) ] # array of matrices of weights
+            self.weights = [ ai.genRandomMatrix(3, 3), ai.genRandomMatrix(3, 3) ] # array of matrices of weights
             # 3 input neurons -> 3 hidden neurons -> 3 hidden neurons -> 3 output neurons

             # Generate the biases
-            self.bias = [ ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3) ]
+            self.bias = [ ai.genRandomMatrix(1, 3), ai.genRandomMatrix(1, 3) ]

             # This doesn't look very good, but it works so...
             print( self.weights )