From a543a9db1afe5aa37a078f6bb3930d4b3c5739d8 Mon Sep 17 00:00:00 2001
From: "E. Almqvist"
Date: Thu, 8 Oct 2020 11:47:03 +0000
Subject: [PATCH] Made it so that the AI trains on the same input

---
 rgbAI/lib/func.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/rgbAI/lib/func.py b/rgbAI/lib/func.py
index abcd950..898558c 100644
--- a/rgbAI/lib/func.py
+++ b/rgbAI/lib/func.py
@@ -98,8 +98,9 @@ class AIlib:
         # i.e. : W' = W - lr * gradient (respect to W in layer i) = W - lr*[ dC / dW[i] ... ]
         # So if we change all the weights with i.e. 0.01 = theta, then we can derive the gradient with math and stuff
 
+        inp = np.asarray(np.random.rand( 1, inputNum ))[0] # create a random learning sample
+
         while( not curCost or curCost > targetCost ): # targetCost is the target for the cost function
-            inp = np.asarray(np.random.rand( 1, inputNum ))[0] # create a random learning sample
             maxLen = len(obj.bias)
             grads, res, curCost = AIlib.gradient( inp, obj, theta, maxLen - 1 )
             obj = AIlib.mutateProps( obj, maxLen, grads ) # mutate the props for next round