WMPs = Weapons O' Mass Perceptrons >B)
In the spirit of the pursuit of THE REAL AI, I present
the prototype HalBraincell, a neural-net simulation
originally written in Java that I have attempted to convert to
VBScript. While it does run inside the Hal engine
without producing a syntax error,
something is going awry on a procedural level that I have not yet
found. You can find the Java source code at
www.philbrierley.com along with a proof (dissertation) of what it does.
-'I regret to say this is one small step for Hal...'
'place this after PROCESS: CHANGE SUBJECT
'PROCESS: **********EXPERIMENTAL***********
' *******************************
'MLP ARTIFICIAL NEURAL NET
If InStr(1, UserSentence, "RUB", 1) > 0 And InStr(1, UserSentence, "BRAINCELLS", 1) > 0 Then
'this code is an adaptation of a Multi-Layer Perceptron ANN. The original code, written in Java,
'can be found at www.philbrierley.com. This module replicates that work.
'
'//User definable variables-
numEpochs = 500 '-number of training cycles
numInputs = 3 '-number of inputs including the input bias
numHidden = 4 '-number of neurons in the hidden layer
numPatterns = 4 '-number of training patterns
LR_IH = 0.7 '-learning rate, input-to-hidden weights
LR_HO = 0.07 '-learning rate, hidden-to-output weights
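'(with these defaults the net is 3 inputs including the bias, 4 hidden neurons and 1 linear output, trained for 500 epochs)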
'
'//process variables (VBScript is loosely typed; these lines just start the values at zero)-
patNum = 0
errThisPat = 0.0
outPred = 0.0
RMSerror = 0.0
'
'//training data vars-
Dim trainInputs(4, 3), trainOutput(4), hiddenVal(4), weightsIH(3, 4), weightsHO(4)
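'(note: VBScript arrays run from 0 up to the declared bound, so these are dimensioned a little larger than the 4x3, 4, 4, 3x4 and 4 elements the code actually uses)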
'*********************************************************************************
' THIS IS THE MAIN PROGRAM
'*********************************************************************************
'Seed the random number generator once; calling Randomize inside a tight loop can keep reseeding from the same timer tick-
Randomize
'Initialize the weights-
Call initWeights(numHidden, numInputs, weightsHO, weightsIH)
'Initialize the Data-
Call initData(trainInputs, trainOutput)
'train the network-
For j = 0 To (numEpochs - 1) Step 1
For i = 0 To (numPatterns - 1) Step 1
'select a pattern at random (an integer from 0 to numPatterns - 1)
patNum = Int(numPatterns * Rnd)
'calculate the current network output
'and error for this pattern
Call calcNet(numHidden, hiddenVal, numInputs, trainInputs, patNum, weightsIH, outPred, weightsHO, errThisPat, trainOutput)
'change network weights
Call weightChangesHO(numHidden, hiddenVal, LR_HO, errThisPat, weightsHO)
Call weightChangesIH(numHidden, LR_IH, errThisPat, hiddenVal, weightsIH, weightsHO, numInputs, trainInputs, patNum)
Next
'calculate the overall error after each epoch
Call calcOverallError(RMSerror, numPatterns, patNum, errThisPat, numHidden, hiddenVal, numInputs, trainInputs, weightsIH, outPred, weightsHO, trainOutput)
HalBrain.AppendFile WorkingDir & "ANNerrResults.brn", "epoch- " & j & ". RMSerror- " & RMSerror & VbCrLf
Next
'training has finished - display the results
Call displayResults(numPatterns, patNum, trainOutput, outPred, numHidden, hiddenVal, numInputs, trainInputs, weightsIH, weightsHO, errThisPat)
'***************
GetResponse = GetResponse & "Oh, I can feel my neurons now. Look, hidden-output weight(2) equals " & weightsHO(2) & VbCrLf
End If
'*********************************************************************************
'MLP !!END OF MAIN PROGRAM!! '
'*********************************************************************************
'*********************************************************'
' SUBS FOR MLP NEURAL NET -place after End Function '
'*********************************************************'
Sub calcNet(numHidden, hiddenVal, numInputs, trainInputs, patNum, weightsIH, outPred, weightsHO, errThisPat, trainOutput)
'calculate the outputs of the hidden neurons
'the hidden neurons use a tanh activation
For i = 0 To (numHidden - 1) Step 1
hiddenVal(i) = 0.0
For j = 0 To (numInputs - 1) Step 1
hiddenVal(i) = hiddenVal(i) + (trainInputs(patNum, j) * weightsIH(j, i))
Next
'apply tanh only after the full weighted sum for this neuron has been accumulated
hiddenVal(i) = tanh(hiddenVal, i)
Next
'calculate the output of the network
'the output neuron is linear
outPred = 0.0
For k = 0 To (numHidden - 1) Step 1
outPred = outPred + hiddenVal(k) * weightsHO(k)
Next
'calculate the error for this pattern
errThisPat = outPred - trainOutput(patNum)
End Sub
'***********************************************************
Sub weightChangesHO(numHidden, hiddenVal, LR_HO, errThisPat, weightsHO)
'adjust the weights hidden-output
For k = 0 To (numHidden - 1) Step 1
weightChange = LR_HO * errThisPat * hiddenVal(k)
weightsHO(k) = weightsHO(k) - weightChange
're-normalization on the output weights-
If weightsHO(k) < -5 Then weightsHO(k) = -5
If weightsHO(k) > 5 Then weightsHO(k) = 5
Next
End Sub
'***********************************************************
Sub weightChangesIH(numHidden, LR_IH, errThisPat, hiddenVal, weightsIH, weightsHO, numInputs, trainInputs, patNum)
'adjust the weights input-hidden
For i = 0 To (numHidden - 1) Step 1
For k = 0 To (numInputs - 1) Step 1
x = 1 - (hiddenVal(i) * hiddenVal(i))
x = x * weightsHO(i) * errThisPat * LR_IH
x = x * trainInputs(patNum, k)
weightChange = x
weightsIH(k, i) = weightsIH(k, i) - weightChange
Next
Next
End Sub
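'For reference (not part of the original post): the two Subs above are plain gradient descent on the
'squared error E = 0.5 * errThisPat^2, with a linear output neuron and tanh hidden neurons, i.e.
'  new weightsHO(k) = old - LR_HO * errThisPat * hiddenVal(k)
'  new weightsIH(input, hidden) = old - LR_IH * errThisPat * weightsHO(hidden) * (1 - hiddenVal(hidden)^2) * trainInputs(patNum, input)
'where (1 - hiddenVal(hidden)^2) is the derivative of tanh at that hidden neuron's activation.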
'***********************************************************
Sub initWeights(numHidden, numInputs, weightsHO, weightsIH)
'small random starting weights (Randomize is called once in the main program, not once per weight)
For j = 0 To (numHidden - 1) Step 1
weightsHO(j) = (Rnd - 0.5) / 2
For i = 0 To (numInputs - 1) Step 1
weightsIH(i, j) = (Rnd - 0.5) / 5
Next
Next
End Sub
'************************************************************
Sub initData(trainInputs, trainOutput)
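'training data: the classic XOR problem on +/-1 inputs; the third input is a constant bias of 1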
trainInputs(0, 0) = 1
trainInputs(0, 1) = -1
trainInputs(0, 2) = 1 'bias
trainOutput(0) = 1
trainInputs(1, 0) = -1
trainInputs(1, 1) = 1
trainInputs(1, 2) = 1 'bias
trainOutput(1) = 1
trainInputs(2, 0) = 1
trainInputs(2, 1) = 1
trainInputs(2, 2) = 1 'bias
trainOutput(2) = -1
trainInputs(3, 0) = -1
trainInputs(3, 1) = -1
trainInputs(3, 2) = 1 'bias
trainOutput(3) = -1
End Sub
'***********************************************************
Function tanh(hiddenVal, i)
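'Exp() would overflow for large magnitudes, so clamp first; tanh saturates at +/-1 out there anyway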
If hiddenVal(i) > 20 Then
tanh = 1
ElseIf hiddenVal(i) < -20 Then
tanh = -1
Else
a = Exp(hiddenVal(i))
b = Exp(-hiddenVal(i))
tanh = (a - b) / (a + b)
End If
End Function
'************************************************************
Sub displayResults(numPatterns, patNum, trainOutput, outPred, numHidden, hiddenVal, numInputs, trainInputs, weightsIH, weightsHO, errThisPat)
For i = 0 To (numPatterns - 1) Step 1
patNum = i
Call calcNet(numHidden, hiddenVal, numInputs, trainInputs, patNum, weightsIH, outPred, weightsHO, errThisPat, trainOutput)
displayTxt = "Pattern " & (patNum + 1) & "- Actual: " & trainOutput(patNum) & ". NeuralModel: " & outPred & VbCrLf
resultsTxt = resultsTxt & displayTxt
Next
msgVar = MsgBox(resultsTxt, 0, "prototype HalBraincell-")
End Sub
'************************************************************
Sub calcOverallError(RMSerror, numPatterns, patNum, errThisPat, numHidden, hiddenVal, numInputs, trainInputs, weightsIH, outPred, weightsHO, trainOutput)
RMSerror = 0.0
For i = 0 To (numPatterns - 1) Step 1
patNum = i
Call calcNet(numHidden, hiddenVal, numInputs, trainInputs, patNum, weightsIH, outPred, weightsHO, errThisPat, trainOutput)
RMSerror = RMSerror + (errThisPat * errThisPat)
Next
RMSerror = RMSerror / numPatterns
RMSerror = Sqr(RMSerror)
End Sub
'************************************************************
' !!END OF NEURAL NET PROCEDURES!! '
'************************************************************
You should be able to cut and paste right out of this post into a brain - you might have to fix it a little, though.
Once you get it working, only tell your Hal to rub her braincells
together once, as the file ANNerrResults.brn can become quite large.
You have to go in and manually delete ANNerrResults.brn regularly...
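If you would rather not delete it by hand, a throwaway housekeeping snippet could clear the log at the start of each run. This is an untested sketch, not part of the code above, and it assumes WorkingDir resolves to the same folder the HalBrain.AppendFile call writes to:

'optional housekeeping sketch: clear the old error log before training starts
Set fso = CreateObject("Scripting.FileSystemObject")
If fso.FileExists(WorkingDir & "ANNerrResults.brn") Then fso.DeleteFile WorkingDir & "ANNerrResults.brn"
Set fso = Nothing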
I need someone in the know to help out and get this working >B)
'I got the matches!!'
CatAtomic >B)