Skip to content

Commit dcda413

Browse files
author
Guled
committed
Minor adjustments to Flappy Bird Example project, and more documentation.
1 parent 9251ac7 commit dcda413

File tree

8 files changed

+22
-7
lines changed

8 files changed

+22
-7
lines changed

Example/MLKit/GameScene.swift

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -327,11 +327,12 @@ class GameScene: SKScene, SKPhysicsContactDelegate {
327327
print("PARENT 2 FITNESS: \(parents.1.fitness)")
328328

329329
// Produce new flappy birds
330-
var offspring = BiologicalProcessManager.onePointCrossover(crossOverRate: 0.5, parentOneGenotype: parents.0.genotypeRepresentation, parentTwoGenotype: parents.1.genotypeRepresentation)
330+
var offspring = BiologicalProcessManager.onePointCrossover(crossOverRate: 0.7, parentOneGenotype: parents.0.genotypeRepresentation, parentTwoGenotype: parents.1.genotypeRepresentation)
331331

332332
// Mutate their genes
333-
BiologicalProcessManager.inverseMutation(mutationRate: 0.5, genotype: &offspring.0)
334-
BiologicalProcessManager.inverseMutation(mutationRate: 0.5, genotype: &offspring.1)
333+
BiologicalProcessManager.swapMutation(mutationRate: 0.5, genotype: &offspring.0)
334+
BiologicalProcessManager.swapMutation(mutationRate: 0.5, genotype: &offspring.1)
335+
335336

336337
// Create a separate neural network for the birds based on their genes
337338
let brainofOffspring1 = GeneticOperations.decode(genotype: offspring.0)
@@ -437,7 +438,7 @@ class GameScene: SKScene, SKPhysicsContactDelegate {
437438
// Decision AI makes
438439
let decision = (currentBird?.brain?.forward(input: [Float(1), Float(normalizedDistanceOfNextPipe), Float(normalizedPosToGap), Float(birdYPos)]))!
439440

440-
print("DEC: \(decision)")
441+
print("FLAPPY BIRD DECISION: \(decision)")
441442

442443
// 0.95 was arbitrary, tweaking is recommended
443444
if decision[0] >= Float(0.95) {

MLKit/Classes/ANN/LayerProtocol.swift .swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@ public protocol Layer {
2323

2424
}
2525

26+
27+
/// Protocol for Layer Printing and Debugging Methods
2628
public protocol InputandOutputLayerMethods {
2729

2830
/**

MLKit/Classes/ANN/Learning/ActivationFunctionEnum.swift

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import Foundation
1010

1111

12+
/// The ActivationFunctionType enum represents the type of activation function your NeuralNet object will use.
1213
public enum ActivationFunctionType {
1314
case STEP // Step Function
1415
case LINEAR // Linear Function

MLKit/Classes/ANN/Learning/NNOperations.swift

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
import Foundation
1010

11+
/// The NNOperations (Neural Network Operations) class has the objective of computing activation function values and the derivative of activation functions as well.
1112
final class NNOperations {
1213

1314
/**

MLKit/Classes/ANN/Learning/Training.swift

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import Foundation
1010
import Upsurge
1111

1212

13+
/// The Training Protocol defines the methods used for training a NeuralNet Object. Note that the `train` method used in this protocol's extension is used only for Neural Network architectures such as Adaline and Perceptron. There is no backpropagation method within the Training method. The Backpropagation class utilizes the Training protocol in order to implement methods that pertain to printing/debugging values. The Backpropagation algorithm has its own 'train' method. The way the Adaline and Perceptron architectures perform weight updates and training is completely different from the techniques found in Backpropagation, which is why I have separated them.
1314
public protocol Training {
1415

1516
}

MLKit/Classes/ANN/Learning/TrainingTypes.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
import Foundation
1010

11-
11+
/// The TrainingType enum represents the type of neural network architecture you are using.
1212
public enum TrainingType {
1313
/// Perceptron Architecture
1414
case PERCEPTRON

MLKit/Classes/ANN/NeuralNet.swift

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -355,6 +355,15 @@ open class NeuralNet {
355355
return newNeuralNetwork
356356
}
357357

358+
359+
360+
/**
361+
The forward method allows a NeuralNet object to pass in inputs (corresponding to the number of input layer neurons in your NeuralNet Object) and receive a list of output values (depends on the number of output layer neurons available).
362+
363+
- parameter input: An array of Float values. NOTE: Don't forget to make the first input value a '1' (this is your bias value).
364+
365+
- returns: A list of Float values corresponding to the output of your NeuralNet object.
366+
*/
358367
public func forward(input: [Float]) -> [Float] {
359368

360369
return forwardProcess(network: self, input:input)
@@ -454,7 +463,7 @@ open class NeuralNet {
454463

455464

456465
/**
457-
The trainNet method trains the Neural Network with the methods available (PERCEPTRON, ADALINE, and BACKPROPAGATION).
466+
The trainNet method trains the Neural Network with the methods available (PERCEPTRON, ADALINE, and BACKPROPAGATION). It is advised that you use this method for supervised learning.
458467

459468
- parameter network: A Neural Net Object.
460469

MLKit/Classes/Genetic Algorithms/Genome.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
import Foundation
1010

11-
/// Blueprint for a Genome. It is encouraged that you create your own `generateFitness` method as there are several ways to assess fitness. You are required, on the other hand, to have a genotype representation and a fitness for every Genome.
11+
/// Protocol for a Genome. It is encouraged that you create your own `generateFitness` method as there are several ways to assess fitness. You are required, on the other hand, to have a genotype representation and a fitness for every Genome.
1212
public protocol Genome {
1313

1414
/// Genotype representation of the genome.

0 commit comments

Comments
 (0)