misc: Update SDK version. Update project description. Clean up some code.
vxern committed Jul 6, 2022
1 parent 14d423d commit 6a2c7f1
Showing 15 changed files with 88 additions and 70 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,9 @@
+# 0.4.4
+
+- Updated SDK version from `2.12.0` to `2.17.0`.
+- Updated project description to make it more accurate in describing what
+  `synadart` actually is.
+
 # 0.4.3
 
 - Bumped version of `sprint` from `1.0.2+3` to `1.0.3`.
56 changes: 29 additions & 27 deletions example/example.dart
@@ -2,33 +2,36 @@ import 'package:synadart/src/layers/core/dense.dart';
 import 'package:synadart/synadart.dart';
 
 void main() {
-  final network = Sequential(learningRate: 0.2, layers: [
-    Dense(
-      size: 15,
-      activation: ActivationAlgorithm.sigmoid,
-    ),
-    Dense(
-      size: 5,
-      activation: ActivationAlgorithm.sigmoid,
-    ),
-    Dense(
-      size: 1,
-      activation: ActivationAlgorithm.sigmoid,
-    )
-  ]);
+  final network = Sequential(
+    learningRate: 0.2,
+    layers: [
+      Dense(
+        size: 15,
+        activation: ActivationAlgorithm.sigmoid,
+      ),
+      Dense(
+        size: 5,
+        activation: ActivationAlgorithm.sigmoid,
+      ),
+      Dense(
+        size: 1,
+        activation: ActivationAlgorithm.sigmoid,
+      )
+    ],
+  );
 
   // We are expecting to get the number '5'.
   final expected = [
-    [0.01],
-    [0.01],
-    [0.01],
-    [0.01],
-    [0.01],
-    [0.99],
-    [0.01],
-    [0.01],
-    [0.01],
-    [0.01],
+    [0.01], // 0
+    [0.01], // 1
+    [0.01], // 2
+    [0.01], // 3
+    [0.01], // 4
+    [0.99], // 5
+    [0.01], // 6
+    [0.01], // 7
+    [0.01], // 8
+    [0.01], // 9
   ];
 
   // Training data contains different number patterns.
@@ -38,8 +41,7 @@ void main() {
     '111001111100111'.split('').map(double.parse).toList(),
     '111001111001111'.split('').map(double.parse).toList(),
     '101101111001001'.split('').map(double.parse).toList(),
-    // This is the number 5
-    '111100111001111'.split('').map(double.parse).toList(),
+    '111100111001111'.split('').map(double.parse).toList(), // 5
     '111100111101111'.split('').map(double.parse).toList(),
     '111001001001001'.split('').map(double.parse).toList(),
     '111101111101111'.split('').map(double.parse).toList(),
@@ -60,7 +62,7 @@ void main() {
   final numberFive = trainingData[5];
 
   // Train the network using the training and expected data.
-  network.train(inputs: trainingData, expected: expected, iterations: 5000);
+  network.train(inputs: trainingData, expected: expected, iterations: 20000);
 
   print('Confidence in recognising a 5: ${network.process(numberFive)}');
   for (final test in testData) {
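Each training pattern here is a 15-character string of 0s and 1s. Read three characters per row, the strings appear to form 3×5 pixel glyphs of the digits 0–9; this is an inference from the shapes (the example itself only labels one entry as 5), but that labelled entry does render as a 5. A minimal sketch of the decoding:

```dart
// Renders a 15-character pattern as a 3x5 pixel grid, read row by row.
// The 3x5 interpretation is inferred from the example's shapes, not
// something this diff documents.
void main() {
  const pattern = '111100111001111'; // labelled '5' in example.dart
  for (var row = 0; row < 5; row++) {
    print(pattern.substring(row * 3, row * 3 + 3));
  }
  // Output:
  // 111
  // 100
  // 111
  // 001
  // 111  -> the digit 5
}
```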
4 changes: 2 additions & 2 deletions lib/src/activation.dart
@@ -28,14 +28,14 @@ const algorithms = <ActivationAlgorithm, List<ActivationFunctionSignature>>{
 ActivationFunction resolveActivationAlgorithm(
   ActivationAlgorithm activationAlgorithm,
 ) =>
-    (weightedSum) => algorithms[activationAlgorithm]![0](weightedSum());
+    (weightedSum) => algorithms[activationAlgorithm]!.first(weightedSum());
 
 /// Resolves an `ActivationAlgorithm` to the derivative of the mathematical
 /// function in the form of an `ActivationFunction`
 ActivationFunction resolveActivationDerivative(
   ActivationAlgorithm activationAlgorithm,
 ) =>
-    (weightedSum) => algorithms[activationAlgorithm]![1](weightedSum());
+    (weightedSum) => algorithms[activationAlgorithm]!.last(weightedSum());
 
 /// Shrinks the range of values to inbetween 0 and 1 using exponentials. Results
 /// can be driven into saturation, which makes the sigmoid function unsuited for
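Judging by these two resolvers, `algorithms` maps each `ActivationAlgorithm` to a two-element list holding the activation function and its derivative, so `.first`/`.last` name that pairing more clearly than `[0]`/`[1]`. A standalone sketch of the pairing (the sigmoid formulas below are the standard definitions, not code from this diff):

```dart
import 'dart:math';

// Standard sigmoid and its derivative; the [function, derivative]
// pairing mirrors what the `algorithms` map appears to store.
double sigmoid(double x) => 1 / (1 + exp(-x));
double sigmoidDerivative(double x) => sigmoid(x) * (1 - sigmoid(x));

final pair = <double Function(double)>[sigmoid, sigmoidDerivative];

void main() {
  print(pair.first(0)); // 0.5  (activation)
  print(pair.last(0)); // 0.25 (derivative)
}
```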
6 changes: 3 additions & 3 deletions lib/src/layers/core/dense.dart
@@ -6,7 +6,7 @@ import 'package:synadart/src/layers/layer.dart';
 class Dense extends Layer {
   /// Construct a dense layer using the [activation] algorithm and [size].
   Dense({
-    required int size,
-    required ActivationAlgorithm activation,
-  }) : super(size: size, activation: activation);
+    required super.size,
+    required super.activation,
+  });
 }
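This rewrite (repeated below in `LSTM` and `Sequential`) uses the super-initializer parameters introduced in Dart 2.17, which is what the SDK constraint bump enables: `super.size` forwards the named argument straight to the superclass constructor. A minimal standalone sketch (the class names here are illustrative, not synadart's):

```dart
// Before Dart 2.17, forwarding required repeating each parameter:
//   Child({required int size}) : super(size: size);
// With super-initializer parameters, the forwarding is implicit.
class Base {
  Base({required this.size});
  final int size;
}

class Child extends Base {
  Child({required super.size}); // forwards `size` to Base's constructor
}

void main() {
  print(Child(size: 3).size); // 3
}
```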
16 changes: 9 additions & 7 deletions lib/src/layers/layer.dart
@@ -60,14 +60,16 @@ class Layer {
   }) {
     isInput = parentLayerSize == 0;
 
-    neurons.addAll(Iterable.generate(
-      size,
-      (_) => Neuron(
-        activationAlgorithm: activation,
-        parentLayerSize: parentLayerSize,
-        learningRate: learningRate,
+    neurons.addAll(
+      Iterable.generate(
+        size,
+        (_) => Neuron(
+          activationAlgorithm: activation,
+          parentLayerSize: parentLayerSize,
+          learningRate: learningRate,
+        ),
       ),
-    ));
+    );
   }
 
   /// Accept a single input or multiple [inputs] by assigning them sequentially
12 changes: 3 additions & 9 deletions lib/src/layers/recurrent/lstm.dart
@@ -22,16 +22,10 @@ class LSTM extends Layer {
   /// [recurrenceActivation] - Algorithm used to activate recurrence
   /// connections.
   LSTM({
-    required int size,
-    required ActivationAlgorithm activation,
+    required super.size,
+    required super.activation,
     required ActivationAlgorithm recurrenceActivation,
-  }) : super(
-          size: size,
-          activation: activation,
-        ) {
-    this.recurrenceActivation =
-        resolveActivationAlgorithm(recurrenceActivation);
-  }
+  }) : recurrenceActivation = resolveActivationAlgorithm(recurrenceActivation);
 
   /// Obtain the output by applying the recurrent memory algorithm.
   @override
5 changes: 3 additions & 2 deletions lib/src/networks/network.dart
@@ -51,8 +51,9 @@ class Network {
   /// Adds a `Layer` to this `Network`.
   void addLayer(Layer layer) {
     layer.initialise(
-      parentLayerSize: layers.isEmpty ? 0 : layers[layers.length - 1].size,
-      learningRate: learningRate);
+      parentLayerSize: layers.isEmpty ? 0 : layers.last.size,
+      learningRate: learningRate,
+    );
 
     layers.add(layer);
 
7 changes: 3 additions & 4 deletions lib/src/networks/sequential.dart
@@ -1,4 +1,3 @@
-import 'package:synadart/src/layers/layer.dart';
 import 'package:synadart/src/networks/network.dart';
 import 'package:synadart/src/networks/training/backpropagation.dart';
 
@@ -7,7 +6,7 @@ import 'package:synadart/src/networks/training/backpropagation.dart';
 class Sequential extends Network with Backpropagation {
   /// Creates a `Sequential` model network.
   Sequential({
-    required double learningRate,
-    List<Layer>? layers,
-  }) : super(learningRate: learningRate, layers: layers);
+    required super.learningRate,
+    super.layers,
+  });
 }
10 changes: 8 additions & 2 deletions lib/src/networks/training/backpropagation.dart
@@ -42,7 +42,8 @@ mixin Backpropagation on Network {
 
     if (inputs.length != expected.length) {
       log.severe(
-          'Inputs and expected result lists must be of the same length.');
+        'Inputs and expected result lists must be of the same length.',
+      );
       return;
     }
 
@@ -66,14 +67,19 @@
 
     for (var iteration = 0; iteration < iterations; iteration++) {
       stopwatch.start();
+
       for (var index = 0; index < inputs.length; index++) {
         propagateBackwards(inputs[index], expected[index]);
      }
+
       stopwatch.stop();
+
       if (iteration % 500 == 0) {
         log.info(
-            'Iterations: $iteration/$iterations ~ ETA: ${secondsToETA((stopwatch.elapsedMicroseconds * (iterations - iteration)) ~/ 1000000)}');
+          'Iterations: $iteration/$iterations ~ ETA: ${secondsToETA((stopwatch.elapsedMicroseconds * (iterations - iteration)) ~/ 1000000)}',
+        );
       }
+
       stopwatch.reset();
     }
   }
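For context, the ETA in that log line multiplies the microseconds measured for the latest pass over the inputs by the remaining iteration count, then integer-divides by 1,000,000 to convert to seconds. A standalone sketch of the arithmetic (`formatEta` below is a stand-in, since `secondsToETA`'s implementation does not appear in this diff):

```dart
// Stand-in formatter; synadart's secondsToETA is not shown in this commit.
String formatEta(int seconds) => '${seconds ~/ 60}m ${seconds % 60}s';

void main() {
  const elapsedMicroseconds = 1200; // last pass over the inputs took 1.2 ms
  const iterations = 20000;
  const iteration = 500;

  // Same expression as the log line: remaining work at the measured pace.
  final etaSeconds =
      (elapsedMicroseconds * (iterations - iteration)) ~/ 1000000;

  print(formatEta(etaSeconds)); // 0m 23s
}
```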
4 changes: 2 additions & 2 deletions lib/src/neurons/neuron.dart
@@ -116,7 +116,7 @@ class Neuron {
     }
 
     if (this.inputs.isNotEmpty) {
-      this.inputs[0] = input!;
+      this.inputs.first = input!;
     } else {
       this.inputs.add(input!);
     }
@@ -142,5 +142,5 @@
   /// it will output the weighted sum of the [inputs] and [weights], passed
   /// through the activation function.
   double get output =>
-      weights.isEmpty ? inputs[0] : activation(() => dot(inputs, weights));
+      weights.isEmpty ? inputs.first : activation(() => dot(inputs, weights));
 }
6 changes: 6 additions & 0 deletions lib/src/utils/mathematical_operations.dart
@@ -1,27 +1,33 @@
 /// Calculates the dot product of two lists
 double dot(List<double> a, List<double> b) {
   var result = 0.0;
+
   for (var index = 0; index < a.length; index++) {
     result += a[index] * b[index];
   }
+
   return result;
 }
 
 /// Adds values in list [b] to list [a]
 List<double> add(List<double> a, List<double> b) {
   final result = <double>[];
+
   for (var index = 0; index < a.length; index++) {
     result.add(a[index] + b[index]);
   }
+
   return result;
 }
 
 /// Subtracts values in list [b] from list [a]
 List<double> subtract(List<double> a, List<double> b) {
   final result = <double>[];
+
   for (var index = 0; index < a.length; index++) {
     result.add(a[index] - b[index]);
   }
+
   return result;
 }
 
7 changes: 5 additions & 2 deletions lib/src/utils/value_generator.dart
@@ -16,6 +16,9 @@ Iterable<double> doubleIterableSync({double from = 0, double to = 0}) sync* {
 }
 
 /// Generates a list of size [size], filled with random `double` values.
-List<double> generateListWithRandomDoubles(
-        {required int size, double from = 0, double to = 0}) =>
+List<double> generateListWithRandomDoubles({
+  required int size,
+  double from = 0,
+  double to = 0,
+}) =>
     doubleIterableSync(from: from, to: to).take(size).toList();
2 changes: 2 additions & 0 deletions lib/synadart.dart
@@ -1,3 +1,5 @@
+/// A limited but fully documented neural network library created for
+/// educational purposes.
 library synadart;
 
 export 'src/activation.dart';
12 changes: 6 additions & 6 deletions pubspec.yaml
@@ -1,20 +1,20 @@
 name: synadart
-version: 0.4.3
+version: 0.4.4
 
 description: >-
-  A simple-to-grasp, complete and fully documented Neural Network library,
-  written from scratch in Dart.
+  A limited but fully documented neural network library created for educational
+  purposes.
 homepage: https://github.com/wordcollector/synadart
 repository: https://github.com/wordcollector/synadart
 issue_tracker: https://github.com/wordcollector/synadart/issues
 
 environment:
-  sdk: '>=2.12.0 <3.0.0'
+  sdk: '>=2.17.0 <3.0.0'
 
 dependencies:
   # Logging
-  sprint: ^1.0.3
+  sprint: ^1.0.4
 
 dev_dependencies:
-  words: ^0.0.2+1
+  words: ^0.1.1
5 changes: 1 addition & 4 deletions test/synadart_test.dart
@@ -1,4 +1 @@
-import 'package:synadart/synadart.dart';
-
-void main() {
-}
+void main() {}
