Skip to content

Commit f6aa1a5

Browse files
authored
Remove deprecated PHPUnit readAttribute methods (#372)
1 parent db82afa commit f6aa1a5

File tree

6 files changed

+35
-22
lines changed

6 files changed

+35
-22
lines changed

ecs.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@ services:
1818
PhpCsFixer\Fixer\Operator\BinaryOperatorSpacesFixer:
1919
align_double_arrow: false
2020
align_equals: false
21-
21+
PhpCsFixer\Fixer\PhpUnit\PhpUnitTestCaseStaticMethodCallsFixer:
22+
call_type: 'self'
2223
# phpdoc
2324
PhpCsFixer\Fixer\Phpdoc\PhpdocSeparationFixer: ~
2425
PhpCsFixer\Fixer\Phpdoc\PhpdocAlignFixer: ~

src/Helper/Optimizer/Optimizer.php

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,11 @@ public function setTheta(array $theta): self
4848
return $this;
4949
}
5050

51+
public function theta(): array
52+
{
53+
return $this->theta;
54+
}
55+
5156
/**
5257
* Executes the optimization with the given samples & targets
5358
* and returns the weights

src/NeuralNetwork/Network/MultilayerPerceptron.php

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,16 @@ public function getOutput(): array
129129
return $result;
130130
}
131131

132+
public function getLearningRate(): float
133+
{
134+
return $this->learningRate;
135+
}
136+
137+
public function getBackpropagation(): Backpropagation
138+
{
139+
return $this->backpropagation;
140+
}
141+
132142
/**
133143
* @param mixed $target
134144
*/

src/NeuralNetwork/Training/Backpropagation.php

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,11 @@ public function setLearningRate(float $learningRate): void
3434
$this->learningRate = $learningRate;
3535
}
3636

37+
public function getLearningRate(): float
38+
{
39+
return $this->learningRate;
40+
}
41+
3742
/**
3843
* @param mixed $targetClass
3944
*/

tests/Helper/Optimizer/OptimizerTest.php

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,7 @@ public function testSetTheta(): void
2626
$optimizer = $this->getMockForAbstractClass(Optimizer::class, [2]);
2727
$object = $optimizer->setTheta([0.3, 1]);
2828

29-
$theta = self::getObjectAttribute($optimizer, 'theta');
30-
3129
self::assertSame($object, $optimizer);
32-
self::assertSame([0.3, 1], $theta);
30+
self::assertSame([0.3, 1], $object->theta());
3331
}
3432
}

tests/NeuralNetwork/Network/MultilayerPerceptronTest.php

Lines changed: 12 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -55,14 +55,12 @@ public function testLearningRateSetter(): void
5555
[5, [3], [0, 1], 1000, null, 0.42]
5656
);
5757

58-
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
59-
$backprop = self::readAttribute($mlp, 'backpropagation');
60-
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
58+
self::assertEquals(0.42, $mlp->getLearningRate());
59+
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
6160

6261
$mlp->setLearningRate(0.24);
63-
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
64-
$backprop = self::readAttribute($mlp, 'backpropagation');
65-
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
62+
self::assertEquals(0.24, $mlp->getLearningRate());
63+
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
6664
}
6765

6866
public function testLearningRateSetterWithCustomActivationFunctions(): void
@@ -75,14 +73,12 @@ public function testLearningRateSetterWithCustomActivationFunctions(): void
7573
[5, [[3, $activation_function], [5, $activation_function]], [0, 1], 1000, null, 0.42]
7674
);
7775

78-
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
79-
$backprop = self::readAttribute($mlp, 'backpropagation');
80-
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
76+
self::assertEquals(0.42, $mlp->getLearningRate());
77+
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
8178

8279
$mlp->setLearningRate(0.24);
83-
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
84-
$backprop = self::readAttribute($mlp, 'backpropagation');
85-
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
80+
self::assertEquals(0.24, $mlp->getLearningRate());
81+
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
8682
}
8783

8884
public function testLearningRateSetterWithLayerObject(): void
@@ -95,14 +91,12 @@ public function testLearningRateSetterWithLayerObject(): void
9591
[5, [new Layer(3, Neuron::class, $activation_function), new Layer(5, Neuron::class, $activation_function)], [0, 1], 1000, null, 0.42]
9692
);
9793

98-
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
99-
$backprop = self::readAttribute($mlp, 'backpropagation');
100-
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
94+
self::assertEquals(0.42, $mlp->getLearningRate());
95+
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
10196

10297
$mlp->setLearningRate(0.24);
103-
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
104-
$backprop = self::readAttribute($mlp, 'backpropagation');
105-
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
98+
self::assertEquals(0.24, $mlp->getLearningRate());
99+
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
106100
}
107101

108102
/**

0 commit comments

Comments (0)