Skip to content

Commit 74a9d6a

Browse files
committed
Migration uses selection by tournament
1 parent 0cb2611 commit 74a9d6a

File tree

7 files changed

+18
-29
lines changed

7 files changed

+18
-29
lines changed

‎mlp-ea-centralized/native/client/client.go‎

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
package main
22

33
import (
4-
"os"
5-
64
"github.com/salvacorts/TFG-Parasitic-Metaheuristics/mlp-ea-centralized/common/ga"
75
"google.golang.org/grpc/grpclog"
86

@@ -18,7 +16,7 @@ func main() {
1816

1917
logrus.SetLevel(logrus.ErrorLevel)
2018
client.Log.SetLevel(logrus.InfoLevel)
21-
grpclog.SetLoggerV2(grpclog.NewLoggerV2(os.Stdout, os.Stdout, os.Stdout))
19+
grpclog.SetLoggerV2(grpclog.NewLoggerV2(nil, nil, ga.Log.Out))
2220

2321
err := client.Start()
2422
if err != nil {

‎mlp-ea-centralized/native/server/server.go‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ func main() {
2222
logrus.SetLevel(logrus.ErrorLevel)
2323
ga.Log.SetOutput(os.Stdout)
2424
ga.Log.SetLevel(logrus.InfoLevel)
25-
grpclog.SetLoggerV2(grpclog.NewLoggerV2(ga.Log.Out, ga.Log.Out, ga.Log.Out))
25+
grpclog.SetLoggerV2(grpclog.NewLoggerV2(nil, nil, ga.Log.Out))
2626

2727
_, score, err := ga.TrainMLP(string(fileContent))
2828
if err != nil {

‎mlp-ea-decentralized/common/ga/poolModel.go‎

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -161,8 +161,8 @@ func (pool *PoolModel) Minimize() {
161161
for i := range offsprings {
162162
for _, op := range pool.ExtraOperators {
163163
if pool.Rnd.Float64() <= op.Probability {
164-
offsprings[i].Evaluated = false
165164
offsprings[i].Genome = op.Operator(offsprings[i].Genome, pool.Rnd)
165+
offsprings[i].Evaluated = false
166166
}
167167
}
168168
}
@@ -215,7 +215,6 @@ func (pool *PoolModel) selection(nOffstrings, nCandidates int) []eaopt.Individua
215215
return offsprings
216216
}
217217

218-
// TODO: Get rid of odd arrays here
219218
func (pool *PoolModel) crossover(in []eaopt.Individual) []eaopt.Individual {
220219
offsprings := make([]eaopt.Individual, len(in))
221220
for i := 0; i < len(in)-1; i += 2 {
@@ -241,8 +240,8 @@ func (pool *PoolModel) crossover(in []eaopt.Individual) []eaopt.Individual {
241240
func (pool *PoolModel) mutate(in []eaopt.Individual) []eaopt.Individual {
242241
for i := range in {
243242
if pool.Rnd.Float64() < pool.MutRate {
244-
in[i].Evaluated = false
245243
in[i].Genome.Mutate(pool.Rnd)
244+
in[i].Evaluated = false
246245
}
247246
}
248247

@@ -513,16 +512,8 @@ func (pool *PoolModel) migrationScheduler() {
513512
var conn *grpc.ClientConn
514513
dialed := false
515514

516-
snap := pool.population.Snapshot()
517-
indivArr := make([]eaopt.Individual, 0, len(snap))
515+
indivArr := pool.selection(pool.NMigrate, 4)
518516
migrate := make([]Individual, pool.NMigrate)
519-
520-
for _, item := range snap {
521-
indivArr = append(indivArr, item.Object.(eaopt.Individual).Clone(pool.Rnd))
522-
}
523-
524-
// Sort population and get NMigrate best
525-
indivArr = pool.SortFunc(indivArr, pool.SortPrecission)
526517
for i := range migrate {
527518
migrate[i] = Individual{
528519
IndividualID: indivArr[i].ID,

‎mlp-ea-decentralized/common/mlp/operators.go‎

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@ func NewRandMLP(rng *rand.Rand) eaopt.Genome {
3131
}
3232

3333
// Evaluate a MLP by getting its accuracy
34-
// TODO: Compare also number of neurons
3534
func (nn *MultiLayerNetwork) Evaluate() (float64, error) {
3635
copy := nn.Clone().(*MultiLayerNetwork)
3736

‎mlp-ea-decentralized/native/server/server.go‎

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ func main() {
4747
Classes: mapped,
4848
TrainingSet: train,
4949
TrainEpochs: 100,
50-
MutateRate: 0.3,
50+
MutateRate: 1,
5151
FactoryCfg: mlp.MLPFactoryConfig{
5252
InputLayers: len(patterns[0].Features),
5353
OutputLayers: len(mapped),
@@ -80,7 +80,7 @@ func main() {
8080
eaopt.ExtraOperator{Operator: mlp.AddNeuron, Probability: 0.3},
8181
eaopt.ExtraOperator{Operator: mlp.RemoveNeuron, Probability: 0.15},
8282
eaopt.ExtraOperator{Operator: mlp.SubstituteNeuron, Probability: 0.15},
83-
eaopt.ExtraOperator{Operator: mlp.Train, Probability: 0.3},
83+
eaopt.ExtraOperator{Operator: mlp.Train, Probability: 0.5},
8484
}
8585
pool.BestCallback = func(pool *ga.PoolModel) {
8686
logrus.WithFields(logrus.Fields{

‎mlp-ea/common/ga.go‎

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ func TrainMLP(csvdata string) (mn.MultiLayerNetwork, float64, error) {
3434
ga.NPops = 1
3535
ga.PopSize = popSize
3636
ga.Model = getGenerationalModelTournament(4)
37+
//ga.Model = getSteadyStateModel()
3738
ga.Callback = func(ga *eaopt.GA) {
3839
Log.WithFields(logrus.Fields{
3940
"level": "info",
@@ -50,8 +51,8 @@ func TrainMLP(csvdata string) (mn.MultiLayerNetwork, float64, error) {
5051

5152
// Configure MLP
5253
Config = MLPConfig{
53-
Epochs: 10,
54-
Folds: 1,
54+
Epochs: 50,
55+
Folds: 5,
5556
Classes: &mapped,
5657
TrainingSet: &train,
5758
FactoryCfg: MLPFactoryConfig{

‎mlp-ea/common/mlp.go‎

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -38,16 +38,16 @@ func NewRandMLP(rng *rand.Rand) eaopt.Genome {
3838
func (mlp *MLP) Evaluate() (float64, error) {
3939
copy := mlp.Clone().(*MLP)
4040

41-
train, validation := mv.TrainTestPatternSplit(*Config.TrainingSet, 0.8, 1)
41+
scores := mv.MLPKFoldValidation(
42+
(*mn.MultiLayerNetwork)(copy), *Config.TrainingSet, Config.Epochs, Config.Folds, 1, *Config.Classes)
4243

43-
mn.MLPTrain((*mn.MultiLayerNetwork)(copy), train, *Config.Classes,
44-
Config.Epochs, true)
45-
46-
predictions := utils.PredictN((*mn.MultiLayerNetwork)(copy), validation)
47-
predictionsR := utils.RoundPredictions(predictions)
48-
_, acc := utils.AccuracyN(predictionsR, validation)
44+
mean := 0.0
45+
for _, s := range scores {
46+
mean += s
47+
}
48+
mean /= float64(len(scores))
4949

50-
return 100 - acc, nil
50+
return 100 - mean, nil
5151
}
5252

5353
// Mutate modifies the weights of certain neurons, at random, depending on the application rate.

0 commit comments

Comments
 (0)