To cite metANN in publications, please use the references below. They cover (1) the metANN package itself, (2) the implemented optimization algorithms and neural network components, and (3) metaheuristic-based neural network training.
Dilber B, Ozdemir A (2026). metANN: Metaheuristic and Gradient-Based Optimization for Neural Network Training and Continuous Problems. R package version 0.1.0, https://github.com/burakdilber/metANN.
Kennedy J, Eberhart R (1995). “Particle Swarm Optimization.” In Proceedings of ICNN'95 - International Conference on Neural Networks, volume 4, 1942–1948. doi:10.1109/ICNN.1995.488968.
Storn R, Price K (1997). “Differential Evolution – A Simple and Efficient Heuristic for Global Optimization over Continuous Spaces.” Journal of Global Optimization, 11, 341–359. doi:10.1023/A:1008202821328.
Goldberg D (1989). Genetic Algorithms in Search, Optimization, and Machine Learning. Addison-Wesley, Reading, MA.
Karaboga D, Basturk B (2007). “A Powerful and Efficient Algorithm for Numerical Function Optimization: Artificial Bee Colony (ABC) Algorithm.” Journal of Global Optimization, 39, 459–471. doi:10.1007/s10898-007-9149-x.
Mirjalili S, Mirjalili S, Lewis A (2014). “Grey Wolf Optimizer.” Advances in Engineering Software, 69, 46–61. doi:10.1016/j.advengsoft.2013.12.007.
Mirjalili S, Lewis A (2016). “The Whale Optimization Algorithm.” Advances in Engineering Software, 95, 51–67. doi:10.1016/j.advengsoft.2016.01.008.
Rao R, Savsani V, Vakharia D (2011). “Teaching-Learning-Based Optimization: A Novel Method for Constrained Mechanical Design Optimization Problems.” Computer-Aided Design, 43, 303–315. doi:10.1016/j.cad.2010.12.015.
Fu Y, Liu D, Chen J, He L (2024). “Secretary Bird Optimization Algorithm: A New Metaheuristic for Solving Global Optimization Problems.” Artificial Intelligence Review, 57, 123. doi:10.1007/s10462-024-10729-y.
Kingma D, Ba J (2015). “Adam: A Method for Stochastic Optimization.” In International Conference on Learning Representations.
Nair V, Hinton G (2010). “Rectified Linear Units Improve Restricted Boltzmann Machines.” In Proceedings of the 27th International Conference on Machine Learning, 807–814.
Bridle J (1990). “Probabilistic Interpretation of Feedforward Classification Network Outputs, with Relationships to Statistical Pattern Recognition.” In Neurocomputing: Algorithms, Architectures and Applications, 227–236. Springer.
Montana D, Davis L (1989). “Training Feedforward Neural Networks Using Genetic Algorithms.” In Proceedings of the 11th International Joint Conference on Artificial Intelligence, 762–767.
Ilonen J, Kamarainen J, Lampinen J (2003). “Differential Evolution Training Algorithm for Feed-Forward Neural Networks.” Neural Processing Letters, 17, 93–105. doi:10.1023/A:1022995128597.
Karaboga D, Ozturk C (2009). “Neural Networks Training by Artificial Bee Colony Algorithm on Pattern Classification.” Neural Network World, 19(3), 279–292.
Mirjalili S (2015). “How Effective is the Grey Wolf Optimizer in Training Multi-Layer Perceptrons.” Applied Intelligence, 43, 150–161. doi:10.1007/s10489-014-0645-7.
Dilber B, Ozdemir A (2026). “A novel approach to training feed-forward multi-layer perceptrons with recently proposed secretary bird optimization algorithm.” Neural Computing and Applications, 38(5). doi:10.1007/s00521-026-11874-x.
Corresponding BibTeX entries:
@Manual{dilber2026metann,
  title  = {{metANN}: Metaheuristic and Gradient-Based Optimization for
            Neural Network Training and Continuous Problems},
  author = {Dilber, Burak and Ozdemir, A. Firat},
  year   = {2026},
  note   = {R package version 0.1.0},
  url    = {https://github.com/burakdilber/metANN},
}
@InProceedings{kennedy1995pso,
  title     = {Particle Swarm Optimization},
  author    = {Kennedy, James and Eberhart, Russell},
  booktitle = {Proceedings of {ICNN'95} - International Conference on
               Neural Networks},
  year      = {1995},
  volume    = {4},
  pages     = {1942--1948},
  doi       = {10.1109/ICNN.1995.488968},
}
@Article{storn1997de,
  title   = {Differential Evolution -- A Simple and Efficient Heuristic
             for Global Optimization over Continuous Spaces},
  author  = {Storn, Rainer and Price, Kenneth},
  journal = {Journal of Global Optimization},
  year    = {1997},
  volume  = {11},
  pages   = {341--359},
  doi     = {10.1023/A:1008202821328},
}
@Book{goldberg1989ga,
  title     = {Genetic Algorithms in Search, Optimization, and Machine
               Learning},
  author    = {Goldberg, David E.},
  publisher = {Addison-Wesley},
  address   = {Reading, MA},
  year      = {1989},
}
@Article{karaboga2007abc,
  title   = {A Powerful and Efficient Algorithm for Numerical Function
             Optimization: {Artificial Bee Colony} ({ABC}) Algorithm},
  author  = {Karaboga, Dervis and Basturk, Bahriye},
  journal = {Journal of Global Optimization},
  year    = {2007},
  volume  = {39},
  pages   = {459--471},
  doi     = {10.1007/s10898-007-9149-x},
}
@Article{mirjalili2014gwo,
  title   = {Grey Wolf Optimizer},
  author  = {Mirjalili, Seyedali and Mirjalili, Seyed Mohammad and
             Lewis, Andrew},
  journal = {Advances in Engineering Software},
  year    = {2014},
  volume  = {69},
  pages   = {46--61},
  doi     = {10.1016/j.advengsoft.2013.12.007},
}
@Article{mirjalili2016woa,
  title   = {The {Whale Optimization Algorithm}},
  author  = {Mirjalili, Seyedali and Lewis, Andrew},
  journal = {Advances in Engineering Software},
  year    = {2016},
  volume  = {95},
  pages   = {51--67},
  doi     = {10.1016/j.advengsoft.2016.01.008},
}
@Article{rao2011tlbo,
  title   = {Teaching-Learning-Based Optimization: A Novel Method for
             Constrained Mechanical Design Optimization Problems},
  author  = {Rao, R. V. and Savsani, V. J. and Vakharia, D. P.},
  journal = {Computer-Aided Design},
  year    = {2011},
  volume  = {43},
  pages   = {303--315},
  doi     = {10.1016/j.cad.2010.12.015},
}
@Article{fu2024sboa,
  title   = {{Secretary Bird Optimization Algorithm}: A New Metaheuristic
             for Solving Global Optimization Problems},
  author  = {Fu, Youfa and Liu, Dan and Chen, Jiadui and He, Ling},
  journal = {Artificial Intelligence Review},
  year    = {2024},
  volume  = {57},
  pages   = {123},
  doi     = {10.1007/s10462-024-10729-y},
}
@InProceedings{kingma2015adam,
  title     = {{Adam}: A Method for Stochastic Optimization},
  author    = {Kingma, Diederik P. and Ba, Jimmy},
  booktitle = {International Conference on Learning Representations},
  year      = {2015},
}
@InProceedings{nair2010relu,
  title     = {Rectified Linear Units Improve Restricted {Boltzmann}
               Machines},
  author    = {Nair, Vinod and Hinton, Geoffrey E.},
  booktitle = {Proceedings of the 27th International Conference on
               Machine Learning},
  year      = {2010},
  pages     = {807--814},
}
@InCollection{bridle1990softmax,
  title     = {Probabilistic Interpretation of Feedforward Classification
               Network Outputs, with Relationships to Statistical Pattern
               Recognition},
  author    = {Bridle, John S.},
  booktitle = {Neurocomputing: Algorithms, Architectures and
               Applications},
  publisher = {Springer},
  year      = {1990},
  pages     = {227--236},
}
@InProceedings{montana1989ga,
  title     = {Training Feedforward Neural Networks Using Genetic
               Algorithms},
  author    = {Montana, D. J. and Davis, L.},
  booktitle = {Proceedings of the 11th International Joint Conference
               on Artificial Intelligence},
  year      = {1989},
  pages     = {762--767},
}
@Article{ilonen2003de,
  title   = {Differential Evolution Training Algorithm for Feed-Forward
             Neural Networks},
  author  = {Ilonen, J. and Kamarainen, J.-K. and Lampinen, J.},
  journal = {Neural Processing Letters},
  year    = {2003},
  volume  = {17},
  pages   = {93--105},
  doi     = {10.1023/A:1022995128597},
}
@Article{karaboga2009abc,
  title   = {Neural Networks Training by {Artificial Bee Colony}
             Algorithm on Pattern Classification},
  author  = {Karaboga, D. and Ozturk, C.},
  journal = {Neural Network World},
  year    = {2009},
  volume  = {19},
  number  = {3},
  pages   = {279--292},
}
@Article{mirjalili2015gwo,
  title   = {How Effective is the {Grey Wolf Optimizer} in Training
             Multi-Layer Perceptrons},
  author  = {Mirjalili, S.},
  journal = {Applied Intelligence},
  year    = {2015},
  volume  = {43},
  pages   = {150--161},
  doi     = {10.1007/s10489-014-0645-7},
}
@Article{dilber2026sboa,
  title   = {A Novel Approach to Training Feed-Forward Multi-Layer
             Perceptrons with Recently Proposed {Secretary Bird}
             Optimization Algorithm},
  author  = {Dilber, Burak and Ozdemir, A. Firat},
  journal = {Neural Computing and Applications},
  year    = {2026},
  volume  = {38},
  number  = {5},
  doi     = {10.1007/s00521-026-11874-x},
}