%Version 2.1 April 2023
% See section 11 of the User Manual for version history
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%                                                                 %%
%% Please do not use \input{...} to include other tex files.       %%
%% Submit your LaTeX manuscript as one .tex document.              %%
%%                                                                 %%
%% All additional figures and files should be attached             %%
%% separately and not embedded in the \TeX\ document itself.       %%
%%                                                                 %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%\documentclass[referee,sn-basic]{sn-jnl}% referee option is meant for double line spacing

%%=======================================================%%
%% to print line numbers in the margin use lineno option %%
%%=======================================================%%

%%\documentclass[lineno,sn-basic]{sn-jnl}% Basic Springer Nature Reference Style/Chemistry Reference Style

%%======================================================%%
%% to compile with pdflatex/xelatex use pdflatex option %%
%%======================================================%%

%%\documentclass[pdflatex,sn-basic]{sn-jnl}% Basic Springer Nature Reference Style/Chemistry Reference Style


%%Note: the following reference styles support Namedate and Numbered referencing. By default the style follows the most common style. To switch between the options you can add or remove Numbered in the optional parenthesis. 
%%The option is available for: sn-basic.bst, sn-vancouver.bst, sn-chicago.bst, sn-mathphys.bst. %  
 
%%\documentclass[sn-nature]{sn-jnl}% Style for submissions to Nature Portfolio journals
%%\documentclass[sn-basic]{sn-jnl}% Basic Springer Nature Reference Style/Chemistry Reference Style
\documentclass[sn-mathphys,Numbered]{sn-jnl}% Math and Physical Sciences Reference Style
%%\documentclass[sn-aps]{sn-jnl}% American Physical Society (APS) Reference Style
%%\documentclass[sn-vancouver,Numbered]{sn-jnl}% Vancouver Reference Style
%%\documentclass[sn-apa]{sn-jnl}% APA Reference Style 
%%\documentclass[sn-chicago]{sn-jnl}% Chicago-based Humanities Reference Style
%%\documentclass[default]{sn-jnl}% Default
%%\documentclass[default,iicol]{sn-jnl}% Default with double column layout

%%%% Standard Packages
%%<additional latex packages if required can be included here>
\usepackage{graphicx}%
\usepackage{multirow}%
\usepackage{amsmath,amssymb,amsfonts}%
\usepackage{amsthm}%
\usepackage{mathrsfs}%
\usepackage[title]{appendix}%
\usepackage{xcolor}%
\usepackage{textcomp}%
\usepackage{manyfoot}%
\usepackage{booktabs}%
\usepackage{algorithm}%
\usepackage{algorithmicx}%
\usepackage{algpseudocode}%
\usepackage{listings}%
\usepackage{float}
\usepackage{rotating}
\usepackage{longtable}%
\usepackage{subcaption} 
\usepackage{lscape}
\captionsetup[subfigure]{labelformat=empty}
\captionsetup[figure]{labelsep=space}

%%%%

%%%%%=============================================================================%%%%
%%%%  Remarks: This template is provided to aid authors with the preparation
%%%%  of original research articles intended for submission to journals published 
%%%%  by Springer Nature. The guidance has been prepared in partnership with 
%%%%  production teams to conform to Springer Nature technical requirements. 
%%%%  Editorial and presentation requirements differ among journal portfolios and 
%%%%  research disciplines. You may find sections in this template are irrelevant 
%%%%  to your work and are empowered to omit any such section if allowed by the 
%%%%  journal you intend to submit to. The submission guidelines and policies 
%%%%  of the journal take precedence. A detailed User Manual is available in the 
%%%%  template package for technical guidance.
%%%%%=============================================================================%%%%

%\jyear{2021}%

%% as per the requirement new theorem styles can be included as shown below
\theoremstyle{thmstyleone}%
\newtheorem{theorem}{Theorem}%  meant for continuous numbers
%%\newtheorem{theorem}{Theorem}[section]% meant for sectionwise numbers
%% optional argument [theorem] produces theorem numbering sequence instead of independent numbers for Proposition
\newtheorem{proposition}[theorem]{Proposition}% 
%%\newtheorem{proposition}{Proposition}% to get separate numbers for theorem and proposition etc.

\theoremstyle{thmstyletwo}%
\newtheorem{example}{Example}%
\newtheorem{remark}{Remark}%

\theoremstyle{thmstylethree}%
\newtheorem{definition}{Definition}%

\raggedbottom
%%\unnumbered% uncomment this for unnumbered level heads

\begin{document}

\title[A Multi-strategy Improved Manta Ray Foraging Optimization for Engineering Applications]{A Multi-strategy Improved Manta Ray Foraging Optimization for Engineering Applications}

%%=============================================================%%
%% Prefix	-> \pfx{Dr}
%% GivenName	-> \fnm{Joergen W.}
%% Particle	-> \spfx{van der} -> surname prefix
%% FamilyName	-> \sur{Ploeg}
%% Suffix	-> \sfx{IV}
%% NatureName	-> \tanm{Poet Laureate} -> Title after name
%% Degrees	-> \dgr{MSc, PhD}
%% \author*[1,2]{\pfx{Dr} \fnm{Joergen W.} \spfx{van der} \sur{Ploeg} \sfx{IV} \tanm{Poet Laureate} 
%%                 \dgr{MSc, PhD}}\email{iauthor@gmail.com}
%%=============================================================%%

\author[1]{\fnm{Kewen} \sur{Wang}}\email{202230603119@mails.zstu.edu.cn}
\author*[1]{\fnm{Ting} \sur{Shu}}\email{shuting@zstu.edu.cn}
\author[2]{\fnm{Xuesong} \sur{Yin}}\email{yinxs@hdu.edu.cn}
\author[1]{\fnm{Jinsong} \sur{Xia}}\email{summerx@zstu.edu.cn}

\affil[1]{\orgdiv{School of Computer Science and Technology}, \orgname{Zhejiang Sci-Tech University}, \orgaddress{\city{Hangzhou}, \postcode{310018}, \country{China}}}
\affil[2]{\orgdiv{School of Media and Design}, \orgname{Hangzhou Dianzi University}, \orgaddress{\city{Hangzhou}, \postcode{310018}, \country{China}}}

%%==================================%%
%% sample for unstructured abstract %%
%%==================================%%

\abstract{
The Manta Ray Foraging Optimization (MRFO), inspired by the foraging behaviors of manta rays, has gained widespread application across various fields. However, MRFO often suffers from slow convergence and a tendency to fall into local optima. To address these issues, this paper introduces a multi-strategy enhanced Manta Ray Foraging Optimization algorithm (MLMRFO). The MLMRFO incorporates an adaptive somersault factor, a multi-leader mechanism with modified memory, and a Laplace crossover operator into the original MRFO framework. The adaptive somersault factor effectively balances exploration and exploitation during the iterative process, mitigating premature and slow convergence. Additionally, the multi-leader mechanism with improved memory and the Laplace crossover operator prevent the algorithm from falling into local optima. We used an ablation experiment to evaluate the impact of each strategy on the optimization ability of MRFO. The experimental results showed that each adopted strategy improved the performance of MRFO, with the combination of all three strategies yielding the most significant improvement. Furthermore, MLMRFO is compared with five state-of-the-art optimization algorithms and two MRFO variants using the CEC2017 test suite and four engineering optimization problems. The experimental results confirm the superior performance of MLMRFO in both numerical and engineering optimization problems.}

%%================================%%
%% Sample for structured abstract %%
%%================================%

% \abstract{\textbf{Purpose:} The abstract serves both as a general introduction to the topic and as a brief, non-technical summary of the main results and their implications. The abstract must not include subheadings (unless expressly permitted in the journal's Instructions to Authors), equations or citations. As a guide the abstract should not exceed 200 words. Most journals do not set a hard limit however authors are advised to check the author instructions for the journal they are submitting to.
% 
% \textbf{Methods:} The abstract serves both as a general introduction to the topic and as a brief, non-technical summary of the main results and their implications. The abstract must not include subheadings (unless expressly permitted in the journal's Instructions to Authors), equations or citations. As a guide the abstract should not exceed 200 words. Most journals do not set a hard limit however authors are advised to check the author instructions for the journal they are submitting to.
% 
% \textbf{Results:} The abstract serves both as a general introduction to the topic and as a brief, non-technical summary of the main results and their implications. The abstract must not include subheadings (unless expressly permitted in the journal's Instructions to Authors), equations or citations. As a guide the abstract should not exceed 200 words. Most journals do not set a hard limit however authors are advised to check the author instructions for the journal they are submitting to.
% 
% \textbf{Conclusion:} The abstract serves both as a general introduction to the topic and as a brief, non-technical summary of the main results and their implications. The abstract must not include subheadings (unless expressly permitted in the journal's Instructions to Authors), equations or citations. As a guide the abstract should not exceed 200 wordms. Most journals do not set a hard limit however authors are advised to check the author instructions for the journal they are submitting to.}

\keywords{Manta ray foraging optimization algorithm, Adaptive somersault factor, Multi-leader mechanism with modified memory, Laplace crossover operator, Engineering design}

%%\pacs[JEL Classification]{D8, H51}

%%\pacs[MSC Classification]{35A01, 65L10, 65L12, 65L20, 65L70}

\maketitle

\section{Introduction}\label{sec1}
An optimization problem is defined as a problem that seeks to identify a solution that optimizes a given objective function while satisfying the constraints imposed by the problem \cite{fu2023improved}. As optimization problems become increasingly complex, deterministic algorithms such as the Lagrange multiplier method \cite{he2016tutorial}, the conjugate gradient method \cite{nocedal2006conjugate}, linear programming \cite{ciripoi2018vector}, and the gradient descent method \cite{abualigah2021aquila} face significant limitations. In contrast, metaheuristic algorithms \cite{hassan2023meta}, characterized by their simplicity and independence from the problem's form or the objective function's derivative information, have demonstrated excellent performance in addressing these complex problems \cite{shen2023improved}.
\par Metaheuristic algorithms are generally categorized into four groups:
 evolutionary-based, human-based, physics-based, and swarm-based \cite{abualigah2021aquila}. Evolutionary-based algorithms include Genetic Algorithm \cite{de1988learning}, Genetic Programming \cite{koza1992programming}, and Differential Evolution \cite{storn1997differential}. Human-based algorithms encompass Poor and Rich Optimization Algorithm \cite{moosavi2019poor}, Adolescent Identity Search Algorithm \cite{bogar2020adolescent}, and Socio-evolution \& Learning Optimization Algorithm \cite{kumar2018socio}. Physics-based algorithms primarily consist of Gravitational Search Algorithm \cite{rashedi2009gsa}, Water Wave Optimization \cite{zheng2019water}, and Special Relativity Search \cite{goodarzimehr2023special}. Among these four categories, swarm-based algorithms are the most popular, including Whale Optimization Algorithm \cite{mirjalili2016whale}, Gray Wolf Algorithm \cite{mirjalili2014grey}, Seagull Optimization Algorithm \cite{dhiman2019seagull}, Harris Hawks Optimization \cite{heidari2019harris}, Slime Mould Algorithm \cite{li2020slime}, Sparrow Optimization Algorithm \cite{xue2020novel}, Marine Predators Algorithm \cite{faramarzi2020marine}, Reptile Search Algorithm \cite{abualigah2022reptile}, Honey Badger Algorithm \cite{hashim2022honey}, and Coati Optimization Algorithm \cite{dehghani2023coati}.
\par The Manta Ray Foraging Optimization (MRFO) \cite{zhao2020manta}, proposed by Zhao et al. in 2020, is a swarm-based algorithm inspired by the foraging behaviors observed in manta rays. Compared to classic metaheuristic algorithms, MRFO demonstrates superior convergence accuracy \cite{zhu2022application}. Its high convergence accuracy and ease of implementation have led many researchers to apply MRFO and its variants across various fields, such as power system optimization, feature selection and image processing \cite{abdullahi2023manta}. 
\par However, MRFO has disadvantages such as slow convergence and a tendency to fall into local optima \cite{spea2024optimizing}. To overcome the above shortcomings, many researchers have proposed optimization strategies for MRFO. Zhu et al. \cite{zhu2023manta} explored mechanistic games and asymptotic learning methods with MRFO to tackle multiple optimization problems. Zhang et al. \cite{zhang2023manta} replaced the spiral in the original MRFO algorithm with the internal rotation wheel line, which improved the global exploration ability of MRFO. Spea \cite{spea2024optimizing} introduced an oppositional-based learning method in MRFO to better utilize the exploration space to increase the probability of finding the global optimal solution. Zhang et al. \cite{zhang2024ibmrfo} utilized the chaotic tent map in MRFO to increase the population diversity of the algorithm. Jena et al. \cite{jena2021maximum} enhanced MRFO by incorporating adaptive attacking power, which increased early population diversity and prevented the algorithm from becoming trapped in local minima or maxima during later stages. This modification ensured a balanced performance between MRFO's initial exploration and later exploitation ability. Ma et al. \cite{ma2023manta} improved the diversification of the manta ray population in MRFO by incorporating oppositional learning. Qu et al. \cite{qu2024improved} combined the Tent chaotic mapping, the bidirectional search strategy, and the Levy flight strategy with MRFO. These improvements respectively enhanced the quality of the initial solution, expanded the search range of MRFO, and strengthened MRFO's ability to escape from local optimal solutions. Liu et al. \cite{liu2024efficient} utilized a random information interaction strategy between individuals in MRFO to expedite the convergence of the algorithm, and then added a fractional derivative mutation strategy to improve individual quality. 
Although these MRFO variants have improved the algorithm's performance by integrating various strategies, there is still room for further enhancement in MRFO's ability to escape local optima and increase convergence speed.
\par To overcome these limitations and enhance MRFO's capability in engineering constrained optimization and numerical optimization, this paper proposes a multi-strategy improved MRFO (MLMRFO). The MLMRFO incorporates an adaptive somersault factor, a multi-leader mechanism with a modified memory structure, and a Laplace crossover operator. Specifically, the adaptive somersault factor can better balance the exploration and exploitation capabilities of MRFO, helping avoid both slow and premature convergence. The multi-leader mechanism with a modified memory structure \cite{abd2021multi} and the Laplace crossover operator \cite{deep2007new} assist the algorithm in escaping local optima by changing the leader position in the position update formula and integrating different parent features to explore new solutions in the solution space, respectively. We utilize the CEC2017 benchmark suite to evaluate the performance of MLMRFO in terms of numerical optimization. Furthermore, we employ MLMRFO to address four engineering design problems and compare its performance to that of state-of-the-art optimization algorithms. This comparison validates the quality of MLMRFO's solutions in tackling real-world challenges.
\par The paper's primary contributions are summarized as follows:
\begin{itemize}
	\item An adaptive somersault factor is proposed to help the algorithm better balance the exploration and exploitation during the iterative process, thereby avoiding slow and premature convergence.
	\item A multi-leader mechanism with a modified memory structure is introduced to fully utilize the available information of multiple elite individuals, preventing the algorithm from falling into local optima.
	\item The Laplace crossover operator is introduced to explore new solutions in the solution space by amalgamating diverse parental traits from the parent generation, aiding the algorithm in escaping local optima.
	\item We conduct an ablation experiment on the CEC2017 test suite to demonstrate the optimization ability of each improved strategy of MLMRFO on MRFO. We compare MLMRFO with other comparison algorithms on the CEC2017 test suite and four engineering problems to evaluate its performance in solving numerical and engineering optimization problems.
\end{itemize}
\par The rest of the paper is organized as follows: Section \ref{sec2} describes the MRFO method. Section \ref{sec3} presents the proposed MLMRFO method. Section \ref{sec4} details the experiments and analysis, including an ablation experiment, a comparison of MLMRFO with other algorithms and an evaluation of the experimental results. Section \ref{sec5} applies MLMRFO to four engineering design problems. The final section summarizes the findings.
\section{Manta ray foraging optimization}\label{sec2}
Manta ray foraging optimization algorithm is a novel meta-heuristic algorithm proposed by Zhao et al. in 2020 \cite{zhao2020manta}, and its inspiration is derived from manta rays' foraging behaviors. Manta ray foraging behaviors are categorized into three types: chain foraging, spiral foraging, and somersault foraging. These three behaviors and their mathematical models are described in detail in the following subsections.
\subsection{Chain foraging behavior}\label{subsec1}
From their heads to tails, manta rays can form a feeding chain to approach and feed on plankton. Except for the manta ray at the head of the chain, the others must not only approach the food but also follow the manta ray in front of them during movement. In MRFO, the location with the highest concentration of food corresponds to the optimal solution identified until now. Therefore, each manta ray's new position is updated based on both the optimal solution identified until now and the position of the preceding individual. The following formula describes the chain foraging behavior of manta rays:
\begin{eqnarray}
	\label{eq:1}
	x_{i}^{d}(t+1) & = & \left\{\begin{array}{l}x_{i}^{d}(t)+r \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right)\\+\alpha \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right) \quad i=1 \\x_{i}^{d}(t)+r \cdot\left(x_{i-1}^{d}(t)-x_{i}^{d}(t)\right)\\+\alpha \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right) \quad i=2, \ldots, N\end{array}\right.
\end{eqnarray}
\begin{equation}
	\label{eq:2}
	\alpha = 2 \cdot r \cdot \sqrt{\left|\log\left(r\right)\right|}
\end{equation}
\par Where $x_{i}^{d}(t)$ is the $d$-th dimension of the $i$-th individual at time $t$, $x_{best}^{d}(t)$ is the $d$-th dimension of the optimal location at time $t$, $r$ is a random vector ranging between [0, 1], and $\alpha$ is the weighting factor.
\subsection{Cyclone foraging behavior}\label{subsec2}
\par Once manta rays encounter food, they establish a long foraging chain and spiral swim towards the food source. Each manta ray's position update is influenced by both the optimal solution identified until now and the preceding individual in the foraging chain. The following formula describes the spiral foraging behavior of manta rays in detail:
\begin{equation}
	\label{eq:3}
	x_{i}^{d}(t+1)  = \left\{\begin{array}{l}x_{\text {best }}^{d}(t)+r \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right)+\beta \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right) \quad i  = 1 \\x_{\text {best }}^{d}(t)+r \cdot\left(x_{i-1}^{d}(t)-x_{i}^{d}(t)\right)+\beta \cdot\left(x_{\text {best }}^{d}(t)-x_{i}^{d}(t)\right) \quad i  = 2, \ldots, N\end{array}\right.
\end{equation}
\begin{eqnarray}
	\label{eq:4}
	\beta & = & 2 e^{r_{1} \frac{T-t+1}{T}} \cdot \sin \left(2 \pi r_{1}\right)
\end{eqnarray}
\par Where $\beta$ represents the weighting factor, $r_{1}$ is a random number between [0, 1], and $T$ stands for the maximum number of iterations.
\par In the spiral foraging behavior represented by Eq. (\ref{eq:3}), each individual conducts random searches around the optimal solution identified until now, which enhances MRFO's exploitation capability. Moreover, spiral foraging behavior can also improve MRFO's exploration ability. By choosing a new random position within the search space as a reference point, each manta ray individual can explore a new position distant from the optimal solution identified until now. The mathematical formula is as follows:
\begin{eqnarray}
	\label{eq:5}
	x_{\text {rand }}^{d} & = & L b^{d}+r \cdot\left(U b^{d}-L b^{d}\right)
\end{eqnarray}
\begin{eqnarray}
	\label{eq:6}
	x_{i}^{d}(t+1)  =  \left\{\begin{array}{ll}x_{\text {rand }}^{d}+r \cdot\left(x_{\text {rand }}^{d}-x_{i}^{d}(t)\right)+\beta \cdot\left(x_{\text {rand }}^{d}-x_{i}^{d}(t)\right) \quad i  =  1 \\x_{\text {rand }}^{d}+r \cdot\left(x_{i-1}^{d}(t)-x_{i}^{d}(t)\right)+\beta \cdot\left(x_{\text {rand }}^{d}-x_{i}^{d}(t)\right) \quad i =  2, \ldots, N\end{array}\right.
\end{eqnarray}
\par Where $x_{\text {rand }}^{d}$ represents the $d$-th dimension of a random position generated in the search space at time $t$, and $Ub^{d}$ and $Lb^{d}$ stand for the upper and lower limits of the search space in the $d$-th dimension, respectively.

\subsection{Somersault foraging behavior}\label{subsec3}
\par In manta ray's somersault foraging strategy, manta rays use the food's location as a pivot. Each manta ray somersaults around the food, which represents the optimal solution identified until now, to a new position. The following formula describes the manta ray's somersault foraging behavior in detail:
\begin{eqnarray}
	\label{eq:7}
	x_{i}^{d}(t+1)=x_{i}^{d}(t)+S \cdot\left(r_{2} \cdot x_{\text {best }}^{d}-r_{3} \cdot x_{i}^{d}(t)\right), i=1, \ldots, N
\end{eqnarray}
\par Where $S$ is a coefficient that determines the range of manta ray's somersault foraging, and $r_{2}$ and $r_{3}$ are random numbers within the interval [0, 1].
\par The pseudocode of MRFO is given in Algorithm~\ref{alg1}.
\begin{algorithm}
	\caption{The framework of MRFO}
	\label{alg1}
	\begin{algorithmic}[1]
		\State \textbf{Begin}
		\State Set the Initial parameters of MRFO.
		\State Initialize manta ray populations. 
		\State Calculate each individual manta ray's fitness, and obtain best position and the associated fitness value.
		\While {$t<T$} 
		\For {$i$=1 to $N$}
		\If{$rand<0.5$} 
		\If{$t/T<rand$}
		\State Update manta ray's position by Eq. (\ref{eq:3}) and Eq. (\ref{eq:4}) 
		\Else 
		\State Update manta ray's position by Eq. (\ref{eq:6}) 
		\EndIf
		\Else 
		\State Update manta ray's position by Eq. (\ref{eq:1})
		\EndIf
		\State Update manta ray's optimal position and the associated fitness value.
		\State Update manta ray's position by Eq. (\ref{eq:7})
		\State Update manta ray's optimal position and the associated fitness value.
		\EndFor
		\State Update global best position and the associated fitness value.
		\EndWhile
		\State Return global best position and the associated fitness value. 
		\State \textbf{End}
	\end{algorithmic}
\end{algorithm}
\section{Improved manta ray foraging algorithm}\label{sec3}

\subsection{Adaptive somersault factor S}\label{subsec4}
In the original MRFO, the somersault factor $S$ is a fixed value, which leads to a poor balance between exploration and exploitation, resulting in premature convergence and slow convergence. To overcome this limitation, we propose an adaptive somersault factor $S$. As shown in Fig. \ref{fig:1}, during the early phase of optimization, a larger $S$ value facilitates greater exploration and helps prevent premature convergence. Conversely, during the final phase of optimization, a smaller $S$ value enhances the algorithm's exploitation and mitigates slow convergence. Since MRFO's exploitation capability is weaker than its exploration capability \cite{abd2021grunwald}, we need to reduce $S$ appropriately during iteration to enhance its exploitation ability. In summary, compared with the fixed somersault factor $S$, the adaptive somersault factor $S$ improves the balance between the exploration and exploitation capabilities of MRFO during the iteration process, thereby avoiding slow and premature convergence. The mathematical formula is depicted below:
\begin{eqnarray}
	\label{eq:8}
	S  =  S_{max} +(S_{max}-S_{min})\cdot \cos \left(\frac{\pi}{2}\cdot\left(1+\frac{t}{\text {T}}\right)\right)
\end{eqnarray}

\begin{figure}[H]
	\centering
	\includegraphics[width=0.7\linewidth]{factor S.jpg}
	\caption{Adaptive somersault factor $S$}
	\label{fig:1}
\end{figure}
\par Where $S_{max}$ and $S_{min}$ represent the maximum and minimum values of the somersault factor $S$, respectively, and $T$ denotes the total number of iterations.
\subsection{Multi-leader mechanism with modified memory structure}\label{subsec5}
\subsubsection{Modified memory structure}\label{subsubsec5}
All three foraging behaviors of the original MRFO (chain foraging, spiral foraging, and somersault foraging) update positions around the optimal position, with fitness functions computed based on the n-dimensional vector of this optimal position. The enhancement of two components might be counterbalanced by the deterioration of one component, potentially leading to entrapment in local optima \cite{abd2021multi}. In MLMRFO, a novel memory mechanism is introduced, where each manta ray's position is updated based on $n$ global memories and $m$ individual memories. This approach reduces the probability of degradation of a single vector component and helps individuals avoid getting stuck in local optima. MLMRFO replaces $x_{best}$ in Eq. (\ref{eq:1}), Eq. (\ref{eq:3}), and Eq. (\ref{eq:7}) with position vectors determined by the multiple leaders selected by each individual. The updated equations are shown below:
\begin{eqnarray}
	\label{eq:9}
	x_{i}^{d}(t+1)  = \left\{\begin{array}{l}\overline{x_{gbest(i)}^{d}}(t)+r \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right)\\+\beta \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right) i  = 1 \\\overline{x_{gbest(i)}^{d}}(t)+r \cdot\left(x_{i-1}^{d}(t)-x_{i}^{d}(t)\right)\\+\beta \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right) i  = 2, \ldots, N\end{array}\right.
\end{eqnarray}
\begin{eqnarray}
	\label{eq:10}
	x_{i}^{d}(t+1)  = \left\{\begin{array}{l}x_{i}^{d}(t)+r \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right)\\+\alpha \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right) \quad i=1 \\x_{i}^{d}(t)+r \cdot\left(x_{i-1}^{d}(t)-x_{i}^{d}(t)\right)\\+\alpha \cdot\left(\overline{x_{gbest(i)}^{d}}(t)-x_{i}^{d}(t)\right) \quad i=2, \ldots, N\end{array}\right.
\end{eqnarray}	
\begin{eqnarray}
	\label{eq:11}
	x_{i}^{d}(t+1)=x_{i}^{d}(t)+S \cdot\left(r_{2} \cdot \overline{x_{gbest(i)}^{d}}(t)-r_{3} \cdot x_{i}^{d}(t)\right), i=1, \ldots, N
\end{eqnarray}
\par Where $\overline{x_{gbest(i)}^{d}}(t)$ is the average value of the position vector in the global memory of dimension $d$ at time $t$ of the $i$-th individual, and its definition is as follows:
\begin{eqnarray}
	\label{eq:12}
	\overline{x_{gbest(i)}^{d}}(t)=\frac{1}{LN}\sum_{j=1}^{LN}x_{L_{j}(i)}^{d}(t)
\end{eqnarray}
\par Where $LN$ is the number of leaders per individual and $L_{j}(i)$ is the index of the $j$-th leader of individual $i$.
\subsubsection{Multi-leader mechanism}\label{subsubsec6}
To fully utilize the information from multiple advantageous individuals and reduce the likelihood of being trapped in local optima, MLMRFO introduces a multi-leader mechanism. Initially, $L$ optimal leaders are chosen based on their fitness values. Subsequently, each individual manta ray selects $LN$ of its own leaders from the $L$ leaders according to the differences between its individual memory and the leaders' location information: the greater the difference between the individual memory and a leader's position information, the higher the likelihood that the leader will be chosen. The probability that individual $k$ chooses the $i$-th leader is $P_{k, i}$, and its definition is given below:
\begin{eqnarray}
	\label{eq:13}
	dis_{i} & = & \left\{\begin{array}{c}\frac{\sum_{d}\left|x_{i d}^{\text {gbest }}-\overline{x_{k d}^{lbest}}\right|}{\max _{j}\left\{\sum_{d}\left|x_{j d}^{gbest}-\overline{x_{k d}^{lbest}}\right|\right\}}, \quad \max _{j}\left\{\sum_{d}\left|x_{j d}^{\text {gbest}}-\overline{x_{k d}^{lbest}}\right|\right\} \neq 0 
		\\0, \quad \max _{j}\left\{\sum_{d}\left|x_{j d}^{\text {gbest}}-\overline{x_{k d}^{lbest}}\right|\right\}  =  0\end{array}\right.
\end{eqnarray}
\begin{eqnarray}
	\label{eq:14}
	P_{k, i} & = & \frac{dis_{i}}{\sum_{j} dis_{j}}\end{eqnarray}
\par Where $\overline{x_{k d}^{l b e s t}}$ represents the average value of an individual's memory in $d$-th dimension, and $x_{j d}^{g b e s t}$ represents the $d$-th dimension of the $j$-th global memory. 
\par Each individual's position update is influenced by the $LN$ leaders in each iteration. A proportion $p$ of manta ray individuals reselect leaders, while the others retain their choices from the previous generation. As iterations progress, the strategy of manta ray individuals in selecting leaders tends to stabilize.
\begin{eqnarray}
	\label{eq:15}
	p & = & \frac{T-t}{T}\end{eqnarray}
\subsection{Laplacian crossover operator}\label{subsec6}
Three foraging behaviors in the original manta ray foraging algorithm all generate the next generation of solutions by emphasizing the best solution. However, this can lead to local optima because the new solutions generated will be biased towards the best solution. To address this issue, we introduce the Laplace crossover operator (LX) \cite{deep2007new} into MRFO. LX combines different parent features to explore new solutions within the solution space, aiding the algorithm in escaping local optima. LX uses two parents $x^{(1)}=\left(x_{1}^{(1)}, x_{2}^{(1)}, \ldots, x_{n}^{(1)}\right)$ and $x^{(2)}=\left(x_{1}^{(2)}, x_{2}^{(2)}, \ldots, x_{n}^{(2)}\right)$ to produce two offspring $y^{(1)}=\left(y_{1}^{(1)}, y_{2}^{(1)}, \ldots, y_{n}^{(1)}\right)$ and $y^{(2)}=\left(y_{1}^{(2)}, y_{2}^{(2)}, \ldots, y_{n}^{(2)}\right)$. A distinctive characteristic of LX is that its offspring exhibit symmetry relative to the parents' positions. LX generates offspring as follows:
\begin{eqnarray}
	\label{eq:16}
	y_{i}^{1} & = & x_{i}^{1}+\beta \left|x_{i}^{1}-x_{i}^{2}\right|
\end{eqnarray}
\begin{eqnarray}
	\label{eq:17}
	y_{i}^{2} & = & x_{i}^{2}+\beta\left|x_{i}^{1}-x_{i}^{2}\right|
\end{eqnarray}
\par Where $\beta$ is a random variable following a Laplace distribution, generated according to the following formula:
\begin{eqnarray}
	\label{eq:18}
	\beta & = & \left\{\begin{array}{ll}a-b \log _{e}\left(u\right) & u \leq 1 / 2 \\a+b \log _{e}\left(u\right) & u>1 / 2\end{array}\right.
\end{eqnarray}
\par Where $a$ represents a position parameter, $b$ denotes a scale parameter, and $u$ signifies a uniformly distributed random variable within the range [0,1].
\par This paper applies LX to generate new manta ray offspring. Individuals in the population are paired two by two, and new individuals are generated using the Laplace crossover formula given below:
\begin{eqnarray}
	\label{eq:19}
	X_{\text {newR}}(t+1) & = & X_{\text {R }}(t)+\beta *abs(X_{\text {R}}(t)-X_{\text {Rpair}}(t))\end{eqnarray}
\begin{eqnarray}
	\label{eq:20}
	X_{\text{newRpair}}(t+1) & = & X_{\text{Rpair}}(t)+\beta \left|X_{\text{R}}(t)-X_{\text{Rpair}}(t)\right|
\end{eqnarray}
\par Where $R$ and $Rpair$ denote a pair of individuals randomly selected from the manta ray population. In each iteration, individuals are re-paired.
\par Relying on the greedy mechanism, the fitness values before and after the adoption of the strategy are evaluated, retaining the individual with the superior fitness value.
\par The pseudocode of MLMRFO is shown in Algorithm~\ref{alg2}.
\begin{algorithm}
	\caption{The framework of MLMRFO}
	\label{alg2}
	\begin{algorithmic}[1]
		\State \textbf{Begin}
		\State Set the Initial parameters of MLMRFO.
		\State Initialize manta ray populations.
		\State Calculate each individual manta ray's fitness, and obtain best position and the associated fitness value.
		\While {maximum numbers of iterations is not reached} 
		\State Select $L$ position vectors based on their fitness values.
		\For {$i$=1 to $N$}
		\State Update the individual
		memories of the individual $i$.
		\State Calculate the probability $P$ for individual $i$ to select each leader from $L$.
		\State Select $LN$ leaders from $L$ position vectors for individual $i$ and compute the average leader position by Eq. (\ref{eq:12})
		\EndFor
		\For {$i$=1  to  N}
		\If{$rand<0.5$} 
		\If{$t/T<rand$}
		\State Update manta ray's position by Eq. (\ref{eq:4}) and Eq. (\ref{eq:9}) 
		\Else 
		\State Update manta ray's position by Eq. (\ref{eq:6}) 
		\EndIf
		\Else
		\State Update manta ray's position by Eq. (\ref{eq:10})
		\EndIf
		\State Update manta ray's optimal position and the associated fitness value.
		\State Update manta ray's position by Eq. (\ref{eq:8}) and Eq. (\ref{eq:11})
		\State Update manta ray's optimal position and the associated fitness value.
		\EndFor
		\For {$i$=1 to $N$}
		\State Update manta ray's position by Eq. (\ref{eq:19}) and Eq. (\ref{eq:20})
		\State Update manta ray's optimal position and the associated fitness value.
		\EndFor
		\State Update global best position and the associated fitness value.
		\EndWhile
		\State Return global best position and the associated fitness value. 
		\State \textbf{End}
	\end{algorithmic}
\end{algorithm}
\subsection{Time complexity analysis}\label{subsec5}
MLMRFO's time complexity depends on the dimensions of the problem $d$, the population size of manta rays $N$, the maximum number of iterations $T$, and the count of global memories $n$.
\begin{flalign*}
	&O(\mathrm{MLMRFO}) = O(T(O(\text{cyclone foraging} + \text{chain foraging})&\\
	&+O(\text{somersault foraging})+O(\text{Laplace crossover operator})&\\ 
	&+O(\text{multi-leadership mechanism})))&\\
	&=O(TNd)+O(TNd)+O(TNd)+O(2TNd+Tnd+3TnN+TnNd)&\\
	&=O(5TNd+Tnd+3TnN+TnNd)& 
\end{flalign*}
\section{Simulation experiments and result analysis}\label{sec4}
\subsection{Test Functions and Environment Settings}\label{subsec4.1}
To comprehensively and effectively evaluate the overall performance of MLMRFO, this article employs the well-known and authoritative CEC2017 test function suite \cite{wang2021novel} to conduct comparative experiments on multidimensional function optimization. The CEC2017 test function suite includes 29 functions, each with an independent variable within the range $[-100, 100]$. For accurate results, all experiments were carried out on the same computer running Windows 11 (64-bit) with an Intel Core i5-10210U CPU (1.60GHz) and 16GB of RAM, using MATLAB 2022a as the programming tool. For all algorithms, the population size was set to $N$=50, with 3000 iterations fixed. Each test function was run 20 times per algorithm.
\subsection{Ablation analysis of improvement strategies}\label{subsec9}
As observed in Section \ref{sec3}, we introduced three improvement strategies to the original MRFO: the Laplace crossover operator (LX), the adaptive somersault factor (AS), and the multi-leader mechanism with modified memory structure (MU). We tested the improved algorithms independently under various strategies and analyzed the impact of these enhancement strategies on MRFO based on the test results. As indicated in Table~\ref{tab1}, LX represents the incorporation of the Laplace crossover operator, AS denotes the incorporation of the adaptive somersault factor, and MU refers to the introduction of the multi-leader mechanism with modified memory structure. Furthermore, if an algorithm employs the corresponding strategy, it is represented by "1"; otherwise, it is represented by "0".
\par In Table~\ref{tab2}, the ranking for each function is established based on the mean outcome of each function, and the final average ranking is then calculated from these individual rankings. Furthermore, Table~\ref{tab2} reveals that the MLMRFO algorithm achieves the most favorable average ranking, indicating its notable advantages over other comparative algorithms. The average rankings of ASMRFO, MUMRFO, and LXMRFO are superior to those of the original MRFO as well. The results suggest that any of the three improvement strategies can notably enhance MRFO's performance, and combining these three strategies to optimize MLMRFO yields superior results compared to using any single improvement strategy alone. Overall, the analysis demonstrates that MLMRFO is the optimal improvement method for MRFO.

\begin{table}[h]
	\caption{Various MRFOs with three strategies}\label{tab1}%
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&LX&AS&MU\\
		\midrule
		MLMRFO&1&1&1\\
		MRFO&0&0&0\\
		LXMRFO&1&0&0\\
		ASMRFO&0&1&0\\
		MUMRFO&0&0&1\\	           
		\botrule
	\end{tabular}
\end{table}
\footnotesize								
\begin{longtable}[h]{ccccccc}	
	\caption{Comparison results of MLMRFO, MRFO, LXMRFO, ASMRFO, and MUMRFO}\label{tab2}\\
	%\toprule
	\hline
	Function&Metric&MLMRFO&MRFO&LXMRFO&ASMRFO&MUMRFO\\
	\endfirsthead % table header shown only on the first page
	\caption{continue} \\
	\hline
	Function&Metric&MLMRFO&MRFO&LXMRFO&ASMRFO&MUMRFO\\
	\hline
	\endhead
	\hline
	\endfoot
	\hline
	\endlastfoot	
	\midrule
	F1&Ave&1.8150E+02&1.7619E+03&\textbf{1.7845E+02}&1.5816E+03&1.2459E+03\\
	&Std&1.2229E+02&1.5917E+03&\textbf{9.6787E+01}&2.3093E+03&1.8240E+03\\
	&Rank&2&5&1&4&3\\
	F3&Ave&\textbf{3.0000E+02}&\textbf{3.0000E+02}&\textbf{3.0000E+02}&\textbf{3.0000E+02}&\textbf{3.0000E+02}\\
	&Std&2.2587E-14&2.6082E-14&3.6885E-14&2.2587E-14&\textbf{0.0000E+00}\\
	&Rank&1&1&1&1&1\\
	F4&Ave&\textbf{4.0000E+02}&\textbf{4.0000E+02}&\textbf{4.0000E+02}&\textbf{4.0000E+02}&\textbf{4.0000E+02}\\
	&Std&\textbf{1.3954E-12}&1.2985E-03&3.9504E-10&1.5211E-03&2.8826E-04\\
	&Rank&1&1&1&1&1\\
	F5&Ave&5.1129E+02&5.2273E+02&5.1841E+02&5.1915E+02&\textbf{5.0801E+02}\\
	&Std&5.4713E+00&1.1541E+01&1.0767E+01&7.2461E+00&\textbf{3.7434E+00}\\
	&Rank&2&5&3&4&1\\
	F6&Ave&\textbf{6.0002E+02}&6.0057E+02&6.0064E+02&6.0008E+02&6.0009E+02\\
	&Std&\textbf{6.4647E-02}&1.4968E+00&2.0401E+00&2.3413E-01&2.7844E-01\\
	&Rank&1&4&5&2&3\\
	F7&Ave&\textbf{7.2145E+02}&7.4473E+02&7.4125E+02&7.3302E+02&7.2328E+02\\
	&Std&\textbf{6.0498E+00}&1.9111E+01&1.8856E+01&1.1258E+01&7.0397E+00\\
	&Rank&1&5&4&3&2\\
	F8&Ave&\textbf{8.1149E+02}&8.2438E+02&8.2144E+02&8.1791E+02&8.1234E+02\\
	&Std&7.0611E+00&8.1568E+00&6.3214E+00&1.0043E+01&\textbf{4.5239E+00}\\
	&Rank&1&5&4&3&2\\
	F9&Ave&\textbf{9.0017E+02}&9.0155E+02&9.0674E+02&9.0121E+02&9.0047E+02\\
	&Std&\textbf{3.0618E-01}&3.6111E+00&1.9375E+01&1.9624E+00&1.1468E+00\\
	&Rank&1&4&5&3&2\\
	F10&Ave&1.3484E+03&1.7170E+03&1.7678E+03&1.7276E+03&\textbf{1.1963E+03}\\
	&Std&2.4359E+02&2.9382E+02&3.2036E+02&3.2991E+02&\textbf{1.7605E+02}\\
	&Rank&2&3&5&4&1\\
	F11&Ave&1.1069E+03&1.1132E+03&1.1091E+03&1.1087E+03&\textbf{1.1045E+03}\\
	&Std&5.4637E+00&1.2202E+01&5.1909E+00&7.6230E+00&\textbf{5.0465E+00}\\
	&Rank&2&5&4&3&1\\
	F12&Ave&6.4355E+03&1.5943E+04&\textbf{5.7815E+03}&1.3795E+04&9.4507E+03\\
	&Std&6.0797E+03&1.1858E+04&\textbf{5.0799E+03}&1.3906E+04&6.3393E+03\\
	&Rank&2&5&1&4&3\\
	F13&Ave&\textbf{1.3983E+03}&1.5292E+03&1.4691E+03&1.5249E+03&1.5721E+03\\
	&Std&\textbf{9.3120E+01}&1.5156E+02&1.5849E+02&1.4176E+02&1.0082E+02\\
	&Rank&1&4&2&3&5\\
	F14&Ave&1.4135E+03&1.4217E+03&\textbf{1.4095E+03}&1.4201E+03&1.4192E+03\\
	&Std&9.8379E+00&1.1784E+01&\textbf{8.1194E+00}&9.7903E+00&1.2372E+01\\
	&Rank&2&5&1&4&3\\
	F15&Ave&\textbf{1.5073E+03}&1.5096E+03&1.5099E+03&1.5145E+03&1.5091E+03\\
	&Std&\textbf{4.9382E+00}&5.2773E+00&9.6451E+00&9.1446E+00&6.1455E+00\\
	&Rank&1&3&4&5&2\\
	F16&Ave&1.6548E+03&1.6761E+03&1.6869E+03&1.6881E+03&\textbf{1.6185E+03}\\
	&Std&7.3244E+01&7.4598E+01&6.9183E+01&1.1310E+02&\textbf{4.3664E+01}\\
	&Rank&2&3&4&5&1\\
	F17&Ave&\textbf{1.7234E+03}&1.7334E+03&1.7358E+03&1.7285E+03&1.7270E+03\\
	&Std&1.4465E+01&1.7392E+01&1.6565E+01&1.5758E+01&\textbf{1.0300E+01}\\
	&Rank&1&4&5&3&2\\
	F18&Ave&\textbf{1.8306E+03}&3.9563E+03&1.8379E+03&4.0257E+03&3.6811E+03\\
	&Std&\textbf{1.8926E+01}&2.3189E+03&3.0632E+01&2.6654E+03&2.3634E+03\\
	&Rank&1&4&2&5&3\\
	F19&Ave&\textbf{1.9067E+03}&1.9123E+03&1.9131E+03&1.9129E+03&1.9109E+03\\
	&Std&6.6608E+00&5.2364E+00&1.6884E+01&4.7753E+00&\textbf{4.5187E+00}\\
	&Rank&1&3&5&4&2\\
	F20&Ave&2.0069E+03&2.0183E+03&2.0175E+03&2.0137E+03&\textbf{2.0066E+03}\\
	&Std&\textbf{6.8761E+00}&1.3804E+01&1.6740E+01&1.2859E+01&7.9476E+00\\
	&Rank&2&5&4&3&1\\
	F21&Ave&2.2012E+03&2.2006E+03&2.2008E+03&\textbf{2.2006E+03}&2.2009E+03\\
	&Std&1.2864E+00&\textbf{1.1592E+00}&1.2511E+00&1.1604E+00&1.2295E+00\\
	&Rank&5&2&3&1&4\\
	F22&Ave&2.3014E+03&\textbf{2.2986E+03}&2.3015E+03&2.3017E+03&2.3008E+03\\
	&Std&7.4034E-01&1.2580E+01&6.7055E-01&1.0024E+00&\textbf{3.1888E-01}\\
	&Rank&3&1&4&5&2\\
	F23&Ave&2.6155E+03&2.6222E+03&2.6220E+03&\textbf{2.6009E+03}&2.6101E+03\\
	&Std&6.9036E+00&9.1476E+00&7.5664E+00&7.1038E+01&\textbf{4.6876E+00}\\
	&Rank&3&5&4&1&2\\
	F24&Ave&2.6193E+03&2.5824E+03&\textbf{2.5453E+03}&2.5739E+03&2.6067E+03\\
	&Std&1.2250E+02&1.2522E+02&\textbf{1.0512E+02}&1.1604E+02&1.2111E+02\\
	&Rank&5&3&1&2&4\\
	F25&Ave&2.9424E+03&\textbf{2.9269E+03}&2.9296E+03&2.9338E+03&2.9338E+03\\
	&Std&\textbf{1.4898E+01}&2.3410E+01&2.2780E+01&2.0571E+01&2.0566E+01\\
	&Rank&5&1&2&3&4\\
	F26&Ave&2.9204E+03&2.9249E+03&\textbf{2.9067E+03}&2.9209E+03&2.9297E+03\\
	&Std&7.4630E+01&1.2511E+02&9.3014E+01&\textbf{4.6366E+01}&6.6406E+01\\
	&Rank&2&4&1&3&5\\
	F27&Ave&\textbf{3.0991E+03}&3.1031E+03&3.1046E+03&3.1018E+03&3.0996E+03\\
	&Std&\textbf{3.3101E+00}&1.2518E+01&9.3082E+00&9.7829E+00&4.6609E+00\\
	&Rank&1&4&5&3&2\\
	F28&Ave&3.1431E+03&3.1498E+03&3.1555E+03&\textbf{3.1296E+03}&3.2032E+03\\
	&Std&9.9794E+01&1.0813E+02&1.0842E+02&\textbf{7.0709E+01}&1.7121E+02\\
	&Rank&2&3&4&1&5\\
	F29&Ave&3.1795E+03&3.2000E+03&3.1985E+03&3.1881E+03&\textbf{3.1724E+03}\\
	&Std&2.6673E+01&4.5587E+01&2.9864E+01&2.7175E+01&\textbf{2.4433E+01}\\
	&Rank&2&5&4&3&1\\
	F30&Ave&\textbf{4.4449E+04}&1.6838E+05&1.2619E+05&1.2857E+05&2.0872E+05\\
	&Std&\textbf{1.8268E+05}&3.3499E+05&2.9932E+05&2.9848E+05&3.6301E+05\\
	&Rank&1&4&2&3&5\\
	\hline 	 
	{Average Rank}&&\textbf{1.93}&3.66&3.14&3.07&2.52 
	\\               
	%\botrule
\end{longtable}
\normalsize
\subsection{Results analysis for the CEC2017 Test Functions}\label{subsec4.3}
\par In this subsection, the original MRFO \cite{zhao2020manta}, four renowned optimization algorithms (WOA \cite{mirjalili2016whale}, GWO \cite{mirjalili2014grey}, HHO \cite{heidari2019harris}, and SMA \cite{li2020slime}), and two representative improved MRFO variants (AMRFO \cite{jena2021maximum} and HYMRFO \cite{zhang2023manta}) are compared with MLMRFO. Table~\ref{tab3} presents the parameter settings for these algorithms. These values are consistent with those reported in the original papers and the source code of the corresponding algorithms. The experiments were conducted in 10-dimensional and 50-dimensional spaces, respectively. Additional experimental parameters are detailed in Section \ref{subsec4.1}.
\begin{table}[h]
	\caption{Parameter settings of different algorithms.}\label{tab3}%
	\begin{tabular}{@{}ll@{}}
		\toprule
		Algorithm & Parameter   \\
		\midrule
		MLMRFO&$Smin$=1.5, $Smax$=2.5, $LN$=3, $m$=4, $n$=5, $a$=0, $b$=1\\
		MRFO    &$S$=2  \\
		AMRFO    &$S$=2, $A$=3\\
		HYMRFO    &$S$=2\\
		SMA  &$z$=0.03\\   
		WOA    & $b$=1, $l$=[-1,1], $a$=[2 0] (linear reduction)\\
		GWO    & $a$=[2 0] (linear reduction)\\
		HHO &$\beta$=1.5, $E_{0}$=[-1,1]\\                                        
		\botrule
	\end{tabular}
\end{table}
\par To ascertain whether there is a notable difference between optimal results obtained by MLMRFO and other algorithms, we conducted a Wilcoxon rank-sum test \cite{derrac2011practical} on the outcomes produced by each algorithm. According to Wilcoxon rank-sum test theory, a $p$-value below 0.05 suggests a notable difference between MLMRFO and the compared algorithm. A state value of + indicates that MLMRFO performs better than the comparison algorithm, while a state value of - indicates that MLMRFO performs worse. A state of = indicates that MLMRFO has no significant differences from the comparison algorithm.
\par Table~\ref{tab4} illustrates the mean, standard deviation, mean ranking and $p$-value of the objective function values across each algorithm's performance on the CEC2017 test suite with $dim$ = 10. For functions F1, F3-F5, F8, F12-F20, F23, and F29-F30, MLMRFO exhibits the best average optimization results among the 8 algorithms, highlighting its robust optimization capabilities. For functions F11 and F26, MLMRFO's average optimization results are slightly inferior to SMA's and MRFO's, respectively, but its standard deviation is optimal among all eight algorithms, demonstrating MLMRFO's stability on these functions. Regarding functions F6 and F7, both MLMRFO's average optimization results and standard deviation are slightly behind SMA but superior to other algorithms, indicating MLMRFO's effectiveness on these functions. For the remaining eight functions, MLMRFO performs adequately. In terms of the average ranking of means, MLMRFO achieves an average ranking value of 1.93, securing the top position among the algorithms, significantly outperforming others. In Table~\ref{tab4}, MLMRFO has more + than = and - compared with the results of other comparison algorithms, indicating that MLMRFO is more competitive than other comparison algorithms in solving the 10-dimensional CEC2017 test function.
\par Tables~\ref{tab5} illustrates the mean, standard deviation, mean ranking and $p$-value of the objective function values across each algorithm's performance on the CEC2017 test suite with $dim$ = 50. For functions F1, F4, F6, F8-F9, F11-F16, F18, F21-F22, F28, and F30, MLMRFO demonstrates the best average optimization results among the 8 algorithms, highlighting its robust optimization capabilities. For function F17, although MLMRFO's average optimization result is slightly inferior to GWO's, its standard deviation on this function is optimal among all 8 algorithms, indicating the stability of MLMRFO. For functions F5 and F20, the average optimization results of MLMRFO are slightly lower than GWO's but superior to other algorithms. For function F3, both MLMRFO's average optimization results and standard deviation are slightly lower than SMA's but superior to other algorithms. Regarding function F19, MLMRFO's average optimization result and standard deviation are lower than HYMRFO's but superior to other algorithms, indicating MLMRFO's effectiveness on these functions. Among the remaining 8 functions, MLMRFO performs adequately. In terms of the average ranking of means, MLMRFO achieves an average ranking value of 2.03, placing it first among the algorithms, significantly outperforming others. In Table~\ref{tab5}, MLMRFO has more + than = and - compared with the results of other comparison algorithms, indicating that MLMRFO is more competitive than other comparison algorithms in solving the 50-dimensional CEC2017 test function.

\par To intuitively reflect the changing trends of function values, we selected representative functions from the CEC2017 test suite and plotted the convergence curves of different algorithms for these functions. Fig. \ref{fig:2} and Fig. \ref{fig:3} show MLMRFO's convergence curves alongside those of other algorithms for $dim$=10 and $dim$=50, respectively.
\par According to Fig. \ref{fig:2}, for functions F1, F8, F16, F17, F18, F23, F29, and F30, MLMRFO converges quickly and exhibits better convergence accuracy than all other algorithms. For function F10, MLMRFO converges faster at the beginning, yet its final convergence accuracy is slightly worse than that of SMA and GWO but higher than that of all other algorithms.

\begin{figure}[H]
	\centering
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F1D10.jpg}
		\caption{F1}
		\label{fig:sub1}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F8D10.jpg}
		\caption{F8}
		\label{fig:sub2}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F10D10.jpg}
		\caption{F10}
		\label{fig:sub3}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F16D10.jpg}
		\caption{F16}
		\label{fig:sub4}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F17D10.jpg}
		\caption{F17}
		\label{fig:sub5}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F18D10.jpg}
		\caption{F18}
		\label{fig:sub6}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F23D10.jpg}
		\caption{F23}
		\label{fig:sub7}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F29D10.jpg}
		\caption{F29}
		\label{fig:sub8}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F30D10.jpg}
		\caption{F30}
		\label{fig:sub9}
	\end{subfigure}
	\caption{Convergence Curves of MLMRFO and Other Algorithms on CEC2017 ($dim$ = 10)}
	\label{fig:2}
\end{figure}

\par According to Fig. \ref{fig:3}, for functions F1, F9, F15, F16, F18, F21, F22, and F28, MLMRFO converges more quickly and achieves better convergence accuracy compared to all other algorithms. For function F5, MLMRFO starts to converge faster, yet its final convergence accuracy is slightly inferior to that of GWO but higher than that of all other algorithms.

\begin{figure}[H]
	\centering
	% row 1
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F1D50.jpg}
		\caption{F1}
		\label{fig:3sub1}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F5D50.jpg}
		\caption{F5}
		\label{fig:3sub2}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F9D50.jpg}
		\caption{F9}
		\label{fig:3sub3}
	\end{subfigure}
	% row 2
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F15D50.jpg}
		\caption{F15}
		\label{fig:3sub4}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F16D50.jpg}
		\caption{F16}
		\label{fig:3sub5}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F18D50.jpg}
		\caption{F18}
		\label{fig:3sub6}
	\end{subfigure}
	% row 3
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F21D50.jpg}
		\caption{F21}
		\label{fig:3sub7}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F22D50.jpg}
		\caption{F22}
		\label{fig:3sub8}
	\end{subfigure}
	\begin{subfigure}{0.32\textwidth}
		\centering
		\includegraphics[width=\linewidth]{F28D50.jpg}
		\caption{F28}
		\label{fig:3sub9}
	\end{subfigure}
	\caption{Convergence Curves of MLMRFO and Other Algorithms on CEC2017 ($dim$ = 50)}
	\label{fig:3}
\end{figure}
\par To summarize, in comparison with the other seven advanced optimization algorithms, MLMRFO converges quickly and ranks first in convergence accuracy on most functions under both $dim$=10 and $dim$=50 conditions. MLMRFO demonstrates significant effectiveness and superiority in enhancing the MRFO algorithm mechanism.
\section{Engineering design optimization}\label{sec5}
To assess the performance of MLMRFO in addressing engineering optimization design, this section applies MLMRFO to four engineering optimization design problems, namely the pressure vessel design problem \cite{singh2022mutation}, the tension spring design problem \cite{wang2024improved}, the three-bar truss design problem \cite{zhu2024dung}, and the speed reducer design problem \cite{nadimi2024multi}. The comparison algorithms selected in this section are the same as those in Section \ref{subsec4.3}. Each algorithm was independently executed 20 times, using a population size of 50 and a fixed number of 500 iterations.
\subsection{Pressure vessel design problem}\label{subsec10}
As illustrated in Fig. \ref{fig:4}, this problem aims to minimize the manufacturing costs associated with the vessel. The mathematical model for this problem is as follows:
\begin{flalign*}
	&Variables:x=\left[x_{1}, x_{2}, x_{3}, x_{4}\right]& \\
	&Minimize: f(x)=0.6224 x_{1} x_{3} x_{4}+1.7781 x_{2} x_{3}^{2}+3.1661 x_{1}^{2} x_{4}+19.84 x_{1}^{2} x_{3}&	\\
	&Subject \; to: g_{1}(x)=-x_{1}+0.0193 x_{3} \leq 0	& \\
	&\qquad \qquad \quad g_{2}(x)=-x_{2}+0.00954 x_{3} \leq 0& \\
	&\qquad \qquad \quad g_{3}(x)=-\pi x_{3}^{2} x_{4}-\frac{4}{3} \pi x_{3}^{3}+1296000 \leq 0&	\\
	&\qquad \qquad \quad g_{4}(x)=x_{4}-240 \leq 0& \\
	&With bounds:0 \leq x_{1} \leq 99,\, 0 \leq x_{2} \leq 99,\, 10 \leq x_{3} \leq 200,\, 10 \leq x_{4} \leq 200&
\end{flalign*}	
\begin{figure}[H]
	\centering
	\includegraphics[width=0.7\linewidth]{pressvessel.jpg}
	\caption{Model of the pressure vessel design problem}
	\label{fig:4}
\end{figure}
\par To evaluate the efficacy of MLMRFO algorithm in comparison to other optimization techniques, the performance results of eight distinct algorithms were analyzed. The findings are detailed in Table~\ref{tab6} and Table~\ref{tab7}. As illustrated in Table~\ref{tab6}, MLMRFO achieves the first place in mean, optimum value, and standard deviation, indicating that it outperforms the other algorithms. This superior performance suggests that MLMRFO can be utilized in the design of pressure vessel.

\subsection{Tension spring design problem}\label{subsec10}
As illustrated in Fig. \ref{fig:5}, this problem aims to minimize the weight of the tension spring. The mathematical model for this problem is as follows:
\begin{flalign*}
	&Variables:x=\left[x_{1}, x_{2}, x_{3}\right]&\\
	&Minimize:f(x)=\left(x_{3}+2\right) x_{2} x_{1}^{2}&\\
	&Subject \; to:g_{1}(x)=1-\frac{x_{2}^{3} x_{3}}{71785 x_{1}^{4}} \leq 0&\\
	&\qquad \qquad \quad g_{2}(x)=\frac{4 x_{2}^{2}-x_{1} x_{2}}{12566\left(x_{2} x_{1}^{3}-x_{1}^{4}\right)}+\frac{1}{5108 x_{1}^{2}} \leq 0& \\
	&\qquad \qquad \quad g_{3}(x)=1-\frac{140.45 x_{1}}{x_{2}^{2} x_{3}} \leq 0&\\
	& \qquad \qquad \quad g_{4}(x)=\frac{x_{1}+x_{2}}{1.5}-1 \leq 0&\\
	&With bounds:0.05 \leq x_{1} \leq 2.00, \, 0.25 \leq x_{2} \leq 1.30, \, 2.00   \leq x_{3} \leq 15.0&
\end{flalign*}	\par

\begin{figure}[H]
	\centering
	\includegraphics[width=0.7\linewidth]{SpringShow.jpg}
	\caption{Model of the tension spring design problem}
	\label{fig:5}
\end{figure}
\par To evaluate the efficacy of MLMRFO algorithm in comparison to other optimization techniques, the performance results of eight distinct algorithms were analyzed. The findings are detailed in Table~\ref{tab8} and Table~\ref{tab9}. As illustrated in Table~\ref{tab9}, MLMRFO achieves the first place in mean, optimum value, and standard deviation, indicating that it outperforms the other algorithms. This superior performance suggests that MLMRFO can be utilized in the design of extension spring.
\subsection{Three-bar truss design problem}\label{subsec10}
As illustrated in Fig. \ref{fig:6}, this problem aims to minimize the weight of the truss while satisfying the imposed constraints. The mathematical model for this problem is as follows:
\begin{flalign*}
	&Variables:{x}=\left[x_{1}, x_{2}, x_{3}\right]&\\
	&Minimize:f(x)= l\left(2 \sqrt{2} x_{1}+x_{2}\right) &\\
	&Subject \; to:g_{1}(x) = \frac{\sqrt{2} x_{1}+x_{2}}{\sqrt{2} x_{1}^{2}+2 x_{1} x_{2}} P-\sigma \leq 0&\\
	&\qquad \qquad \quad g_{2}(x) = \frac{x_{2}}{\left(\sqrt{2} x_{1}^{2}+2 x_{1} x_{2}\right) }P-\sigma \leq 0&\\
	&\qquad \qquad \quad g_{3}(x)  =  \frac{1}{\sqrt{2} x_{2}+x_{1}} P-\sigma \leq 0&\\
	&Where \;\quad l = 100, P = 2, \sigma = 2&\\
	&With bounds: 0\leq x_{1} \leq 1, 0 \leq x_{2} \leq 1& 
\end{flalign*}


\begin{figure}[H]
	\centering
	\includegraphics[width=0.7\linewidth]{ThreeBarTrussShow.jpg}
	\caption{Model of the three-bar truss design problem}
	\label{fig:6}
\end{figure}
\par To evaluate the efficacy of MLMRFO algorithm in comparison to other optimization techniques, the performance results of eight distinct algorithms were analyzed. The findings are detailed in Table~\ref{tab10} and Table~\ref{tab11}. As illustrated in Table~\ref{tab10}, MLMRFO achieves the first place in mean and standard deviation. It is tied for the first place in optimal value with MRFO, indicating that it outperforms the other algorithms. This superior performance suggests that MLMRFO can be utilized in the design of three-bar truss.


\subsection{Speed reducer design problem}\label{subsec10}
As illustrated in Fig. \ref{fig:7}, this problem aims to identify the optimal configuration that minimizes the mass of the speed reducer, subject to certain constraints. The mathematical model for this problem is as follows:
\begin{flalign*}
	&Variables:{x}=\left[x_{1}, x_{2}, x_{3},x_{4}, x_{5}, x_{6}, x_{7}\right]&\\
	&Minimize:f(x)= 0.7854 x_{2}^{2} x_{1}\left(14.9334 x_{3}-43.0934+3.3333 x_{3}^{2}\right)+0.7854\left(x_{5} x_{7}^{2}+x_{4} x_{6}^{2}\right) \\& -1.508 x_{1}\left(x_{7}^{2}+x_{6}^{2}\right)+7.477\left(x_{7}^{3}+x_{6}^{3}\right)&\\
	&Subject \; to: g_{1}(x)=-x_{1} x_{2}^{2} x_{3}+27 \leq 0&\\
	&\qquad \qquad \quad g_{2}(x)=-x_{1} x_{2}^{2} x_{3}^{2}+397.5 \leqslant 0 &\\
	&\qquad \qquad \quad g_{3}(x)=-x_{2} x_{6}^{4} x_{3} x_{4}^{-3}+1.93\leqslant 0 &\\
	&\qquad \qquad \quad g_{4}(x)=-x_{2} x_{7}^{4} x_{3} x_{5}^{-3}+1.93 \leqslant 0&\\
	&\qquad \qquad \quad g_{5}(x)=10 x_{6}^{-4} \sqrt{16.91 \times 10^{6}+\left(745 x_{4} x_{2}^{-1} x_{3}^{-1}\right)^{2}}-1100 \leqslant 0  &\\
	&\qquad \qquad \quad g_{6}(x)=10 x_{7}^{-3} \sqrt{157.5 \times 10^{6}+\left(745 x_{5} x_{2}^{-1} x_{3}^{-1}\right)^{2}}-850 \leqslant 0&\\
	&\qquad \qquad \quad g_{7}(x)=x_{2} x_{3}-40 \leqslant 0 &\\
	&\qquad \qquad \quad g_{8}(x)=-x_{1} x_{2}^{-1}+5 \leqslant 0 &\\
	&\qquad \qquad \quad g_{9}(x)=x_{1} x_{2}^{-1}-12 \leqslant 0 &\\
	&\qquad \qquad \quad g_{10}(x)=1.5 x_{6}-x_{4}+1.9 \leqslant 0 &\\
	&\qquad \qquad \quad g_{11}(x)=1.1 x_{7}-x_{5}+1.9 \leqslant 0&\\
	&With bounds:2.6 \leq x_{1} \leq 3.6,0.7 \leq x_{2} \leq 0.8,17 \leq x_{3} \leq 28&\\
	&\qquad \qquad \quad\quad7.3 \leq x_{4},x_{5}\leq 8.3,2.9 \leq x_{6} \leq 3.9,5\leq x_{7} \leq 5.5& 
\end{flalign*}


\begin{figure}[H]
	\centering
	\includegraphics[width=0.7\linewidth]{SpeedShow.jpg}
	\caption{Model of the speed reducer design problem}
	\label{fig:7}
\end{figure}
\par To evaluate the efficacy of MLMRFO algorithm in comparison to other optimization techniques, the performance results of eight distinct algorithms were analyzed. The findings are detailed in Table~\ref{tab12} and Table~\ref{tab13}. As illustrated in Table~\ref{tab12}, MLMRFO achieves the first place in mean and standard deviation, and it is tied with HYMRFO for the first place in optimal value, indicating that it outperforms the other algorithms. This superior performance suggests that MLMRFO can be utilized in the design of speed reducer.
\section{Conclusion and Future Works}\label{sec16}
This paper presents a multi-strategy improved Manta Ray Foraging Optimization (MLMRFO) for constrained engineering design problems, integrating three effective optimization strategies. The adaptive somersault factor enhances the balance between MLMRFO's global and local search abilities, mitigating issues of premature and slow convergence. The multi-leader mechanism with an improved memory structure enables the algorithm to fully leverage information from multiple dominant individuals, helping to avoid local optima. The Laplace crossover operator facilitates the combination of diverse parent features and the exploration of new solutions, aiding in escaping local optima.
\par Performance testing on the CEC2017 test suite demonstrates that MLMRFO outperforms other algorithms in terms of convergence speed and solution accuracy. The ablation experiment shows that each strategy adopted in MLMRFO improves the performance of MRFO. MLMRFO, which incorporates all three improvement strategies, exhibits the most significant performance enhancement. Additionally, MLMRFO's high solution accuracy on four engineering optimization problems confirms its effectiveness in practical engineering applications.
\par In future research, we plan to apply MLMRFO to solve various real-world problems, such as feature selection, flexible job shop scheduling, and resource allocation in wireless networks.

\section*{Acknowledgments}
This work was supported by the Zhejiang Provincial Natural Science Foundation of China under Grant No. LY22F020019, Public-welfare Technology Application Research of Zhejiang Province in China under Grant LGG22F020032, the Zhejiang Science and Technology Plan Project under Grant No. 2022C01045, and the National Natural Science Foundation of China under Grants (No. 61972359 and 62132014).
\section*{Data Availability}
The datasets used during the current study are available from the corresponding author upon reasonable request.
\section*{Declarations}
\textbf{Conflict of Interest} The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper.

\section*{Appendix}\label{secA1}
See Tables \ref{tab4},\ref{tab5},\ref{tab6},\ref{tab7},\ref{tab8},\ref{tab9},\ref{tab10},\ref{tab11},\ref{tab12},\ref{tab13}.
\small
\begin{landscape}
	\begin{longtable}[htp]{p{1cm}p{1.2cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}}
		\caption{Results of MLMRFO and other optimization algorithms running 20 times on CEC2017 ($dim$=10)}\label{tab4}\\
		\hline		
		Function&Metric&MLMRFO&MRFO&AMRFO&HYMRFO&WOA&GWO&SMA&HHO\\
		\hline
		\endfirsthead % table header shown only on the first page
		\caption{continue} \\
		\hline
		Function&Metric&MLMRFO&MRFO&AMRFO&HYMRFO&WOA&GWO&SMA&HHO\\
		\hline
		\endhead
		\hline
		\endfoot
		% footer for the final page of the table
		\hline
		\endlastfoot
		F1&Ave&\textbf{2.2492E+02}&1.8861E+03&1.9969E+03&1.5437E+03&1.2193E+05&4.6719E+07&8.7781E+03&2.0832E+05\\
		&Std&\textbf{1.6982E+02}&1.9497E+03&1.9604E+03&1.5250E+03&2.6274E+05&1.3255E+08&4.6145E+03&1.3276E+05\\
		&Rank&1&3&4&2&6&8&5&7\\
		&	$p$-value	&&	2.3557E-06	&	8.2924E-05	&	6.8682E-04	&	6.7956E-08	&	6.7956E-08	&	4.5390E-07	&	6.7956E-08	\\
		F3&Ave&\textbf{3.0000E+02}&\textbf{3.0000E+02}&\textbf{3.0000E+02}&\textbf{3.0000E+02}&4.2710E+02&8.9782E+02&\textbf{3.0000E+02}&3.0064E+02\\
		&Std&\textbf{1.8442E-14}&2.2587E-14&2.2587E-14&2.9160E-14&1.2328E+02&1.1453E+03&9.6112E-05&3.2593E-01\\
		&Rank&1&1&1&1&7&8&1&6\\
		&	$p$-value	&&	6.5381E-01	&	6.5381E-01	&	2.2546E-01	&	1.5124E-08	&	1.5124E-08	&	1.5124E-08	&	1.5124E-08	\\
		F4&Ave&\textbf{4.0000E+02}&\textbf{4.0000E+02}&\textbf{4.0000E+02}&\textbf{4.0000E+02}&4.2624E+02&4.1609E+02&4.0815E+02&4.0762E+02\\
		&Std&\textbf{1.7202E-12}&1.1740E-03&1.0863E-03&1.0153E-03&4.0602E+01&2.1126E+01&1.5577E+01&1.5308E+01\\
		&Rank&1&1&1&1&8&7&6&5\\
		&	$p$-value	&&	6.6909E-08	&	6.6909E-08	&	6.6909E-08	&	6.6909E-08	&	6.6909E-08	&	6.6909E-08	&	6.6909E-08	\\
		F5&Ave&\textbf{5.1075E+02}&5.1985E+02&5.2234E+02&5.2139E+02&5.4634E+02&5.1319E+02&5.1329E+02&5.3729E+02\\
		&Std&5.0071E+00&1.1752E+01&1.2115E+01&8.8904E+00&1.4665E+01&6.6655E+00&\textbf{4.6975E+00}&1.4155E+01\\
		&Rank&1&4&6&5&8&2&3&7\\
		&	$p$-value	&&	1.8440E-03	&	1.7662E-04	&	1.3532E-04	&	6.7193E-08	&	2.9750E-01	&	2.7425E-02	&	3.3812E-07	\\
		F6&Ave&\textbf{6.0003E+02}&6.0053E+02&6.0134E+02&6.0132E+02&6.3011E+02&6.0050E+02&6.0003E+02&6.2586E+02\\
		&Std&1.4422E-01&1.0068E+00&2.6109E+00&3.2749E+00&1.3623E+01&8.6157E-01&\textbf{1.1033E-02}&1.0102E+01\\
		&Rank&1&4&6&5&8&3&1&7\\
		&	$p$-value	&&	6.7782E-03	&	1.1471E-04	&	2.2132E-02	&	6.6626E-08	&	5.9135E-07	&	1.1817E-06	&	6.6626E-08	\\
		F7&Ave&7.2295E+02&7.4403E+02&7.4963E+02&7.3831E+02&7.9063E+02&7.2604E+02&\textbf{7.2006E+02}&7.7303E+02\\
		&Std&7.4188E+00&1.4015E+01&1.8789E+01&1.0248E+01&2.9429E+01&9.0375E+00&\textbf{4.5177E+00}&2.1142E+01\\
		&Rank&2&5&6&4&8&3&1&7\\
		&	$p$-value	&&	8.5974E-06	&	2.3557E-06	&	2.0407E-05	&	6.7956E-08	&	2.6162E-01	&	2.3932E-01	&	1.6571E-07	\\
		F8&Ave&\textbf{8.0826E+02}&8.2244E+02&8.2055E+02&8.2099E+02&8.4409E+02&8.1261E+02&8.1279E+02&8.3033E+02\\
		&Std&\textbf{3.6105E+00}&8.1694E+00&7.8560E+00&8.3300E+00&1.3769E+01&4.4832E+00&6.2032E+00&7.8427E+00\\
		&Rank&1&6&4&5&8&2&3&7\\
		&	$p$-value	&&	7.2920E-07	&	2.0144E-06	&	1.2536E-06	&	6.6815E-08	&	2.3280E-03	&	2.3280E-03	&	6.6815E-08	\\
		F9&Ave&9.0194E+02&9.0206E+02&9.0482E+02&9.0135E+02&1.2798E+03&9.0563E+02&\textbf{9.0000E+02}&1.3454E+03\\
		&Std&3.3252E+00&4.9192E+00&8.1718E+00&2.0394E+00&2.7909E+02&1.3698E+01&\textbf{1.0399E-04}&2.6633E+02\\
		&Rank&3&4&5&2&7&6&1&8\\
		&	$p$-value	&&	9.3390E-01	&	7.2692E-02	&	7.4230E-01	&	6.3943E-08	&	3.6387E-01	&	3.1175E-02	&	1.1644E-07	\\
		F10&Ave&1.5230E+03&1.7735E+03&1.6594E+03&1.7714E+03&1.9866E+03&\textbf{1.4704E+03}&1.5165E+03&1.8794E+03\\
		&Std&2.7009E+02&2.6882E+02&2.1995E+02&2.2891E+02&3.1504E+02&2.2183E+02&\textbf{9.8787E+01}&2.5719E+02\\
		&Rank&3&6&4&5&8&1&2&7\\
		&	$p$-value	&&	9.7865E-03	&	2.7483E-02	&	2.5606E-03	&	4.1658E-05	&	8.6043E-01	&	2.3932E-01	&	8.3572E-04	\\
		F11&Ave&1.1073E+03&1.1088E+03&1.1109E+03&1.1115E+03&1.2044E+03&1.1366E+03&\textbf{1.1072E+03}&1.1497E+03\\
		&Std&\textbf{3.2928E+00}&5.2215E+00&7.5275E+00&1.3067E+01&9.4530E+01&3.4246E+01&3.8124E+00&4.9818E+01\\
		&Rank&2&3&4&5&8&6&1&7\\
		&	$p$-value	&&	3.9410E-01	&	1.4779E-01	&	3.6477E-01	&	7.8650E-08	&	9.1348E-08	&	7.3523E-01	&	9.1348E-08	\\
		F12&Ave&\textbf{6.3359E+03}&1.4351E+04&1.2008E+04&9.4565E+03&3.3815E+06&4.2954E+05&4.3433E+04&6.8989E+05\\
		&Std&\textbf{4.1681E+03}&1.3428E+04&1.1314E+04&7.6235E+03&4.1105E+06&6.3345E+05&1.6052E+04&7.1609E+05\\
		&Rank&1&4&3&2&8&6&5&7\\
		&	$p$-value	&&	1.2345E-02	&	4.0936E-01	&	3.2348E-01	&	5.2269E-07	&	3.9388E-07	&	1.0473E-06	&	6.7956E-08	\\
		F13&Ave&\textbf{1.3823E+03}&1.5395E+03&1.5336E+03&1.5218E+03&1.2564E+04&9.3190E+03&9.8823E+03&1.6184E+04\\
		&Std&\textbf{7.9691E+01}&1.7621E+02&1.7435E+02&1.1232E+02&1.1038E+04&4.6311E+03&1.0908E+04&9.8111E+03\\
		&Rank&1&4&3&2&7&5&6&8\\
		&	$p$-value	&&	3.0480E-04	&	2.3413E-03	&	1.0373E-04	&	9.1728E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		F14&Ave&\textbf{1.4163E+03}&1.4195E+03&1.4184E+03&1.4231E+03&1.5117E+03&2.5785E+03&1.4340E+03&1.4989E+03\\
		&Std&\textbf{1.0281E+01}&1.0996E+01&1.0960E+01&1.4510E+01&3.2648E+01&1.6300E+03&1.0339E+01&2.4957E+01\\
		&Rank&1&3&2&4&7&8&5&6\\
		&	$p$-value	&&	3.7933E-01	&	5.9786E-01	&	1.2643E-01	&	6.7956E-08	&	6.7956E-08	&	2.9249E-05	&	1.4309E-07	\\
		F15&Ave&\textbf{1.5082E+03}&1.5133E+03&1.5107E+03&1.5116E+03&2.8759E+03&3.1138E+03&1.5155E+03&1.7511E+03\\
		&Std&\textbf{5.0398E+00}&1.0010E+01&6.9551E+00&7.7239E+00&1.2532E+03&1.7944E+03&9.7949E+00&2.4387E+02\\
		&Rank&1&4&2&3&7&8&5&6\\
		&	$p$-value	&&	9.0907E-02	&	3.2348E-01	&	2.1841E-01	&	6.7956E-08	&	6.7956E-08	&	7.7118E-03	&	6.7956E-08	\\
		F16&Ave&\textbf{1.6369E+03}&1.7272E+03&1.6607E+03&1.7071E+03&1.7828E+03&1.7344E+03&1.6793E+03&1.8353E+03\\
		&Std&\textbf{6.8557E+01}&1.1118E+02&7.2592E+01&1.1082E+02&1.3911E+02&1.0871E+02&9.2237E+01&1.4529E+02\\
		&Rank&1&5&2&4&7&6&3&8\\
		&	$p$-value	&&	3.0566E-03	&	9.4608E-01	&	4.1124E-02	&	4.1658E-05	&	4.1658E-05	&	6.8682E-04	&	1.5757E-06	\\
		F17&Ave&\textbf{1.7224E+03}&1.7462E+03&1.7388E+03&1.7378E+03&1.7946E+03&1.7455E+03&1.7294E+03&1.7715E+03\\
		&Std&\textbf{1.4256E+01}&2.2936E+01&2.1691E+01&1.9623E+01&5.3291E+01&1.7729E+01&1.6440E+01&3.8182E+01\\
		&Rank&1&6&4&3&8&5&2&7\\
		&	$p$-value	&&	9.6593E-04	&	1.0581E-02	&	1.2345E-02	&	5.2269E-07	&	1.7936E-04	&	1.3328E-01	&	1.8030E-06	\\
		F18&Ave&\textbf{1.8344E+03}&3.7854E+03&3.6477E+03&3.1588E+03&1.8903E+04&2.8191E+04&2.2947E+04&1.3729E+04\\
		&Std&\textbf{2.4469E+01}&2.9266E+03&3.3101E+03&2.5512E+03&1.1531E+04&1.7622E+04&1.1859E+04&1.0606E+04\\
		&Rank&1&4&3&2&6&8&7&5\\
		&	$p$-value	&&	1.0646E-07	&	6.7956E-08	&	7.8980E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		F19&Ave&\textbf{1.9082E+03}&1.9122E+03&1.9135E+03&1.9103E+03&1.8598E+04&1.8826E+04&1.9736E+03&8.4924E+03\\
		&Std&8.8146E+00&\textbf{4.9616E+00}&5.6921E+00&5.7762E+00&1.4016E+04&5.8365E+04&2.0310E+02&6.8248E+03\\
		&Rank&1&3&4&2&7&8&5&6\\
		&	$p$-value	&&	5.5605E-03	&	1.6253E-03	&	2.9441E-02	&	6.7956E-08	&	6.0148E-07	&	1.6360E-01	&	6.7956E-08	\\
		F20&Ave&\textbf{2.0054E+03}&2.0171E+03&2.0172E+03&2.0149E+03&2.1445E+03&2.0863E+03&2.0193E+03&2.1414E+03\\
		&Std&\textbf{5.7619E+00}&1.3722E+01&1.3623E+01&1.1076E+01&7.3379E+01&5.6746E+01&9.0109E+00&7.0082E+01\\
		&Rank&1&3&4&2&8&6&5&7\\
		&	$p$-value	&&	1.0004E-02	&	8.2516E-03	&	8.5938E-03	&	6.5690E-08	&	6.5690E-08	&	2.8649E-05	&	6.5690E-08	\\
		F21&Ave&2.2004E+03&\textbf{2.2000E+03}&2.2007E+03&2.2001E+03&2.2925E+03&2.2972E+03&2.2952E+03&2.3187E+03\\
		&Std&8.6621E-01&\textbf{4.3015E-13}&1.1867E+00&5.8994E-01&7.0658E+01&4.1650E+01&4.7844E+01&6.3983E+01\\
		&Rank&3&1&4&2&5&7&6&8\\
		&	$p$-value	&&	4.5254E-03	&	2.7213E-04	&	1.4319E-03	&	2.9504E-08	&	1.1952E-07	&	2.9504E-08	&	1.8797E-07	\\
		F22&Ave&2.3012E+03&\textbf{2.2984E+03}&2.2999E+03&2.2996E+03&2.3021E+03&2.3067E+03&2.3987E+03&2.3132E+03\\
		&Std&\textbf{6.3657E-01}&1.8463E+01&9.3530E+00&1.1829E+01&3.4050E+01&7.7144E+00&3.2843E+02&4.0096E+00\\
		&Rank&4&1&3&2&5&6&8&7\\
		&	$p$-value	&&	9.0454E-03	&	1.1986E-01	&	1.9292E-02	&	1.6098E-04	&	1.9971E-04	&	5.1658E-06	&	6.7956E-08	\\
		F23&Ave&\textbf{2.6136E+03}&2.6223E+03&2.6219E+03&2.6177E+03&2.6462E+03&2.6168E+03&2.6187E+03&2.6669E+03\\
		&Std&5.1808E+00&8.8849E+00&1.0312E+01&\textbf{5.1525E+00}&1.9948E+01&8.0926E+00&5.2450E+00&2.3855E+01\\
		&Rank&1&6&5&3&7&2&4&8\\
		&	$p$-value	&&	2.1393E-03	&	7.1135E-03	&	2.2270E-02	&	2.3557E-06	&	2.5030E-01	&	6.5572E-03	&	6.7956E-08	\\
		F24&Ave&2.6078E+03&2.5791E+03&\textbf{2.5610E+03}&2.5656E+03&2.7734E+03&2.7401E+03&2.7565E+03&2.7130E+03\\
		&Std&1.2230E+02&1.1495E+02&1.0835E+02&1.0699E+02&1.3621E+01&8.4434E+00&\textbf{7.5853E+00}&1.2706E+02\\
		&Rank&4&3&1&2&8&6&7&5\\
		&	$p$-value	&&	3.5768E-01	&	3.1272E-01	&	4.8526E-01	&	5.3656E-08	&	2.1625E-03	&	1.1427E-07	&	2.5316E-05	\\
		F25&Ave&2.9321E+03&2.9221E+03&2.9289E+03&2.9287E+03&\textbf{2.9059E+03}&2.9277E+03&2.9268E+03&2.9273E+03\\
		&Std&2.2901E+01&2.3774E+01&2.2496E+01&2.2664E+01&9.1187E+01&\textbf{1.6864E+01}&2.3880E+01&2.3226E+01\\
		&Rank&8&2&7&6&1&5&3&4\\
		&	$p$-value	&&	1.2636E-01	&	2.3921E-01	&	1.6766E-01	&	1.6358E-01	&	1.0171E-01	&	4.4073E-01	&	4.5693E-01	\\
		F26&Ave&2.9174E+03&\textbf{2.9173E+03}&2.9289E+03&2.9250E+03&3.2411E+03&3.0006E+03&3.1745E+03&3.4078E+03\\
		&Std&\textbf{4.6157E+01}&1.0620E+02&1.2434E+02&5.6721E+01&4.4496E+02&2.9382E+02&4.0256E+02&4.6548E+02\\
		&Rank&2&1&4&3&7&5&6&8\\
		&	$p$-value	&&	2.9675E-01	&	1.6951E-01	&	8.9622E-02	&	4.7487E-05	&	2.6396E-04	&	6.3327E-06	&	2.0138E-05	\\
		F27&Ave&3.1009E+03&3.0995E+03&3.1013E+03&3.1045E+03&3.1262E+03&3.0950E+03&\textbf{3.0901E+03}&3.1586E+03\\
		&Std&6.8657E+00&5.1363E+00&6.0459E+00&1.1160E+01&3.1902E+01&7.6779E+00&\textbf{1.3475E+00}&4.0719E+01\\
		&Rank&4&3&5&6&7&2&1&8\\
		&	$p$-value	&&	9.8921E-01	&	6.1676E-01	&	1.8056E-01	&	1.3479E-03	&	1.3745E-06	&	6.7860E-08	&	1.5739E-06	\\
		F28&Ave&3.1683E+03&\textbf{3.1579E+03}&3.1893E+03&3.1830E+03&3.3041E+03&3.3763E+03&3.1658E+03&3.3752E+03\\
		&Std&1.1596E+02&1.1567E+02&1.4023E+02&1.2657E+02&1.5313E+02&\textbf{8.4525E+01}&1.2459E+02&1.3409E+02\\
		&Rank&3&1&5&4&6&8&2&7\\
		&	$p$-value	&&	2.8490E-01	&	5.5862E-02	&	1.3802E-01	&	5.7470E-05	&	6.0164E-07	&	5.4569E-03	&	1.6094E-06	\\
		F29&Ave&\textbf{3.1740E+03}&3.1939E+03&3.1825E+03&3.2011E+03&3.2560E+03&3.1920E+03&3.1760E+03&3.3085E+03\\
		&Std&2.3641E+01&4.0466E+01&\textbf{2.0376E+01}&3.1279E+01&7.2254E+01&4.5173E+01&4.9356E+01&6.3179E+01\\
		&Rank&1&5&3&6&7&4&2&8\\
		&	$p$-value	&&	6.7868E-02	&	1.3328E-01	&	6.0403E-03	&	9.2780E-05	&	2.9768E-01	&	4.4075E-01	&	2.9598E-07	\\
		F30&Ave&\textbf{3.5984E+03}&1.3343E+05&2.9239E+05&5.9169E+03&3.2470E+05&6.1737E+05&4.7333E+04&7.6139E+05\\
		&Std&\textbf{1.3230E+02}&2.9796E+05&3.9954E+05&1.8309E+03&4.0260E+05&9.4619E+05&1.8220E+05&1.3075E+06\\
		&Rank&1&4&5&2&6&7&3&8\\
		&	$p$-value	&&	2.5629E-07	&	1.4309E-07	&	6.0148E-07	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		\hline
		Average &rank&\textbf{1.93}&3.45&3.79&3.28&6.90&5.45&3.76&6.86\\
		+/=/-&&&17/12/0&16/13/0&16/13/0&29/0/0&14/11/4&12/11/6&29/0/0\\	
		%\hline				
	\end{longtable}
\end{landscape}
\normalsize
\small
\begin{landscape}
	\begin{longtable}{p{1cm}p{1.2cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}p{1.8cm}}% longtable takes no float-placement option; [h] is invalid here
		\caption{Results of MLMRFO and other optimization algorithms running 20 times on CEC2017 ($dim$=50)}\label{tab5}\\
		% (first-page header defined above; continuation header defined after \endfirsthead)
		\hline
		Function&Metric&MLMRFO&MRFO&AMRFO&HYMRFO&WOA&GWO&SMA&HHO\\
		\hline
		\endfirsthead % header used on the first page only
		\multicolumn{10}{l}{\tablename\ \thetable{} (continued)}\\
		\hline
		Function&Metric&MLMRFO&MRFO&AMRFO&HYMRFO&WOA&GWO&SMA&HHO\\
		\hline
		\endhead
		\hline
		\endfoot
		\hline
		\endlastfoot	
		%\caption{Results of MULXMRFO and other optimization algorithms running 20 times on cec2017(dim=10)}\label{tab4}%
		
		%\hline
		%\endhead
		%\begin{tabular}{@{}lllllllll@{}}
		%\toprule
		%Function&MULXMRFO&MRFO&AMRFO&HYMRFO&WOA&GWO&SMA&HHO\\
		%\midrule
		F1&Ave&\textbf{1.7144E+03}&3.9143E+03&5.3349E+03&7.0393E+03&1.8202E+08&6.6454E+09&1.3095E+04&5.2581E+07\\
		&Std&\textbf{1.7999E+03}&6.6177E+03&4.8715E+03&7.9351E+03&8.3900E+07&1.6734E+09&7.7235E+03&	1.0020E+07\\
		&Rank&1&2&3&4&7&8&5&6\\
		&	$p$-value	&&	3.7933E-01	&	5.5605E-03	&	2.2270E-02	&	6.7956E-08	&	6.7956E-08	&	4.5390E-07	&	6.7956E-08	\\
		F3&Ave&2.1579E+03&2.2974E+04&2.5218E+04&1.5428E+04&1.5283E+05&8.9230E+04&\textbf{1.0170E+03}&3.3536E+04\\
		&Std&2.4395E+03&5.7464E+03&8.7641E+03&5.4314E+03&5.5536E+04&1.4045E+04&\textbf{5.5165E+02}&	9.8939E+03\\
		&Rank&2&4&5&3&8&7&1&6\\
		&	$p$-value	&&	6.7956E-08	&	6.7956E-08	&	1.6571E-07	&	6.7956E-08	&	6.7956E-08	&	2.9441E-02	&	6.7956E-08	\\
		F4&Ave&\textbf{4.9979E+02}&5.0769E+02&5.0556E+02&5.0158E+02&8.3299E+02&1.1259E+03&5.7733E+02&	6.4264E+02\\
		&Std&4.6531E+01&5.3496E+01&5.7759E+01&4.8335E+01&8.9613E+01&3.4571E+02&\textbf{4.3551E+01}&	5.0227E+01\\
		&Rank&1&4&3&2&7&8&5&6\\
		&	$p$-value	&&	7.1498E-01	&	9.8921E-01	&	7.7639E-01	&	6.7956E-08	&	6.7956E-08	&	2.0407E-05	&	1.0646E-07	\\
		F5&Ave&7.2516E+02&8.1669E+02&8.1072E+02&8.1371E+02&9.7005E+02&\textbf{7.1193E+02}&7.3217E+02&	8.7785E+02\\
		&Std&4.4496E+01&6.0630E+01&4.9572E+01&4.4460E+01&7.9921E+01&3.9269E+01&6.3357E+01&	\textbf{2.7614E+01}\\
		&Rank&2&6&4&5&8&1&3&7\\
		&	$p$-value	&&	3.7051E-05	&	2.9249E-05	&	4.5401E-06	&	6.7956E-08	&	2.8530E-01	&	5.2499E-01	&	6.7956E-08	\\
		F6&Ave&\textbf{6.1382E+02}&6.4080E+02&6.4532E+02&6.3992E+02&6.7968E+02&6.1698E+02&6.1415E+02&	6.7023E+02\\
		&Std&8.4391E+00&9.7276E+00&7.9069E+00&1.2319E+01&9.9349E+00&\textbf{4.4365E+00}&1.1999E+01&	5.0270E+00\\
		&Rank&1&5&6&4&8&3&2&7\\
		&	$p$-value	&&	2.9598E-07	&	1.0646E-07	&	5.2269E-07	&	6.7956E-08	&	6.3892E-02	&	6.7501E-01	&	6.7956E-08	\\
		F7&Ave&1.0743E+03&1.4522E+03&1.4595E+03&1.4051E+03&1.7495E+03&1.0547E+03&\textbf{1.0143E+03}&	1.7820E+03\\
		&Std&6.6444E+01&1.7168E+02&1.6257E+02&2.0095E+02&8.8215E+01&\textbf{4.7351E+01}&4.7394E+01&	7.5792E+01\\
		&Rank&3&5&6&4&7&2&1&8\\
		&	$p$-value	&&	6.0148E-07	&	7.8980E-08	&	6.9166E-07	&	6.7956E-08	&	4.0936E-01	&	7.7118E-03	&	6.7956E-08	\\
		F8&Ave&\textbf{1.0053E+03}&1.1248E+03&1.1399E+03&1.1336E+03&1.2605E+03&1.0271E+03&1.0182E+03&	1.1702E+03\\
		&Std&3.6454E+01&4.6696E+01&5.7835E+01&5.4287E+01&6.6936E+01&5.4372E+01&4.3371E+01&	\textbf{2.8162E+01}\\
		&Rank&1&4&6&5&8&3&2&7\\
		&	$p$-value	&&	1.9177E-07	&	3.4156E-07	&	6.0148E-07	&	6.7956E-08	&	2.6162E-01	&	4.4075E-01	&	6.7956E-08	\\
		F9&Ave&\textbf{4.9965E+03}&9.8123E+03&9.4695E+03&1.0156E+04&2.3262E+04&6.6176E+03&1.1499E+04&	2.2264E+04\\
		&Std&\textbf{1.5828E+03}&2.7271E+03&2.5297E+03&2.4471E+03&7.1553E+03&3.2017E+03&4.8164E+03&	2.8106E+03\\
		&Rank&1&4&3&5&8&2&6&7\\
		&	$p$-value	&&	1.0473E-06	&	2.0616E-06	&	2.6898E-06	&	6.7956E-08	&	5.6517E-02	&	9.7480E-06	&	6.7956E-08	\\
		F10&Ave&7.3621E+03&\textbf{7.0517E+03}&7.6061E+03&7.4546E+03&1.0876E+04&7.1036E+03&7.1532E+03&8.7614E+03\\
		&Std&\textbf{6.7096E+02}&7.8258E+02&8.5378E+02&7.8863E+02&1.0229E+03&9.9526E+02&8.7342E+02&	1.1646E+03\\
		&Rank&4&1&6&5&8&2&3&7\\
		&	$p$-value	&&	2.6162E-01	&	3.6484E-01	&	8.3923E-01	&	6.7956E-08	&	3.5070E-01	&	7.9720E-01	&	1.4438E-04	\\
		F11&Ave&\textbf{1.2422E+03}&1.2477E+03&1.2740E+03&1.2441E+03&1.9270E+03&4.8452E+03&1.3920E+03&	1.4577E+03\\
		&Std&2.7433E+01&3.7659E+01&4.3101E+01&\textbf{2.6470E+01}&1.8557E+02&1.8679E+03&7.8329E+01&	1.0413E+02\\
		&Rank&1&3&4&2&7&8&5&6\\
		&	$p$-value	&&	4.7348E-01	&	1.7939E-02	&	7.9720E-01	&	6.7956E-08	&	6.7956E-08	&	2.2178E-07	&	6.7956E-08	\\
		F12&Ave&\textbf{1.4460E+05}&4.8279E+05&6.4713E+05&3.8407E+05&3.6232E+08&5.8365E+08&9.2787E+06&	8.4101E+07\\
		&Std&\textbf{8.6272E+04}&2.8885E+05&3.0766E+05&1.9992E+05&2.0408E+08&4.6111E+08&4.7729E+06&	4.2051E+07\\
		&Rank&1&4&2&3&7&8&5&6\\
		&	$p$-value	&&	1.2505E-05	&	5.2269E-07	&	3.2931E-05	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		F13&Ave&\textbf{3.9935E+03}&5.2368E+03&6.7743E+03&5.9268E+03&1.2037E+06&2.5520E+08&3.5819E+04&2.0676E+06\\
		&Std&\textbf{3.3557E+03}&4.5501E+03&6.4952E+03&5.5690E+03&8.4690E+05&5.8704E+08&1.3352E+04&1.0020E+06\\
		&Rank&1&2&4&3&6&8&5&7\\
		&	$p$-value	&&	2.3932E-01	&	1.8954E-01	&	5.0751E-01	&	6.7956E-08	&	6.7956E-08	&	1.0646E-07	&	6.7956E-08	\\
		F14&Ave&\textbf{4.3955E+03}&2.4313E+04&3.2177E+04&2.3605E+04&1.8456E+06&5.9098E+05&1.6274E+05&5.9922E+05\\
		&Std&\textbf{1.9778E+03}&1.4476E+04&1.9727E+04&2.0500E+04&1.0887E+06&5.3681E+05&1.4394E+05&	5.1557E+05\\
		&Rank&1&3&4&2&8&6&5&7\\
		&	$p$-value	&&	3.0691E-06	&	3.9388E-07	&	1.5757E-06	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		F15&Ave&\textbf{5.5320E+03}&9.8318E+03&9.4128E+03&7.8794E+03&2.8112E+05&2.8047E+07&2.6045E+04&3.5369E+05\\
		&Std&\textbf{5.4010E+03}&5.7138E+03&7.5238E+03&6.2318E+03&4.3049E+05&9.4971E+07&9.7988E+03&	1.4623E+05\\
		&Rank&1&4&3&2&6&8&5&7\\
		&	$p$-value	&&	1.7939E-02	&	1.1355E-01	&	2.7329E-01	&	6.7956E-08	&	6.7956E-08	&	9.1266E-07	&	6.7956E-08	\\
		F16&Ave&\textbf{2.8842E+03}&3.4744E+03&3.2785E+03&3.4127E+03&5.1504E+03&3.0399E+03&3.2885E+03&3.9893E+03\\
		&Std&4.3797E+02&5.1539E+02&3.5942E+02&3.3116E+02&7.0078E+02&3.4486E+02&\textbf{3.2652E+02}&	5.9737E+02\\
		&Rank&1&6&3&5&8&2&4&7\\
		&	$p$-value	&&	4.1550E-04	&	7.1135E-03	&	3.7499E-04	&	6.7956E-08	&	1.8954E-01	&	4.7025E-03	&	1.5757E-06	\\
		F17&Ave&2.9790E+03&3.3723E+03&3.1899E+03&3.3798E+03&4.1358E+03&\textbf{2.7824E+03}&3.0767E+03&3.5281E+03\\
		&Std&\textbf{2.3229E+02}&3.1457E+02&3.3337E+02&3.6665E+02&5.1785E+02&3.0701E+02&3.0370E+02&	3.8828E+02\\
		&Rank&2&5&4&6&8&1&3&7\\
		&	$p$-value	&&	2.4706E-04	&	1.0173E-01	&	3.3819E-04	&	2.2178E-07	&	1.6669E-02	&	2.5030E-01	&	4.6804E-05	\\
		F18&Ave&\textbf{4.5362E+04}&2.4366E+05&2.2819E+05&1.6070E+05&9.3620E+06&6.0957E+06&1.1972E+06&1.6657E+06\\
		&Std&\textbf{2.3777E+04}&1.6593E+05&1.2899E+05&8.7496E+04&7.3163E+06&7.7527E+06&8.9054E+05&	8.8779E+05\\
		&Rank&1&4&3&2&8&7&5&6\\
		&	$p$-value	&&	1.0646E-07	&	5.2269E-07	&	2.6898E-06	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	&	6.7956E-08	\\
		F19&Ave&1.7221E+04&1.9873E+04&2.0571E+04&\textbf{1.4768E+04}&3.4562E+06&1.3397E+06&2.1050E+04&8.1061E+05\\
		&Std&7.9241E+03&1.1514E+04&1.0520E+04&\textbf{7.6214E+03}&2.4749E+06&1.6958E+06&1.8834E+04&	5.1245E+05\\
		&Rank&2&3&4&1&8&7&5&6\\
		&	$p$-value	&&	6.1677E-01	&	2.1841E-01	&	3.2348E-01	&	7.8980E-08	&	6.7956E-08	&	9.0312E-01	&	6.7956E-08	\\
		F20&Ave&2.9128E+03&3.0911E+03&3.0612E+03&3.1132E+03&3.7453E+03&\textbf{2.8934E+03}&3.1338E+03&3.3745E+03\\
		&Std&3.2002E+02&3.4777E+02&3.4046E+02&3.0914E+02&2.5562E+02&\textbf{2.2976E+02}&3.6758E+02&	2.3746E+02\\
		&Rank&2&4&3&5&8&1&6&7\\
		&	$p$-value	&&	1.7193E-01	&	1.9883E-01	&	6.0111E-02	&	1.2346E-07	&	8.6043E-01	&	3.1517E-02	&	7.4064E-05	\\
		F21&Ave&\textbf{2.4833E+03}&2.5566E+03&2.5722E+03&2.5635E+03&2.9277E+03&2.5191E+03&2.5116E+03&2.8293E+03\\
		&Std&\textbf{3.2303E+01}&4.3323E+01&7.6625E+01&4.0040E+01&9.6911E+01&4.7087E+01&5.3211E+01&	7.0894E+01\\
		&Rank&1&4&6&5&8&3&2&7\\
		&	$p$-value	&&	8.5974E-06	&	6.6104E-05	&	1.8030E-06	&	6.7956E-08	&	1.6669E-02	&	9.6196E-02	&	6.7956E-08	\\
		F22&Ave&\textbf{8.4001E+03}&9.2654E+03&9.5280E+03&9.5939E+03&1.2642E+04&8.6534E+03&8.9490E+03&1.0764E+04\\
		&Std&2.2741E+03&1.0417E+03&8.6169E+02&9.9254E+02&1.5159E+03&8.2339E+02&\textbf{7.3912E+02}&	8.0247E+02\\
		&Rank&1&4&5&6&8&2&3&7\\
		&	$p$-value	&&	2.6162E-01	&	4.9864E-02	&	3.3718E-02	&	2.9598E-07	&	3.3692E-01	&	8.6043E-01	&	2.6898E-06	\\
		F23&Ave&2.9975E+03&3.0999E+03&3.0964E+03&3.0972E+03&3.6694E+03&\textbf{2.9474E+03}&2.9604E+03&3.7321E+03\\
		&Std&\textbf{4.1955E+01}&7.2605E+01&7.9923E+01&1.0544E+02&1.7395E+02&5.6568E+01&5.6276E+01&	1.5358E+02\\
		&Rank&3&6&4&5&7&1&2&8\\
		&	$p$-value	&&	1.1045E-05	&	8.2924E-05	&	1.7936E-04	&	6.7956E-08	&	4.6007E-04	&	9.0454E-03	&	6.7956E-08	\\
		F24&Ave&3.1664E+03&3.2980E+03&3.2894E+03&3.2831E+03&3.8198E+03&3.1544E+03&\textbf{3.1181E+03}&4.1272E+03\\
		&Std&5.1182E+01&7.8940E+01&1.2361E+02&1.1942E+02&1.7097E+02&1.1005E+02&\textbf{4.1267E+01}&	2.1508E+02\\
		&Rank&3&6&5&4&7&2&1&8\\
		&	$p$-value	&&	1.1045E-05	&	2.2220E-04	&	1.7824E-03	&	6.7956E-08	&	8.5855E-02	&	6.0403E-03	&	6.7956E-08	\\
		F25&Ave&3.0716E+03&3.0631E+03&3.0558E+03&3.0676E+03&3.2636E+03&3.6220E+03&\textbf{3.0359E+03}&3.1612E+03\\
		&Std&3.2746E+01&3.5178E+01&4.2957E+01&3.0477E+01&7.8062E+01&2.7301E+02&\textbf{2.4643E+01}&	3.8108E+01\\
		&Rank&5&3&2&4&7&8&1&6\\
		&	$p$-value	&&	3.7933E-01	&	2.8530E-01	&	6.5536E-01	&	6.7956E-08	&	6.7956E-08	&	1.1590E-04	&	5.2269E-07	\\
		F26&Ave&8.6527E+03&7.7972E+03&7.5948E+03&8.5406E+03&1.3403E+04&6.2437E+03&\textbf{5.1602E+03}&1.0127E+04\\
		&Std&1.2748E+03&3.7860E+03&3.8589E+03&3.5894E+03&1.0127E+03&\textbf{4.5656E+02}&1.3838E+03&	2.4902E+03\\
		&Rank&6&4&3&5&8&2&1&7\\
		&	$p$-value	&&	5.4268E-01	&	6.3588E-01	&	2.8530E-01	&	6.7956E-08	&	1.5757E-06	&	1.4309E-07	&	1.0373E-04	\\
		F27&Ave&3.6230E+03&3.6986E+03&3.6351E+03&3.6172E+03&4.4026E+03&3.5552E+03&\textbf{3.3592E+03}&4.1350E+03\\
		&Std&8.9973E+01&1.3395E+02&1.0657E+02&1.5150E+02&3.6835E+02&8.6276E+01&\textbf{6.2128E+01}&	3.2070E+02\\
		&Rank&4&6&5&3&8&2&1&7\\
		&	$p$-value	&&	3.3718E-02	&	5.4277E-01	&	4.2488E-01	&	6.7956E-08	&	3.8515E-02	&	1.6571E-07	&	1.0646E-07	\\
		F28&Ave&\textbf{3.2940E+03}&3.3081E+03&3.3016E+03&3.2998E+03&3.7814E+03&4.1309E+03&3.3005E+03&3.3932E+03\\
		&Std&\textbf{2.0314E+01}&2.1122E+01&2.9098E+01&2.6742E+01&1.5077E+02&3.8132E+02&2.6415E+01&	3.1622E+01\\
		&Rank&1&5&4&2&7&8&3&6\\
		&	$p$-value	&&	4.1124E-02	&	2.6162E-01	&	7.3527E-01	&	6.7956E-08	&	6.7956E-08	&	3.1040E-01	&	6.7956E-08	\\
		F29&Ave&4.4635E+03&4.5275E+03&4.3407E+03&4.3751E+03&7.7021E+03&4.3311E+03&\textbf{4.3262E+03}&5.6448E+03\\
		&Std&3.4612E+02&4.0284E+02&3.6887E+02&3.7508E+02&1.0794E+03&\textbf{1.9253E+02}&3.2501E+02&	5.9792E+02\\
		&Rank&5&6&3&4&8&2&1&7\\
		&	$p$-value	&&	7.5574E-01	&	2.5030E-01	&	5.2499E-01	&	6.7956E-08	&	1.1355E-01	&	1.8954E-01	&	5.2269E-07	\\
		F30&Ave&\textbf{9.4483E+05}&1.0088E+06&1.0554E+06&9.5338E+05&1.4784E+08&1.0085E+08&1.6787E+06&2.1860E+07\\
		&Std&\textbf{1.4261E+05}&2.0886E+05&3.0063E+05&3.1566E+05&4.5865E+07&4.8552E+07&4.2826E+05&	3.8887E+06\\
		&Rank&1&3&4&2&8&7&5&6\\
		&	$p$-value	&&	5.0751E-01	&	3.7933E-01	&	2.2869E-01	&	6.7956E-08	&	6.7956E-08	&	1.8030E-06	&	6.7956E-08	\\  	 	 
		\hline
		Average&rank&\textbf{2.03}&4.10&4.10&3.69&7.55&4.41&3.31&6.76\\
		% NOTE(review): the +/=/- counts below are byte-identical to those in the dim=10 table (Table \ref{tab4}); confirm they were recomputed for dim=50 and not copied over.
		+/=/-&&&17/12/0&16/13/0&16/13/0&29/0/0&14/11/4&12/11/6&29/0/0\\
		%\hline	
		%\botrule
		%\end{tabular}
	\end{longtable}
\end{landscape}
\normalsize
\begin{table}[htbp]
	\caption{Results for the Pressure vessel design problem}\label{tab6}%
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&Avg&Best&Std\\
		\midrule
		MLMRFO&\textbf{5885.4802}&\textbf{5885.3360}&\textbf{0.3540}\\ 
		MRFO&5917.8907&5890.1447&24.6543\\ 
		AMRFO&5926.2723&5896.6723&20.3638\\ 
		HYMRFO&5960.5919&5898.0260&108.0473\\
		WOA&8320.0624 &6289.0352&1463.6895\\ 
		GWO&6049.3633&5892.7643&371.2113\\ 
		SMA&6744.5743&5885.3768&557.8840\\ 
		HHO&6743.4275&6159.9247&388.5543\\
		\botrule
	\end{tabular}
\end{table}

\begin{table}[htbp]
	\caption{Optimal solutions for the Pressure vessel design problem}\label{tab7}%
	\begin{tabular}{@{}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}@{}}
		\toprule
		Algorithm&$X_{1}$&$X_{2}$&$X_{3}$&$X_{4}$&$f(x)$\\
		\midrule
		MLMRFO&0.778169&0.384650&40.319634&199.999784 &	5885.3360\\ 
		MRFO&0.779857&0.385488 &40.396773 &198.952994&5890.1447\\ 
		AMRFO&0.779391 &0.387641 &40.382919 &199.228489&5896.6723\\
		HYMRFO&0.784533 &0.388188 &40.647366 &195.501343&5898.0260\\
		WOA&0.813815 &0.477541&41.309082 &186.669867&6289.0352\\ 
		GWO&0.778634 &0.385251&40.333753&200.000000&5892.7643\\ 
		SMA&0.778193&0.384661 &40.320885 &199.982388&5885.3768\\ 
		HHO&0.877393&0.438562 &44.912492 &144.676619&6159.9247\\  
		\botrule
	\end{tabular}
\end{table}

\begin{table}[htbp]
	\caption{Results for the Tension spring design problem}\label{tab8}%
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&Avg&Best&Std\\
		\midrule
		MLMRFO&\textbf{0.012669}&\textbf{0.012666}&\textbf{0.000004}\\
		MRFO&0.012728&0.012667&0.000062\\
		AMRFO&0.012707&0.012672&0.000034\\
		HYMRFO&0.012726&0.012668&0.000055\\
		WOA&0.014058&0.012667&0.001688\\
		GWO&0.012727&0.012696&0.000012\\
		SMA&0.013183&0.012669&0.001091\\
		HHO&0.013805&0.012667&0.001062\\
		
		\botrule
	\end{tabular}
\end{table}

\begin{table}[htbp]
	\caption{Optimal solutions for the Tension spring design problem}\label{tab9}%
	\begin{tabular}{@{}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}p{1.5cm}@{}}
		\toprule
		Algorithm&$X_{1}$&$X_{2}$&$X_{3}$&$f(x)$\\
		\midrule
		MLMRFO&0.051731&0.357722&11.230562&0.012666\\
		MRFO&0.051942&0.362837&10.939143&0.012667\\
		AMRFO&0.052058&0.365602&10.789875&0.012672\\
		HYMRFO&0.051974&0.363594&10.897637&0.012668\\
		WOA&0.051956&0.363176&10.920156&0.012667\\
		GWO&0.052756&0.382799&9.916605&0.012696\\
		SMA&0.051252&0.346301&11.926879&0.012669\\
		HHO&0.052001&0.364278&10.859141&0.012667\\
		\botrule
	\end{tabular}
\end{table}
\begin{table}[htbp]
	\caption{Results for the Three-bar truss design problem}\label{tab10}%
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&Avg&Best&Std\\
		\midrule
		MLMRFO&\textbf{263.89585}&\textbf{263.89584}&\textbf{0.00001}\\ 
		MRFO&263.89593&\textbf{263.89584}&0.00007\\ 
		AMRFO&263.89600&263.89585&0.00019\\
		HYMRFO&263.89589&263.89585&0.00005\\ 
		WOA&265.19342&263.89839&2.49080\\ 
		GWO&263.90103&263.89678&0.00422\\ 
		SMA&269.26243&265.73381&2.12695\\ 
		HHO&263.96912&263.89588&0.08334\\ 
		\botrule
	\end{tabular}
\end{table}
\begin{table}[htbp]
	\caption{Optimal solutions for the Three-bar truss design problem}\label{tab11}%
	\centering
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&$X_{1}$&$X_{2}$&$f(x)$\\
		\midrule
		MLMRFO&0.788674&0.408251&263.89584\\ 
		MRFO&0.788642&0.408341&263.89584\\ 
		AMRFO&0.788621&0.408403&263.89585\\ 
		HYMRFO&0.788711&0.408147&263.89585\\ 
		WOA	&0.786822&0.413516&263.89839\\ 
		GWO&0.789542&0.405805&263.89678\\ 
		SMA&0.822201&0.331802&265.73381\\ 
		HHO&0.788452&0.408880&263.89588\\ 
		\botrule
	\end{tabular}
\end{table}

\begin{table}[htbp]
	\caption{Results for the Speed reducer design problem}\label{tab12}%
	\begin{tabular}{@{}p{2cm}p{2cm}p{2cm}p{2cm}@{}}
		\toprule
		Algorithm&Avg&Best&Std\\
		\midrule
		MLMRFO&\textbf{2994.4711}&\textbf{2994.4711}&\textbf{0.0000}\\ 
		MRFO&2994.4829&2994.4722&0.0138\\ 
		AMRFO&2994.4851&2994.4716&0.0142\\ 
		HYMRFO&2994.4716&\textbf{2994.4711}&0.0004\\
		WOA&3204.8809&3013.4942&212.9514\\ 
		GWO&3011.2037&2999.9465&6.0963\\ 
		SMA&2994.4751&2994.4714&0.0034\\ 
		HHO&3489.0562&3039.2060&456.1142\\ 
		\botrule
	\end{tabular}
\end{table}
\begin{table}[htbp]
	\caption{Optimal solutions for the Speed reducer design problem}\label{tab13}%
	\begin{tabular}{@{}lllllllll@{}}
		\toprule
		Algorithm&$X_{1}$&$X_{2}$&$X_{3}$&$X_{4}$&$X_{5}$&$X_{6}$&$X_{7}$&$f(x)$\\
		\midrule
		MLMRFO&3.50000&0.70000&17.00000&7.30000&7.71532&3.35022&5.28665&2994.4711\\ 
		MRFO&3.50000&0.70000&17.00000&7.30003&7.71533 &3.35022&5.28666&2994.4722\\
		AMRFO&3.50000&0.70000&17.00000&7.30001&7.71533 &3.35022 &5.28666&2994.4716\\
		HYMRFO&3.50000&0.70000&17.00000&7.30000 &7.71532&3.35022 &5.28665&2994.4711\\ 
		WOA&3.50018&0.70000&17.00000&7.96819&7.99831 &3.37628&5.28675&3013.4942\\ 
		GWO&3.50007&0.70000&17.00000&7.31365 &7.86004&3.35687 &5.28736&2999.9465\\ 
		SMA&3.50000&0.70000&17.00000&7.30000 &7.71533 &3.35022 &5.28666&2994.4714\\ 
		HHO&3.55316&0.70000&17.00000&8.02073&8.17358&3.37778&5.28708&3039.2060\\   
		\botrule
	\end{tabular}
\end{table}
%\begin{itemize}
%\item Funding
%\item Conflict of interest/Competing interests (check journal-specific guidelines for which heading to use)
%\item Ethics approval 
%\item Consent to participate
%\item Consent for publication
%\item Availability of data and materials
%\item Code availability 
%\item Authors' contributions
%\end{itemize}


%%===================================================%%
%% For presentation purpose, we have included        %%
%% \bigskip command. please ignore this.             %%
%%===================================================%%
%\bigskip
%\begin{flushleft}%
%Editorial Policies for:

%\bigskip\noindent
%Springer journals and proceedings: %\url{https://www.springer.com/gp/editorial-policies}

%\bigskip\noindent
%Nature Portfolio journals: %\url{https://www.nature.com/nature-research/editorial-policies}

%\bigskip\noindent
%\textit{Scientific Reports}: %\url{https://www.nature.com/srep/journal-policies/editorial-policies}

%%BMC journals: \url{https://www.biomedcentral.com/getpublished/editorial-policies}
%\end{flushleft}



%%=============================================%%
%% For submissions to Nature Portfolio Journals %%
%% please use the heading ``Extended Data''.   %%
%%=============================================%%

%%=============================================================%%
%% Sample for another appendix section			       %%
%%=============================================================%%

%% \section{Example of another appendix section}\label{secA2}%
%% Appendices may be used for helpful, supporting or essential material that would otherwise 
%% clutter, break up or be distracting to the text. Appendices can consist of sections, figures, 
%% tables and equations etc.


%%===========================================================================================%%
%% If you are submitting to one of the Nature Portfolio journals, using the eJP submission   %%
%% system, please include the references within the manuscript file itself. You may do this  %%
%% by copying the reference list from your .bbl file, paste it into the main manuscript .tex %%
%% file, and delete the associated \verb+\bibliography+ commands.                            %%
%%===========================================================================================%%
%\bibliographystyle{unsrt}
\clearpage
\bibliography{sn-bibliography}% common bib file
%% if required, the content of .bbl file can be included here once bbl is generated
%%\input sn-article.bbl


\end{document}
