CINXE.COM

Layer activation functions

<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="description" content="Keras documentation"> <meta name="author" content="Keras Team"> <link rel="shortcut icon" href="https://keras.io/img/favicon.ico"> <link rel="canonical" href="https://keras.io/api/layers/activations/" /> <!-- Social --> <meta property="og:title" content="Keras documentation: Layer activation functions"> <meta property="og:image" content="https://keras.io/img/logo-k-keras-wb.png"> <meta name="twitter:title" content="Keras documentation: Layer activation functions"> <meta name="twitter:image" content="https://keras.io/img/k-keras-social.png"> <meta name="twitter:card" content="summary"> <title>Layer activation functions</title> <!-- Bootstrap core CSS --> <link href="/css/bootstrap.min.css" rel="stylesheet"> <!-- Custom fonts for this template --> <link href="https://fonts.googleapis.com/css2?family=Open+Sans:wght@400;600;700;800&display=swap" rel="stylesheet"> <!-- Custom styles for this template --> <link href="/css/docs.css" rel="stylesheet"> <link href="/css/monokai.css" rel="stylesheet"> <!-- Google Tag Manager --> <script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0], j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src= 'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f); })(window,document,'script','dataLayer','GTM-5DNGF4N'); </script> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-175165319-128', 'auto'); ga('send', 'pageview'); </script> <!-- End Google Tag Manager --> <script async defer src="https://buttons.github.io/buttons.js"></script> </head> <body> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-5DNGF4N" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <div class='k-page'> <div class="k-nav" id="nav-menu"> <a href='/'><img src='/img/logo-small.png' class='logo-small' /></a> <div class="nav flex-column nav-pills" role="tablist" aria-orientation="vertical"> <a class="nav-link" href="/about/" role="tab" aria-selected="">About Keras</a> <a class="nav-link" href="/getting_started/" role="tab" aria-selected="">Getting started</a> <a class="nav-link" href="/guides/" role="tab" aria-selected="">Developer guides</a> <a class="nav-link" href="/examples/" role="tab" aria-selected="">Code examples</a> <a class="nav-link active" href="/api/" role="tab" aria-selected="">Keras 3 API documentation</a> <a class="nav-sublink" href="/api/models/">Models API</a> <a class="nav-sublink active" href="/api/layers/">Layers API</a> <a class="nav-sublink2" href="/api/layers/base_layer/">The base Layer class</a> <a class="nav-sublink2 active" href="/api/layers/activations/">Layer activations</a> <a class="nav-sublink2" href="/api/layers/initializers/">Layer weight initializers</a> <a class="nav-sublink2" href="/api/layers/regularizers/">Layer weight regularizers</a> <a class="nav-sublink2" href="/api/layers/constraints/">Layer weight constraints</a> <a class="nav-sublink2" href="/api/layers/core_layers/">Core layers</a> <a class="nav-sublink2" href="/api/layers/convolution_layers/">Convolution layers</a> <a class="nav-sublink2" href="/api/layers/pooling_layers/">Pooling layers</a> <a class="nav-sublink2" href="/api/layers/recurrent_layers/">Recurrent layers</a> <a class="nav-sublink2" href="/api/layers/preprocessing_layers/">Preprocessing layers</a> <a class="nav-sublink2" href="/api/layers/normalization_layers/">Normalization layers</a> <a class="nav-sublink2" href="/api/layers/regularization_layers/">Regularization layers</a> <a class="nav-sublink2" href="/api/layers/attention_layers/">Attention layers</a> <a class="nav-sublink2" href="/api/layers/reshaping_layers/">Reshaping layers</a> <a class="nav-sublink2" href="/api/layers/merging_layers/">Merging layers</a> <a class="nav-sublink2" href="/api/layers/activation_layers/">Activation layers</a> <a class="nav-sublink2" href="/api/layers/backend_specific_layers/">Backend-specific layers</a> <a class="nav-sublink" href="/api/callbacks/">Callbacks API</a> <a class="nav-sublink" href="/api/ops/">Ops API</a> <a class="nav-sublink" href="/api/optimizers/">Optimizers</a> <a class="nav-sublink" href="/api/metrics/">Metrics</a> <a class="nav-sublink" href="/api/losses/">Losses</a> <a class="nav-sublink" href="/api/data_loading/">Data loading</a> <a class="nav-sublink" href="/api/datasets/">Built-in small datasets</a> <a class="nav-sublink" href="/api/applications/">Keras Applications</a> <a class="nav-sublink" href="/api/mixed_precision/">Mixed precision</a> <a class="nav-sublink" href="/api/distribution/">Multi-device distribution</a> <a class="nav-sublink" href="/api/random/">RNG API</a> <a class="nav-sublink" href="/api/utils/">Utilities</a> <a class="nav-link" href="/2.18/api/" role="tab" aria-selected="">Keras 2 API documentation</a> <a class="nav-link" href="/keras_tuner/" role="tab" aria-selected="">KerasTuner: Hyperparam Tuning</a> <a class="nav-link" href="/keras_hub/" role="tab" aria-selected="">KerasHub: Pretrained Models</a> </div> </div> <div class='k-main'> <div class='k-main-top'> <script> function displayDropdownMenu() { e = document.getElementById("nav-menu"); if (e.style.display == "block") { e.style.display = "none"; } else { e.style.display = "block"; document.getElementById("dropdown-nav").style.display = "block"; } } function resetMobileUI() { if (window.innerWidth <= 840) { document.getElementById("nav-menu").style.display = "none"; document.getElementById("dropdown-nav").style.display = "block"; } else { document.getElementById("nav-menu").style.display = "block"; document.getElementById("dropdown-nav").style.display = "none"; } var navmenu = document.getElementById("nav-menu"); var menuheight = navmenu.clientHeight; var kmain = document.getElementById("k-main-id"); kmain.style.minHeight = (menuheight + 100) + 'px'; } window.onresize = resetMobileUI; window.addEventListener("load", (event) => { resetMobileUI() }); </script> <div id='dropdown-nav' onclick="displayDropdownMenu();"> <svg viewBox="-20 -20 120 120" width="60" height="60"> <rect width="100" height="20"></rect> <rect y="30" width="100" height="20"></rect> <rect y="60" width="100" height="20"></rect> </svg> </div> <form class="bd-search d-flex align-items-center k-search-form" id="search-form"> <input type="search" class="k-search-input" id="search-input" placeholder="Search Keras documentation..." aria-label="Search Keras documentation..." autocomplete="off"> <button class="k-search-btn"> <svg width="13" height="13" viewBox="0 0 13 13"><title>search</title><path d="m4.8495 7.8226c0.82666 0 1.5262-0.29146 2.0985-0.87438 0.57232-0.58292 0.86378-1.2877 0.87438-2.1144 0.010599-0.82666-0.28086-1.5262-0.87438-2.0985-0.59352-0.57232-1.293-0.86378-2.0985-0.87438-0.8055-0.010599-1.5103 0.28086-2.1144 0.87438-0.60414 0.59352-0.8956 1.293-0.87438 2.0985 0.021197 0.8055 0.31266 1.5103 0.87438 2.1144 0.56172 0.60414 1.2665 0.8956 2.1144 0.87438zm4.4695 0.2115 3.681 3.6819-1.259 1.284-3.6817-3.7 0.0019784-0.69479-0.090043-0.098846c-0.87973 0.76087-1.92 1.1413-3.1207 1.1413-1.3553 0-2.5025-0.46363-3.4417-1.3909s-1.4088-2.0686-1.4088-3.4239c0-1.3553 0.4696-2.4966 1.4088-3.4239 0.9392-0.92727 2.0864-1.3969 3.4417-1.4088 1.3553-0.011889 2.4906 0.45771 3.406 1.4088 0.9154 0.95107 1.379 2.0924 1.3909 3.4239 0 1.2126-0.38043 2.2588-1.1413 3.1385l0.098834 0.090049z"></path></svg> </button> </form> <script> var form = document.getElementById('search-form'); form.onsubmit = function(e) { e.preventDefault(); var query = document.getElementById('search-input').value; window.location.href = '/search.html?query=' + query; return False } </script> </div> <div class='k-main-inner' id='k-main-id'> <div class='k-location-slug'> <span class="k-location-slug-pointer">►</span> <a href='/api/'>Keras 3 API documentation</a> / <a href='/api/layers/'>Layers API</a> / Layer activation functions </div> <div class='k-content'> <h1 id="layer-activation-functions">Layer activation functions</h1> <h2 id="usage-of-activations">Usage of activations</h2> <p>Activations can either be used through an <code>Activation</code> layer, or through the <code>activation</code> argument supported by all forward layers:</p> <div class="codehilite"><pre><span></span><code><span class="n">model</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">layers</span><span class="o">.</span><span class="n">Dense</span><span class="p">(</span><span class="mi">64</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">))</span> </code></pre></div> <p>This is equivalent to:</p> <div class="codehilite"><pre><span></span><code><span class="kn">from</span> <span class="nn">keras</span> <span class="kn">import</span> <span class="n">layers</span> <span class="kn">from</span> <span class="nn">keras</span> <span class="kn">import</span> <span class="n">activations</span> <span class="n">model</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">layers</span><span class="o">.</span><span class="n">Dense</span><span class="p">(</span><span class="mi">64</span><span class="p">))</span> <span class="n">model</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">layers</span><span class="o">.</span><span class="n">Activation</span><span class="p">(</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">))</span> </code></pre></div> <p>All built-in activations may also be passed via their string identifier:</p> <div class="codehilite"><pre><span></span><code><span class="n">model</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">layers</span><span class="o">.</span><span class="n">Dense</span><span class="p">(</span><span class="mi">64</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">))</span> </code></pre></div> <hr /> <h2 id="available-activations">Available activations</h2> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L6">[source]</a></span></p> <h3 id="relu-function"><code>relu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">negative_slope</span><span class="o">=</span><span class="mf">0.0</span><span class="p">,</span> <span class="n">max_value</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">threshold</span><span class="o">=</span><span class="mf">0.0</span><span class="p">)</span> </code></pre></div> <p>Applies the rectified linear unit activation function.</p> <p>With default values, this returns the standard ReLU activation: <code>max(x, 0)</code>, the element-wise maximum of 0 and the input tensor.</p> <p>Modifying default parameters allows you to use non-zero thresholds, change the max value of the activation, and to use a non-zero multiple of the input for values below the threshold.</p> <p><strong>Examples</strong></p> <div class="codehilite"><pre><span></span><code><span class="o">&gt;&gt;&gt;</span> <span class="n">x</span> <span class="o">=</span> <span class="p">[</span><span class="o">-</span><span class="mi">10</span><span class="p">,</span> <span class="o">-</span><span class="mi">5</span><span class="p">,</span> <span class="mf">0.0</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">10</span><span class="p">]</span> <span class="o">&gt;&gt;&gt;</span> <span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="p">[</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">5.</span><span class="p">,</span> <span class="mf">10.</span><span class="p">]</span> <span class="o">&gt;&gt;&gt;</span> <span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">negative_slope</span><span class="o">=</span><span class="mf">0.5</span><span class="p">)</span> <span class="p">[</span><span class="o">-</span><span class="mf">5.</span> <span class="p">,</span> <span class="o">-</span><span class="mf">2.5</span><span class="p">,</span> <span class="mf">0.</span> <span class="p">,</span> <span class="mf">5.</span> <span class="p">,</span> <span class="mf">10.</span> <span class="p">]</span> <span class="o">&gt;&gt;&gt;</span> <span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">max_value</span><span class="o">=</span><span class="mf">5.</span><span class="p">)</span> <span class="p">[</span><span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">5.</span><span class="p">,</span> <span class="mf">5.</span><span class="p">]</span> <span class="o">&gt;&gt;&gt;</span> <span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">threshold</span><span class="o">=</span><span class="mf">5.</span><span class="p">)</span> <span class="p">[</span><span class="o">-</span><span class="mf">0.</span><span class="p">,</span> <span class="o">-</span><span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">0.</span><span class="p">,</span> <span class="mf">10.</span><span class="p">]</span> </code></pre></div> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> <li><strong>negative_slope</strong>: A <code>float</code> that controls the slope for values lower than the threshold.</li> <li><strong>max_value</strong>: A <code>float</code> that sets the saturation threshold (the largest value the function will return).</li> <li><strong>threshold</strong>: A <code>float</code> giving the threshold value of the activation function below which values will be damped or set to zero.</li> </ul> <p><strong>Returns</strong></p> <p>A tensor with the same shape and dtype as input <code>x</code>.</p> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L446">[source]</a></span></p> <h3 id="sigmoid-function"><code>sigmoid</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">sigmoid</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Sigmoid activation function.</p> <p>It is defined as: <code>sigmoid(x) = 1 / (1 + exp(-x))</code>.</p> <p>For small values (&lt;-5), <code>sigmoid</code> returns a value close to zero, and for large values (&gt;5) the result of the function gets close to 1.</p> <p>Sigmoid is equivalent to a 2-element softmax, where the second element is assumed to be zero. The sigmoid function always returns a value between 0 and 1.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L137">[source]</a></span></p> <h3 id="softmax-function"><code>softmax</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">softmax</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">)</span> </code></pre></div> <p>Softmax converts a vector of values to a probability distribution.</p> <p>The elements of the output vector are in range <code>[0, 1]</code> and sum to 1.</p> <p>Each input vector is handled independently. The <code>axis</code> argument sets which axis of the input the function is applied along.</p> <p>Softmax is often used as the activation for the last layer of a classification network because the result could be interpreted as a probability distribution.</p> <p>The softmax of each vector x is computed as <code>exp(x) / sum(exp(x))</code>.</p> <p>The input values in are the log-odds of the resulting probability.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> <li><strong>axis</strong>: Integer, axis along which the softmax is applied.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L238">[source]</a></span></p> <h3 id="softplus-function"><code>softplus</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">softplus</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Softplus activation function.</p> <p>It is defined as: <code>softplus(x) = log(exp(x) + 1)</code>.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L250">[source]</a></span></p> <h3 id="softsign-function"><code>softsign</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">softsign</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Softsign activation function.</p> <p>Softsign is defined as: <code>softsign(x) = x / (abs(x) + 1)</code>.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L386">[source]</a></span></p> <h3 id="tanh-function"><code>tanh</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">tanh</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Hyperbolic tangent activation function.</p> <p>It is defined as: <code>tanh(x) = sinh(x) / cosh(x)</code>, i.e. <code>tanh(x) = ((exp(x) - exp(-x)) / (exp(x) + exp(-x)))</code>.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L198">[source]</a></span></p> <h3 id="selu-function"><code>selu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">selu</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Scaled Exponential Linear Unit (SELU).</p> <p>The Scaled Exponential Linear Unit (SELU) activation function is defined as:</p> <ul> <li><code>scale * x</code> if <code>x &gt; 0</code></li> <li><code>scale * alpha * (exp(x) - 1)</code> if <code>x &lt; 0</code></li> </ul> <p>where <code>alpha</code> and <code>scale</code> are pre-defined constants (<code>alpha=1.67326324</code> and <code>scale=1.05070098</code>).</p> <p>Basically, the SELU activation function multiplies <code>scale</code> (&gt; 1) with the output of the <a href="/api/layers/activations#elu-function"><code>keras.activations.elu</code></a> function to ensure a slope larger than one for positive inputs.</p> <p>The values of <code>alpha</code> and <code>scale</code> are chosen so that the mean and variance of the inputs are preserved between two consecutive layers as long as the weights are initialized correctly (see <a href="/api/layers/initializers#lecunnormal-class"><code>keras.initializers.LecunNormal</code></a> initializer) and the number of input units is "large enough" (see reference paper for more information).</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p>Notes:</p> <ul> <li>To be used together with the <a href="/api/layers/initializers#lecunnormal-class"><code>keras.initializers.LecunNormal</code></a> initializer.</li> <li>To be used together with the dropout variant <a href="/api/layers/regularization_layers/alpha_dropout#alphadropout-class"><code>keras.layers.AlphaDropout</code></a> (rather than regular dropout).</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1706.02515">Klambauer et al., 2017</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L170">[source]</a></span></p> <h3 id="elu-function"><code>elu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">elu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">alpha</span><span class="o">=</span><span class="mf">1.0</span><span class="p">)</span> </code></pre></div> <p>Exponential Linear Unit.</p> <p>The exponential linear unit (ELU) with <code>alpha &gt; 0</code> is defined as:</p> <ul> <li><code>x</code> if <code>x &gt; 0</code></li> <li>alpha * <code>exp(x) - 1</code> if <code>x &lt; 0</code></li> </ul> <p>ELUs have negative values which pushes the mean of the activations closer to zero.</p> <p>Mean activations that are closer to zero enable faster learning as they bring the gradient closer to the natural gradient. ELUs saturate to a negative value when the argument gets smaller. Saturation means a small derivative which decreases the variation and the information that is propagated to the next layer.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1511.07289">Clevert et al., 2016</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L473">[source]</a></span></p> <h3 id="exponential-function"><code>exponential</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">exponential</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Exponential activation function.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L113">[source]</a></span></p> <h3 id="leakyrelu-function"><code>leaky_relu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">leaky_relu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">negative_slope</span><span class="o">=</span><span class="mf">0.2</span><span class="p">)</span> </code></pre></div> <p>Leaky relu activation function.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> <li><strong>negative_slope</strong>: A <code>float</code> that controls the slope for values lower than the threshold.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L125">[source]</a></span></p> <h3 id="relu6-function"><code>relu6</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">relu6</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Relu6 activation function.</p> <p>It's the ReLU function, but truncated to a maximum value of 6.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L280">[source]</a></span></p> <h3 id="silu-function"><code>silu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">silu</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Swish (or Silu) activation function.</p> <p>It is defined as: <code>swish(x) = x * sigmoid(x)</code>.</p> <p>The Swish (or Silu) activation function is a smooth, non-monotonic function that is unbounded above and bounded below.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1710.05941">Ramachandran et al., 2017</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L519">[source]</a></span></p> <h3 id="hardsilu-function"><code>hard_silu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">hard_silu</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Hard SiLU activation function, also known as Hard Swish.</p> <p>It is defined as:</p> <ul> <li><code>0</code> if <code>if x &lt; -3</code></li> <li><code>x</code> if <code>x &gt; 3</code></li> <li><code>x * (x + 3) / 6</code> if <code>-3 &lt;= x &lt;= 3</code></li> </ul> <p>It's a faster, piecewise linear approximation of the silu activation.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1905.02244">A Howard, 2019</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L321">[source]</a></span></p> <h3 id="gelu-function"><code>gelu</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">gelu</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">approximate</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span> </code></pre></div> <p>Gaussian error linear unit (GELU) activation function.</p> <p>The Gaussian error linear unit (GELU) is defined as:</p> <p><code>gelu(x) = x * P(X &lt;= x)</code> where <code>P(X) ~ N(0, 1)</code>, i.e. <code>gelu(x) = 0.5 * x * (1 + erf(x / sqrt(2)))</code>.</p> <p>GELU weights inputs by their value, rather than gating inputs by their sign as in ReLU.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> <li><strong>approximate</strong>: A <code>bool</code>, whether to enable approximation.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1606.08415">Hendrycks et al., 2016</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L483">[source]</a></span></p> <h3 id="hardsigmoid-function"><code>hard_sigmoid</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">hard_sigmoid</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Hard sigmoid activation function.</p> <p>The hard sigmoid activation is defined as:</p> <ul> <li><code>0</code> if <code>if x &lt;= -3</code></li> <li><code>1</code> if <code>x &gt;= 3</code></li> <li><code>(x/6) + 0.5</code> if <code>-3 &lt; x &lt; 3</code></li> </ul> <p>It's a faster, piecewise linear approximation of the sigmoid activation.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://en.wikipedia.org/wiki/Hard_sigmoid">Wikipedia "Hard sigmoid"</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L542">[source]</a></span></p> <h3 id="linear-function"><code>linear</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">linear</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Linear activation function (pass-through).</p> <p>A "linear" activation is an identity function: it returns the input, unmodified.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L567">[source]</a></span></p> <h3 id="mish-function"><code>mish</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">mish</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> </code></pre></div> <p>Mish activation function.</p> <p>It is defined as:</p> <p><code>mish(x) = x * tanh(softplus(x))</code></p> <p>where <code>softplus</code> is defined as:</p> <p><code>softplus(x) = log(exp(x) + 1)</code></p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> </ul> <p><strong>Reference</strong></p> <ul> <li><a href="https://arxiv.org/abs/1908.08681">Misra, 2019</a></li> </ul> <hr /> <p><span style="float:right;"><a href="https://github.com/keras-team/keras/tree/v3.7.0/keras/src/activations/activations.py#L590">[source]</a></span></p> <h3 id="logsoftmax-function"><code>log_softmax</code> function</h3> <div class="codehilite"><pre><span></span><code><span class="n">keras</span><span class="o">.</span><span class="n">activations</span><span class="o">.</span><span class="n">log_softmax</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">)</span> </code></pre></div> <p>Log-Softmax activation function.</p> <p>Each input vector is handled independently. The <code>axis</code> argument sets which axis of the input the function is applied along.</p> <p><strong>Arguments</strong></p> <ul> <li><strong>x</strong>: Input tensor.</li> <li><strong>axis</strong>: Integer, axis along which the softmax is applied.</li> </ul> <hr /> <hr /> <h2 id="creating-custom-activations">Creating custom activations</h2> <p>You can also use a callable as an activation (in this case it should take a tensor and return a tensor of the same shape and dtype):</p> <div class="codehilite"><pre><span></span><code><span class="n">model</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">layers</span><span class="o">.</span><span class="n">Dense</span><span class="p">(</span><span class="mi">64</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="n">keras</span><span class="o">.</span><span class="n">ops</span><span class="o">.</span><span class="n">tanh</span><span class="p">))</span> </code></pre></div> <hr /> <h2 id="about-advanced-activation-layers">About "advanced activation" layers</h2> <p>Activations that are more complex than a simple function (eg. learnable activations, which maintain a state) are available as <a href="/api/layers/activation_layers/">Advanced Activation layers</a>.</p> </div> <div class='k-outline'> <div class='k-outline-depth-1'> <a href='#layer-activation-functions'>Layer activation functions</a> </div> <div class='k-outline-depth-2'> ◆ <a href='#usage-of-activations'>Usage of activations</a> </div> <div class='k-outline-depth-2'> ◆ <a href='#available-activations'>Available activations</a> </div> <div class='k-outline-depth-3'> <a href='#relu-function'><code>relu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#sigmoid-function'><code>sigmoid</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#softmax-function'><code>softmax</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#softplus-function'><code>softplus</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#softsign-function'><code>softsign</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#tanh-function'><code>tanh</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#selu-function'><code>selu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#elu-function'><code>elu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#exponential-function'><code>exponential</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#leakyrelu-function'><code>leaky_relu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#relu6-function'><code>relu6</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#silu-function'><code>silu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#hardsilu-function'><code>hard_silu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#gelu-function'><code>gelu</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#hardsigmoid-function'><code>hard_sigmoid</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#linear-function'><code>linear</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#mish-function'><code>mish</code> function</a> </div> <div class='k-outline-depth-3'> <a href='#logsoftmax-function'><code>log_softmax</code> function</a> </div> <div class='k-outline-depth-2'> ◆ <a href='#creating-custom-activations'>Creating custom activations</a> </div> <div class='k-outline-depth-2'> ◆ <a href='#about-advanced-activation-layers'>About "advanced activation" layers</a> </div> </div> </div> </div> </div> </body> <footer style="float: left; width: 100%; padding: 1em; border-top: solid 1px #bbb;"> <a href="https://policies.google.com/terms">Terms</a> | <a href="https://policies.google.com/privacy">Privacy</a> </footer> </html>

Pages: 1 2 3 4 5 6 7 8 9 10