<!-- Scrape artifacts preserved as a comment: stray text before the DOCTYPE
     would force the browser into quirks mode.
     Original pre-doctype lines:
       CINXE.COM
       Generalized additive model - Wikipedia -->

<!DOCTYPE html> <html class="client-nojs vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available" lang="en" dir="ltr"> <head> <meta charset="UTF-8"> <title>Generalized additive model - Wikipedia</title> <script>(function(){var className="client-js vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available";var cookie=document.cookie.match(/(?:^|; )enwikimwclientpreferences=([^;]+)/);if(cookie){cookie[1].split('%2C').forEach(function(pref){className=className.replace(new RegExp('(^| )'+pref.replace(/-clientpref-\w+$|[^\w-]+/g,'')+'-clientpref-\\w+( |$)'),'$1'+pref+'$2');});}document.documentElement.className=className;}());RLCONF={"wgBreakFrames":false,"wgSeparatorTransformTable":["",""],"wgDigitTransformTable":["",""],"wgDefaultDateFormat":"dmy", 
"wgMonthNames":["","January","February","March","April","May","June","July","August","September","October","November","December"],"wgRequestId":"db5b4f24-dd46-4845-8d9f-a94190095ebc","wgCanonicalNamespace":"","wgCanonicalSpecialPageName":false,"wgNamespaceNumber":0,"wgPageName":"Generalized_additive_model","wgTitle":"Generalized additive model","wgCurRevisionId":1256715804,"wgRevisionId":1256715804,"wgArticleId":3608284,"wgIsArticle":true,"wgIsRedirect":false,"wgAction":"view","wgUserName":null,"wgUserGroups":["*"],"wgCategories":["Articles with short description","Short description is different from Wikidata","Wikipedia articles needing clarification from July 2019","Articles with example R code","Generalized linear models","Nonparametric regression","Regression models"],"wgPageViewLanguage":"en","wgPageContentLanguage":"en","wgPageContentModel":"wikitext","wgRelevantPageName":"Generalized_additive_model","wgRelevantArticleId":3608284,"wgIsProbablyEditable":true, "wgRelevantPageIsProbablyEditable":true,"wgRestrictionEdit":[],"wgRestrictionMove":[],"wgNoticeProject":"wikipedia","wgCiteReferencePreviewsActive":false,"wgFlaggedRevsParams":{"tags":{"status":{"levels":1}}},"wgMediaViewerOnClick":true,"wgMediaViewerEnabledByDefault":true,"wgPopupsFlags":0,"wgVisualEditor":{"pageLanguageCode":"en","pageLanguageDir":"ltr","pageVariantFallbacks":"en"},"wgMFDisplayWikibaseDescriptions":{"search":true,"watchlist":true,"tagline":false,"nearby":true},"wgWMESchemaEditAttemptStepOversample":false,"wgWMEPageLength":40000,"wgRelatedArticlesCompat":[],"wgCentralAuthMobileDomain":false,"wgEditSubmitButtonLabelPublish":true,"wgULSPosition":"interlanguage","wgULSisCompactLinksEnabled":false,"wgVector2022LanguageInHeader":true,"wgULSisLanguageSelectorEmpty":false,"wgWikibaseItemId":"Q3318054","wgCheckUserClientHintsHeadersJsApi":["brands","architecture","bitness","fullVersionList","mobile","model","platform","platformVersion"],"GEHomepageSuggestedEditsEnableTopics": 
true,"wgGETopicsMatchModeEnabled":false,"wgGEStructuredTaskRejectionReasonTextInputEnabled":false,"wgGELevelingUpEnabledForUser":false};RLSTATE={"ext.globalCssJs.user.styles":"ready","site.styles":"ready","user.styles":"ready","ext.globalCssJs.user":"ready","user":"ready","user.options":"loading","ext.cite.styles":"ready","ext.math.styles":"ready","skins.vector.search.codex.styles":"ready","skins.vector.styles":"ready","skins.vector.icons":"ready","ext.wikimediamessages.styles":"ready","ext.visualEditor.desktopArticleTarget.noscript":"ready","ext.uls.interlanguage":"ready","wikibase.client.init":"ready","ext.wikimediaBadges":"ready"};RLPAGEMODULES=["ext.cite.ux-enhancements","site","mediawiki.page.ready","mediawiki.toc","skins.vector.js","ext.centralNotice.geoIP","ext.centralNotice.startUp","ext.gadget.ReferenceTooltips","ext.gadget.switcher","ext.urlShortener.toolbar","ext.centralauth.centralautologin","ext.popups","ext.visualEditor.desktopArticleTarget.init", "ext.visualEditor.targetLoader","ext.echo.centralauth","ext.eventLogging","ext.wikimediaEvents","ext.navigationTiming","ext.uls.interface","ext.cx.eventlogging.campaigns","ext.cx.uls.quick.actions","wikibase.client.vector-2022","ext.checkUser.clientHints","ext.growthExperiments.SuggestedEditSession","wikibase.sidebar.tracking"];</script> <script>(RLQ=window.RLQ||[]).push(function(){mw.loader.impl(function(){return["user.options@12s5i",function($,jQuery,require,module){mw.user.tokens.set({"patrolToken":"+\\","watchToken":"+\\","csrfToken":"+\\"}); }];});});</script> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=ext.cite.styles%7Cext.math.styles%7Cext.uls.interlanguage%7Cext.visualEditor.desktopArticleTarget.noscript%7Cext.wikimediaBadges%7Cext.wikimediamessages.styles%7Cskins.vector.icons%2Cstyles%7Cskins.vector.search.codex.styles%7Cwikibase.client.init&amp;only=styles&amp;skin=vector-2022"> <script async="" 
src="/w/load.php?lang=en&amp;modules=startup&amp;only=scripts&amp;raw=1&amp;skin=vector-2022"></script> <meta name="ResourceLoaderDynamicStyles" content=""> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=site.styles&amp;only=styles&amp;skin=vector-2022"> <meta name="generator" content="MediaWiki 1.44.0-wmf.4"> <meta name="referrer" content="origin"> <meta name="referrer" content="origin-when-cross-origin"> <meta name="robots" content="max-image-preview:standard"> <meta name="format-detection" content="telephone=no"> <meta name="viewport" content="width=1120"> <meta property="og:title" content="Generalized additive model - Wikipedia"> <meta property="og:type" content="website"> <link rel="alternate" media="only screen and (max-width: 640px)" href="//en.m.wikipedia.org/wiki/Generalized_additive_model"> <link rel="alternate" type="application/x-wiki" title="Edit this page" href="/w/index.php?title=Generalized_additive_model&amp;action=edit"> <link rel="apple-touch-icon" href="/static/apple-touch/wikipedia.png"> <link rel="icon" href="/static/favicon/wikipedia.ico"> <link rel="search" type="application/opensearchdescription+xml" href="/w/rest.php/v1/search" title="Wikipedia (en)"> <link rel="EditURI" type="application/rsd+xml" href="//en.wikipedia.org/w/api.php?action=rsd"> <link rel="canonical" href="https://en.wikipedia.org/wiki/Generalized_additive_model"> <link rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/deed.en"> <link rel="alternate" type="application/atom+xml" title="Wikipedia Atom feed" href="/w/index.php?title=Special:RecentChanges&amp;feed=atom"> <link rel="dns-prefetch" href="//meta.wikimedia.org" /> <link rel="dns-prefetch" href="//login.wikimedia.org"> </head> <body class="skin--responsive skin-vector skin-vector-search-vue mediawiki ltr sitedir-ltr mw-hide-empty-elt ns-0 ns-subject mw-editable page-Generalized_additive_model rootpage-Generalized_additive_model skin-vector-2022 action-view"><a class="mw-jump-link" 
href="#bodyContent">Jump to content</a> <div class="vector-header-container"> <header class="vector-header mw-header"> <div class="vector-header-start"> <nav class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-dropdown" class="vector-dropdown vector-main-menu-dropdown vector-button-flush-left vector-button-flush-right" > <input type="checkbox" id="vector-main-menu-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-main-menu-dropdown" class="vector-dropdown-checkbox " aria-label="Main menu" > <label id="vector-main-menu-dropdown-label" for="vector-main-menu-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-menu mw-ui-icon-wikimedia-menu"></span> <span class="vector-dropdown-label-text">Main menu</span> </label> <div class="vector-dropdown-content"> <div id="vector-main-menu-unpinned-container" class="vector-unpinned-container"> <div id="vector-main-menu" class="vector-main-menu vector-pinnable-element"> <div class="vector-pinnable-header vector-main-menu-pinnable-header vector-pinnable-header-unpinned" data-feature-name="main-menu-pinned" data-pinnable-element-id="vector-main-menu" data-pinned-container-id="vector-main-menu-pinned-container" data-unpinned-container-id="vector-main-menu-unpinned-container" > <div class="vector-pinnable-header-label">Main menu</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-main-menu.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-main-menu.unpin">hide</button> </div> <div id="p-navigation" class="vector-menu mw-portlet mw-portlet-navigation" > <div class="vector-menu-heading"> Navigation </div> <div 
class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-mainpage-description" class="mw-list-item"><a href="/wiki/Main_Page" title="Visit the main page [z]" accesskey="z"><span>Main page</span></a></li><li id="n-contents" class="mw-list-item"><a href="/wiki/Wikipedia:Contents" title="Guides to browsing Wikipedia"><span>Contents</span></a></li><li id="n-currentevents" class="mw-list-item"><a href="/wiki/Portal:Current_events" title="Articles related to current events"><span>Current events</span></a></li><li id="n-randompage" class="mw-list-item"><a href="/wiki/Special:Random" title="Visit a randomly selected article [x]" accesskey="x"><span>Random article</span></a></li><li id="n-aboutsite" class="mw-list-item"><a href="/wiki/Wikipedia:About" title="Learn about Wikipedia and how it works"><span>About Wikipedia</span></a></li><li id="n-contactpage" class="mw-list-item"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us" title="How to contact Wikipedia"><span>Contact us</span></a></li> </ul> </div> </div> <div id="p-interaction" class="vector-menu mw-portlet mw-portlet-interaction" > <div class="vector-menu-heading"> Contribute </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-help" class="mw-list-item"><a href="/wiki/Help:Contents" title="Guidance on how to use and edit Wikipedia"><span>Help</span></a></li><li id="n-introduction" class="mw-list-item"><a href="/wiki/Help:Introduction" title="Learn how to edit Wikipedia"><span>Learn to edit</span></a></li><li id="n-portal" class="mw-list-item"><a href="/wiki/Wikipedia:Community_portal" title="The hub for editors"><span>Community portal</span></a></li><li id="n-recentchanges" class="mw-list-item"><a href="/wiki/Special:RecentChanges" title="A list of recent changes to Wikipedia [r]" accesskey="r"><span>Recent changes</span></a></li><li id="n-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_upload_wizard" title="Add images or other media for use on 
Wikipedia"><span>Upload file</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> <a href="/wiki/Main_Page" class="mw-logo"> <img class="mw-logo-icon" src="/static/images/icons/wikipedia.png" alt="" aria-hidden="true" height="50" width="50"> <span class="mw-logo-container skin-invert"> <img class="mw-logo-wordmark" alt="Wikipedia" src="/static/images/mobile/copyright/wikipedia-wordmark-en.svg" style="width: 7.5em; height: 1.125em;"> <img class="mw-logo-tagline" alt="The Free Encyclopedia" src="/static/images/mobile/copyright/wikipedia-tagline-en.svg" width="117" height="13" style="width: 7.3125em; height: 0.8125em;"> </span> </a> </div> <div class="vector-header-end"> <div id="p-search" role="search" class="vector-search-box-vue vector-search-box-collapses vector-search-box-show-thumbnail vector-search-box-auto-expand-width vector-search-box"> <a href="/wiki/Special:Search" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only search-toggle" title="Search Wikipedia [f]" accesskey="f"><span class="vector-icon mw-ui-icon-search mw-ui-icon-wikimedia-search"></span> <span>Search</span> </a> <div class="vector-typeahead-search-container"> <div class="cdx-typeahead-search cdx-typeahead-search--show-thumbnail cdx-typeahead-search--auto-expand-width"> <form action="/w/index.php" id="searchform" class="cdx-search-input cdx-search-input--has-end-button"> <div id="simpleSearch" class="cdx-search-input__input-wrapper" data-search-loc="header-moved"> <div class="cdx-text-input cdx-text-input--has-start-icon"> <input class="cdx-text-input__input" type="search" name="search" placeholder="Search Wikipedia" aria-label="Search Wikipedia" autocapitalize="sentences" title="Search Wikipedia [f]" accesskey="f" id="searchInput" > <span class="cdx-text-input__icon cdx-text-input__start-icon"></span> </div> <input type="hidden" name="title" value="Special:Search"> </div> <button class="cdx-button 
cdx-search-input__end-button">Search</button> </form> </div> </div> </div> <nav class="vector-user-links vector-user-links-wide" aria-label="Personal tools"> <div class="vector-user-links-main"> <div id="p-vector-user-menu-preferences" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-userpage" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-dropdown" class="vector-dropdown " title="Change the appearance of the page&#039;s font size, width, and color" > <input type="checkbox" id="vector-appearance-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-appearance-dropdown" class="vector-dropdown-checkbox " aria-label="Appearance" > <label id="vector-appearance-dropdown-label" for="vector-appearance-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-appearance mw-ui-icon-wikimedia-appearance"></span> <span class="vector-dropdown-label-text">Appearance</span> </label> <div class="vector-dropdown-content"> <div id="vector-appearance-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <div id="p-vector-user-menu-notifications" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-overflow" class="vector-menu mw-portlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" 
href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en" class=""><span>Donate</span></a> </li> <li id="pt-createaccount-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:CreateAccount&amp;returnto=Generalized+additive+model" title="You are encouraged to create an account and log in; however, it is not mandatory" class=""><span>Create account</span></a> </li> <li id="pt-login-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:UserLogin&amp;returnto=Generalized+additive+model" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o" class=""><span>Log in</span></a> </li> </ul> </div> </div> </div> <div id="vector-user-links-dropdown" class="vector-dropdown vector-user-menu vector-button-flush-right vector-user-menu-logged-out" title="Log in and more options" > <input type="checkbox" id="vector-user-links-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-user-links-dropdown" class="vector-dropdown-checkbox " aria-label="Personal tools" > <label id="vector-user-links-dropdown-label" for="vector-user-links-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-ellipsis mw-ui-icon-wikimedia-ellipsis"></span> <span class="vector-dropdown-label-text">Personal tools</span> </label> <div class="vector-dropdown-content"> <div id="p-personal" class="vector-menu mw-portlet mw-portlet-personal user-links-collapsible-item" title="User menu" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport" 
class="user-links-collapsible-item mw-list-item"><a href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en"><span>Donate</span></a></li><li id="pt-createaccount" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:CreateAccount&amp;returnto=Generalized+additive+model" title="You are encouraged to create an account and log in; however, it is not mandatory"><span class="vector-icon mw-ui-icon-userAdd mw-ui-icon-wikimedia-userAdd"></span> <span>Create account</span></a></li><li id="pt-login" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:UserLogin&amp;returnto=Generalized+additive+model" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o"><span class="vector-icon mw-ui-icon-logIn mw-ui-icon-wikimedia-logIn"></span> <span>Log in</span></a></li> </ul> </div> </div> <div id="p-user-menu-anon-editor" class="vector-menu mw-portlet mw-portlet-user-menu-anon-editor" > <div class="vector-menu-heading"> Pages for logged out editors <a href="/wiki/Help:Introduction" aria-label="Learn more about editing"><span>learn more</span></a> </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-anoncontribs" class="mw-list-item"><a href="/wiki/Special:MyContributions" title="A list of edits made from this IP address [y]" accesskey="y"><span>Contributions</span></a></li><li id="pt-anontalk" class="mw-list-item"><a href="/wiki/Special:MyTalk" title="Discussion about edits from this IP address [n]" accesskey="n"><span>Talk</span></a></li> </ul> </div> </div> </div> </div> </nav> </div> </header> </div> <div class="mw-page-container"> <div class="mw-page-container-inner"> <div class="vector-sitenotice-container"> <div id="siteNotice"><!-- CentralNotice --></div> </div> <div class="vector-column-start"> <div 
class="vector-main-menu-container"> <div id="mw-navigation"> <nav id="mw-panel" class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-pinned-container" class="vector-pinned-container"> </div> </nav> </div> </div> <div class="vector-sticky-pinned-container"> <nav id="mw-panel-toc" aria-label="Contents" data-event-name="ui.sidebar-toc" class="mw-table-of-contents-container vector-toc-landmark"> <div id="vector-toc-pinned-container" class="vector-pinned-container"> <div id="vector-toc" class="vector-toc vector-pinnable-element"> <div class="vector-pinnable-header vector-toc-pinnable-header vector-pinnable-header-pinned" data-feature-name="toc-pinned" data-pinnable-element-id="vector-toc" > <h2 class="vector-pinnable-header-label">Contents</h2> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-toc.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-toc.unpin">hide</button> </div> <ul class="vector-toc-contents" id="mw-panel-toc-list"> <li id="toc-mw-content-text" class="vector-toc-list-item vector-toc-level-1"> <a href="#" class="vector-toc-link"> <div class="vector-toc-text">(Top)</div> </a> </li> <li id="toc-Theoretical_background" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Theoretical_background"> <div class="vector-toc-text"> <span class="vector-toc-numb">1</span> <span>Theoretical background</span> </div> </a> <ul id="toc-Theoretical_background-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Generality" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Generality"> <div class="vector-toc-text"> <span class="vector-toc-numb">2</span> <span>Generality</span> </div> </a> <ul id="toc-Generality-sublist" 
class="vector-toc-list"> </ul> </li> <li id="toc-GAM_fitting_methods" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#GAM_fitting_methods"> <div class="vector-toc-text"> <span class="vector-toc-numb">3</span> <span>GAM fitting methods</span> </div> </a> <ul id="toc-GAM_fitting_methods-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-The_rank_reduced_framework" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#The_rank_reduced_framework"> <div class="vector-toc-text"> <span class="vector-toc-numb">4</span> <span>The rank reduced framework</span> </div> </a> <button aria-controls="toc-The_rank_reduced_framework-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle The rank reduced framework subsection</span> </button> <ul id="toc-The_rank_reduced_framework-sublist" class="vector-toc-list"> <li id="toc-Bayesian_smoothing_priors" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Bayesian_smoothing_priors"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.1</span> <span>Bayesian smoothing priors</span> </div> </a> <ul id="toc-Bayesian_smoothing_priors-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Smoothing_parameter_estimation" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Smoothing_parameter_estimation"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.2</span> <span>Smoothing parameter estimation</span> </div> </a> <ul id="toc-Smoothing_parameter_estimation-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Software" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Software"> <div class="vector-toc-text"> <span 
class="vector-toc-numb">5</span> <span>Software</span> </div> </a> <ul id="toc-Software-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Model_checking" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Model_checking"> <div class="vector-toc-text"> <span class="vector-toc-numb">6</span> <span>Model checking</span> </div> </a> <ul id="toc-Model_checking-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Model_selection" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Model_selection"> <div class="vector-toc-text"> <span class="vector-toc-numb">7</span> <span>Model selection</span> </div> </a> <ul id="toc-Model_selection-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Caveats" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Caveats"> <div class="vector-toc-text"> <span class="vector-toc-numb">8</span> <span>Caveats</span> </div> </a> <ul id="toc-Caveats-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-See_also" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#See_also"> <div class="vector-toc-text"> <span class="vector-toc-numb">9</span> <span>See also</span> </div> </a> <ul id="toc-See_also-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-References" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#References"> <div class="vector-toc-text"> <span class="vector-toc-numb">10</span> <span>References</span> </div> </a> <ul id="toc-References-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-External_links" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#External_links"> <div class="vector-toc-text"> <span class="vector-toc-numb">11</span> 
<span>External links</span> </div> </a> <ul id="toc-External_links-sublist" class="vector-toc-list"> </ul> </li> </ul> </div> </div> </nav> </div> </div> <div class="mw-content-container"> <main id="content" class="mw-body"> <header class="mw-body-header vector-page-titlebar"> <nav aria-label="Contents" class="vector-toc-landmark"> <div id="vector-page-titlebar-toc" class="vector-dropdown vector-page-titlebar-toc vector-button-flush-left" > <input type="checkbox" id="vector-page-titlebar-toc-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-titlebar-toc" class="vector-dropdown-checkbox " aria-label="Toggle the table of contents" > <label id="vector-page-titlebar-toc-label" for="vector-page-titlebar-toc-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-listBullet mw-ui-icon-wikimedia-listBullet"></span> <span class="vector-dropdown-label-text">Toggle the table of contents</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-titlebar-toc-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <h1 id="firstHeading" class="firstHeading mw-first-heading"><span class="mw-page-title-main">Generalized additive model</span></h1> <div id="p-lang-btn" class="vector-dropdown mw-portlet mw-portlet-lang" > <input type="checkbox" id="p-lang-btn-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-p-lang-btn" class="vector-dropdown-checkbox mw-interlanguage-selector" aria-label="Go to an article in another language. 
Available in 4 languages" > <label id="p-lang-btn-label" for="p-lang-btn-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--action-progressive mw-portlet-lang-heading-4" aria-hidden="true" ><span class="vector-icon mw-ui-icon-language-progressive mw-ui-icon-wikimedia-language-progressive"></span> <span class="vector-dropdown-label-text">4 languages</span> </label> <div class="vector-dropdown-content"> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li class="interlanguage-link interwiki-ca mw-list-item"><a href="https://ca.wikipedia.org/wiki/Model_additiu_generalitzat" title="Model additiu generalitzat – Catalan" lang="ca" hreflang="ca" data-title="Model additiu generalitzat" data-language-autonym="Català" data-language-local-name="Catalan" class="interlanguage-link-target"><span>Català</span></a></li><li class="interlanguage-link interwiki-et mw-list-item"><a href="https://et.wikipedia.org/wiki/%C3%9Cldistatud_aditiivne_meetod" title="Üldistatud aditiivne meetod – Estonian" lang="et" hreflang="et" data-title="Üldistatud aditiivne meetod" data-language-autonym="Eesti" data-language-local-name="Estonian" class="interlanguage-link-target"><span>Eesti</span></a></li><li class="interlanguage-link interwiki-fr mw-list-item"><a href="https://fr.wikipedia.org/wiki/Mod%C3%A8le_additif_g%C3%A9n%C3%A9ralis%C3%A9" title="Modèle additif généralisé – French" lang="fr" hreflang="fr" data-title="Modèle additif généralisé" data-language-autonym="Français" data-language-local-name="French" class="interlanguage-link-target"><span>Français</span></a></li><li class="interlanguage-link interwiki-ko mw-list-item"><a href="https://ko.wikipedia.org/wiki/%EC%9D%BC%EB%B0%98%ED%99%94_%EA%B0%80%EB%B2%95_%EB%AA%A8%EB%8D%B8" title="일반화 가법 모델 – Korean" lang="ko" hreflang="ko" data-title="일반화 가법 모델" data-language-autonym="한국어" data-language-local-name="Korean" 
class="interlanguage-link-target"><span>한국어</span></a></li> </ul> <div class="after-portlet after-portlet-lang"><span class="wb-langlinks-edit wb-langlinks-link"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q3318054#sitelinks-wikipedia" title="Edit interlanguage links" class="wbc-editpage">Edit links</a></span></div> </div> </div> </div> </header> <div class="vector-page-toolbar"> <div class="vector-page-toolbar-container"> <div id="left-navigation"> <nav aria-label="Namespaces"> <div id="p-associated-pages" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-associated-pages" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-nstab-main" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Generalized_additive_model" title="View the content page [c]" accesskey="c"><span>Article</span></a></li><li id="ca-talk" class="vector-tab-noicon mw-list-item"><a href="/wiki/Talk:Generalized_additive_model" rel="discussion" title="Discuss improvements to the content page [t]" accesskey="t"><span>Talk</span></a></li> </ul> </div> </div> <div id="vector-variants-dropdown" class="vector-dropdown emptyPortlet" > <input type="checkbox" id="vector-variants-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-variants-dropdown" class="vector-dropdown-checkbox " aria-label="Change language variant" > <label id="vector-variants-dropdown-label" for="vector-variants-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">English</span> </label> <div class="vector-dropdown-content"> <div id="p-variants" class="vector-menu mw-portlet mw-portlet-variants emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> </div> </div> </nav> </div> <div id="right-navigation" class="vector-collapsible"> <nav 
aria-label="Views"> <div id="p-views" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-views" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-view" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Generalized_additive_model"><span>Read</span></a></li><li id="ca-edit" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-history" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;action=history" title="Past revisions of this page [h]" accesskey="h"><span>View history</span></a></li> </ul> </div> </div> </nav> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-dropdown" class="vector-dropdown vector-page-tools-dropdown" > <input type="checkbox" id="vector-page-tools-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-tools-dropdown" class="vector-dropdown-checkbox " aria-label="Tools" > <label id="vector-page-tools-dropdown-label" for="vector-page-tools-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">Tools</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-tools-unpinned-container" class="vector-unpinned-container"> <div id="vector-page-tools" class="vector-page-tools vector-pinnable-element"> <div class="vector-pinnable-header vector-page-tools-pinnable-header vector-pinnable-header-unpinned" data-feature-name="page-tools-pinned" data-pinnable-element-id="vector-page-tools" data-pinned-container-id="vector-page-tools-pinned-container" data-unpinned-container-id="vector-page-tools-unpinned-container" > <div class="vector-pinnable-header-label">Tools</div> <button 
class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-page-tools.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-page-tools.unpin">hide</button> </div> <div id="p-cactions" class="vector-menu mw-portlet mw-portlet-cactions emptyPortlet vector-has-collapsible-items" title="More options" > <div class="vector-menu-heading"> Actions </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-more-view" class="selected vector-more-collapsible-item mw-list-item"><a href="/wiki/Generalized_additive_model"><span>Read</span></a></li><li id="ca-more-edit" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-more-history" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;action=history"><span>View history</span></a></li> </ul> </div> </div> <div id="p-tb" class="vector-menu mw-portlet mw-portlet-tb" > <div class="vector-menu-heading"> General </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-whatlinkshere" class="mw-list-item"><a href="/wiki/Special:WhatLinksHere/Generalized_additive_model" title="List of all English Wikipedia pages containing links to this page [j]" accesskey="j"><span>What links here</span></a></li><li id="t-recentchangeslinked" class="mw-list-item"><a href="/wiki/Special:RecentChangesLinked/Generalized_additive_model" rel="nofollow" title="Recent changes in pages linked from this page [k]" accesskey="k"><span>Related changes</span></a></li><li id="t-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_Upload_Wizard" title="Upload files [u]" accesskey="u"><span>Upload file</span></a></li><li 
id="t-specialpages" class="mw-list-item"><a href="/wiki/Special:SpecialPages" title="A list of all special pages [q]" accesskey="q"><span>Special pages</span></a></li><li id="t-permalink" class="mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;oldid=1256715804" title="Permanent link to this revision of this page"><span>Permanent link</span></a></li><li id="t-info" class="mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;action=info" title="More information about this page"><span>Page information</span></a></li><li id="t-cite" class="mw-list-item"><a href="/w/index.php?title=Special:CiteThisPage&amp;page=Generalized_additive_model&amp;id=1256715804&amp;wpFormIdentifier=titleform" title="Information on how to cite this page"><span>Cite this page</span></a></li><li id="t-urlshortener" class="mw-list-item"><a href="/w/index.php?title=Special:UrlShortener&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FGeneralized_additive_model"><span>Get shortened URL</span></a></li><li id="t-urlshortener-qrcode" class="mw-list-item"><a href="/w/index.php?title=Special:QrCode&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FGeneralized_additive_model"><span>Download QR code</span></a></li> </ul> </div> </div> <div id="p-coll-print_export" class="vector-menu mw-portlet mw-portlet-coll-print_export" > <div class="vector-menu-heading"> Print/export </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="coll-download-as-rl" class="mw-list-item"><a href="/w/index.php?title=Special:DownloadAsPdf&amp;page=Generalized_additive_model&amp;action=show-download-screen" title="Download this page as a PDF file"><span>Download as PDF</span></a></li><li id="t-print" class="mw-list-item"><a href="/w/index.php?title=Generalized_additive_model&amp;printable=yes" title="Printable version of this page [p]" accesskey="p"><span>Printable version</span></a></li> </ul> </div> </div> <div id="p-wikibase-otherprojects" 
class="vector-menu mw-portlet mw-portlet-wikibase-otherprojects" > <div class="vector-menu-heading"> In other projects </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-wikibase" class="wb-otherproject-link wb-otherproject-wikibase-dataitem mw-list-item"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q3318054" title="Structured data on this page hosted by Wikidata [g]" accesskey="g"><span>Wikidata item</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> </div> </div> </div> <div class="vector-column-end"> <div class="vector-sticky-pinned-container"> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-pinned-container" class="vector-pinned-container"> </div> </nav> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-pinned-container" class="vector-pinned-container"> <div id="vector-appearance" class="vector-appearance vector-pinnable-element"> <div class="vector-pinnable-header vector-appearance-pinnable-header vector-pinnable-header-pinned" data-feature-name="appearance-pinned" data-pinnable-element-id="vector-appearance" data-pinned-container-id="vector-appearance-pinned-container" data-unpinned-container-id="vector-appearance-unpinned-container" > <div class="vector-pinnable-header-label">Appearance</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-appearance.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-appearance.unpin">hide</button> </div> </div> </div> </nav> </div> </div> <div id="bodyContent" class="vector-body" aria-labelledby="firstHeading" data-mw-ve-target-container> <div class="vector-body-before-content"> <div class="mw-indicators"> </div> <div id="siteSub" class="noprint">From Wikipedia, the free 
encyclopedia</div> </div> <div id="contentSub"><div id="mw-content-subtitle"></div></div> <div id="mw-content-text" class="mw-body-content"><div class="mw-content-ltr mw-parser-output" lang="en" dir="ltr"><div class="shortdescription nomobile noexcerpt noprint searchaux" style="display:none">Statistics models class</div><p>In <a href="/wiki/Statistics" title="Statistics">statistics</a>, a <b>generalized additive model</b> (<b>GAM</b>) is a <a href="/wiki/Generalized_linear_model" title="Generalized linear model">generalized linear model</a> in which the linear predictor depends linearly on unknown <a href="/wiki/Smooth_function" class="mw-redirect" title="Smooth function">smooth functions</a> of some predictor variables, and interest focuses on inference about these smooth functions. </p><p>GAMs were originally developed by <a href="/wiki/Trevor_Hastie" title="Trevor Hastie">Trevor Hastie</a> and <a href="/wiki/Robert_Tibshirani" title="Robert Tibshirani">Robert Tibshirani</a><sup id="cite_ref-Hastie1990_1-0" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup> to blend properties of <a href="/wiki/Generalized_linear_model" title="Generalized linear model">generalized linear models</a> with <a href="/wiki/Additive_model" title="Additive model">additive models</a>. They can be interpreted as the discriminative generalization of the <a href="/wiki/Naive_Bayes" class="mw-redirect" title="Naive Bayes">naive Bayes</a> generative model.<sup id="cite_ref-2" class="reference"><a href="#cite_note-2"><span class="cite-bracket">&#91;</span>2<span class="cite-bracket">&#93;</span></a></sup> </p><p>The model relates a univariate response variable, <i>Y</i>, to some predictor variables, <i>x</i><sub><i>i</i></sub>. 
An <a href="/wiki/Exponential_family" title="Exponential family">exponential family</a> distribution is specified for Y (for example <a href="/wiki/Normal_distribution" title="Normal distribution">normal</a>, <a href="/wiki/Binomial_distribution" title="Binomial distribution">binomial</a> or <a href="/wiki/Poisson_distribution" title="Poisson distribution">Poisson</a> distributions) along with a <a href="/wiki/Link_function" class="mw-redirect" title="Link function">link function</a> <i>g</i> (for example the identity or log functions) relating the expected value of <i>Y</i> to the predictor variables via a structure such as </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>g</mi> <mo stretchy="false">(</mo> <mi mathvariant="normal">E</mi> <mo>&#x2061;<!-- ⁡ --></mo> <mo stretchy="false">(</mo> <mi>Y</mi> <mo stretchy="false">)</mo> <mo stretchy="false">)</mo> <mo>=</mo> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>0</mn> </mrow> </msub> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <mo>&#x22EF;<!-- ⋯ --></mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>m</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>m</mi> </mrow> </msub> <mo 
stretchy="false">)</mo> <mo>.</mo> <mspace width="thinmathspace" /> <mspace width="negativethinmathspace" /> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/8687b329e733508852acb766d559076d2a8ab205" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; margin-right: -0.387ex; width:49.08ex; height:2.843ex;" alt="{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}"></span></dd></dl> <p>The functions <i>f</i><sub><i>i</i></sub> may be functions with a specified parametric form (for example a polynomial, or an un-penalized regression spline of a variable) or may be specified non-parametrically, or semi-parametrically, simply as 'smooth functions', to be estimated by <a href="/wiki/Nonparametric_regression" title="Nonparametric regression">non-parametric means</a>. So a typical GAM might use a scatterplot smoothing function, such as a locally weighted mean, for <i>f</i><sub>1</sub>(<i>x</i><sub>1</sub>), and then use a factor model for <i>f</i><sub>2</sub>(<i>x</i><sub>2</sub>). This flexibility to allow non-parametric fits with relaxed assumptions on the actual relationship between response and predictor, provides the potential for better fits to data than purely parametric models, but arguably with some loss of interpretability. 
</p> <meta property="mw:PageProp/toc" /> <div class="mw-heading mw-heading2"><h2 id="Theoretical_background">Theoretical background</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=1" title="Edit section: Theoretical background"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>It had been known since the 1950s (via the <a href="/wiki/Kolmogorov%E2%80%93Arnold_representation_theorem" title="Kolmogorov–Arnold representation theorem">Kolmogorov–Arnold representation theorem</a>) that any multivariate continuous function could be represented as sums and compositions of univariate functions, </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f({\vec {x}})=\sum _{q=0}^{2n}\Phi _{q}\left(\sum _{p=1}^{n}\phi _{q,p}(x_{p})\right)}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>f</mi> <mo stretchy="false">(</mo> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>x</mi> <mo stretchy="false">&#x2192;<!-- → --></mo> </mover> </mrow> </mrow> <mo stretchy="false">)</mo> <mo>=</mo> <munderover> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>q</mi> <mo>=</mo> <mn>0</mn> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> <mi>n</mi> </mrow> </munderover> <msub> <mi mathvariant="normal">&#x03A6;<!-- Φ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>q</mi> </mrow> </msub> <mrow> <mo>(</mo> <mrow> <munderover> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>p</mi> <mo>=</mo> <mn>1</mn> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>n</mi> </mrow> </munderover> <msub> <mi>&#x03D5;<!-- ϕ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>q</mi> <mo>,</mo> <mi>p</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> 
<mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>p</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mrow> <mo>)</mo> </mrow> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f({\vec {x}})=\sum _{q=0}^{2n}\Phi _{q}\left(\sum _{p=1}^{n}\phi _{q,p}(x_{p})\right)}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/9dc1fdf3d424eb148c914c786d18a3a13545f99b" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:29.592ex; height:7.676ex;" alt="{\displaystyle f({\vec {x}})=\sum _{q=0}^{2n}\Phi _{q}\left(\sum _{p=1}^{n}\phi _{q,p}(x_{p})\right)}"></span>.</dd></dl> <p>Unfortunately, though the <a href="/wiki/Kolmogorov%E2%80%93Arnold_representation_theorem" title="Kolmogorov–Arnold representation theorem">Kolmogorov–Arnold representation theorem</a> asserts the existence of a function of this form, it gives no mechanism whereby one could be constructed. Certain constructive proofs exist, but they tend to require highly complicated (i.e. fractal) functions, and thus are not suitable for modeling approaches. 
Therefore, the generalized additive model<sup id="cite_ref-Hastie1990_1-1" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup> drops the outer sum, and demands instead that the function belong to a simpler class, </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f({\vec {x}})=\Phi \left(\sum _{p=1}^{n}\phi _{p}(x_{p})\right)}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>f</mi> <mo stretchy="false">(</mo> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>x</mi> <mo stretchy="false">&#x2192;<!-- → --></mo> </mover> </mrow> </mrow> <mo stretchy="false">)</mo> <mo>=</mo> <mi mathvariant="normal">&#x03A6;<!-- Φ --></mi> <mrow> <mo>(</mo> <mrow> <munderover> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>p</mi> <mo>=</mo> <mn>1</mn> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>n</mi> </mrow> </munderover> <msub> <mi>&#x03D5;<!-- ϕ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>p</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>p</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mrow> <mo>)</mo> </mrow> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f({\vec {x}})=\Phi \left(\sum _{p=1}^{n}\phi _{p}(x_{p})\right)}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/c12c37cdc55afa52b3fd6637449d58432a75806f" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:23.647ex; height:7.676ex;" alt="{\displaystyle f({\vec {x}})=\Phi \left(\sum _{p=1}^{n}\phi _{p}(x_{p})\right)}"></span>.</dd></dl> <p>where <span class="mwe-math-element"><span 
class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \Phi }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi mathvariant="normal">&#x03A6;<!-- Φ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \Phi }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/aed80a2011a3912b028ba32a52dfa57165455f24" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.678ex; height:2.176ex;" alt="{\displaystyle \Phi }"></span> is a smooth monotonic function. Writing <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle g}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>g</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle g}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/d3556280e66fe2c0d0140df20935a6f057381d77" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.116ex; height:2.009ex;" alt="{\displaystyle g}"></span> for the inverse of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \Phi }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi mathvariant="normal">&#x03A6;<!-- Φ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \Phi }</annotation> </semantics> </math></span><img 
src="https://wikimedia.org/api/rest_v1/media/math/render/svg/aed80a2011a3912b028ba32a52dfa57165455f24" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.678ex; height:2.176ex;" alt="{\displaystyle \Phi }"></span>, this is traditionally written as </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle g(f({\vec {x}}))=\sum _{i}f_{i}(x_{i})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>g</mi> <mo stretchy="false">(</mo> <mi>f</mi> <mo stretchy="false">(</mo> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>x</mi> <mo stretchy="false">&#x2192;<!-- → --></mo> </mover> </mrow> </mrow> <mo stretchy="false">)</mo> <mo stretchy="false">)</mo> <mo>=</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </munder> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle g(f({\vec {x}}))=\sum _{i}f_{i}(x_{i})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/66c23712d541fc0d91042dc167cb480c099bf443" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.005ex; width:20.061ex; height:5.509ex;" alt="{\displaystyle g(f({\vec {x}}))=\sum _{i}f_{i}(x_{i})}"></span>.</dd></dl> <p>When this function is approximating the expectation of some observed quantity, it could be written as </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math 
xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>g</mi> <mo stretchy="false">(</mo> <mi mathvariant="normal">E</mi> <mo>&#x2061;<!-- ⁡ --></mo> <mo stretchy="false">(</mo> <mi>Y</mi> <mo stretchy="false">)</mo> <mo stretchy="false">)</mo> <mo>=</mo> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>0</mn> </mrow> </msub> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <mo>&#x22EF;<!-- ⋯ --></mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>m</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>m</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>.</mo> <mspace width="thinmathspace" /> <mspace width="negativethinmathspace" /> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/8687b329e733508852acb766d559076d2a8ab205" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; margin-right: -0.387ex; width:49.08ex; height:2.843ex;" alt="{\displaystyle g(\operatorname {E} (Y))=\beta _{0}+f_{1}(x_{1})+f_{2}(x_{2})+\cdots +f_{m}(x_{m}).\,\!}"></span></dd></dl> <p>This is the standard formulation of a generalized additive 
model. It was then shown<sup id="cite_ref-Hastie1990_1-2" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup><sup class="noprint Inline-Template" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Please_clarify" title="Wikipedia:Please clarify"><span title="Please clarify the preceding statement or statements with a good explanation from a reliable source. (July 2019)">how?</span></a></i>&#93;</sup> that the <a href="/wiki/Backfitting_algorithm" title="Backfitting algorithm">backfitting algorithm</a> will always converge for these functions. </p> <div class="mw-heading mw-heading2"><h2 id="Generality">Generality</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=2" title="Edit section: Generality"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The GAM model class is quite broad, given that <i>smooth function</i> is a rather broad category. 
For example, a covariate <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5db47cb3d2f9496205a17a6856c91c1d3d363ccd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.239ex; height:2.343ex;" alt="{\displaystyle x_{j}}"></span> may be multivariate and the corresponding <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> a smooth function of several variables, or <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> 
</msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> might be the function mapping the level of a factor to the value of a random effect. Another example is a varying coefficient (geographic regression) term such as <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle z_{j}f_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>z</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle z_{j}f_{j}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5a89f81b2df5b0b4d7a06c229c15c1d25744f47b" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:8.089ex; height:3.009ex;" alt="{\displaystyle z_{j}f_{j}(x_{j})}"></span> where <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle z_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>z</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation 
encoding="application/x-tex">{\displaystyle z_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/412a06424b2eeb1f51d963bc33fb3bd5c3df5f49" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:1.991ex; height:2.343ex;" alt="{\displaystyle z_{j}}"></span> and <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5db47cb3d2f9496205a17a6856c91c1d3d363ccd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.239ex; height:2.343ex;" alt="{\displaystyle x_{j}}"></span> are both covariates. 
Or if <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}(t)}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <mi>t</mi> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}(t)}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/27aa814b79c9ef68a273b400b57c7021fa2ae07a" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:4.888ex; height:3.009ex;" alt="{\displaystyle x_{j}(t)}"></span> is itself an observation of a function, we might include a term such as <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \int f_{j}(t)x_{j}(t)dt}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mo>&#x222B;<!-- ∫ --></mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <mi>t</mi> <mo stretchy="false">)</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <mi>t</mi> <mo stretchy="false">)</mo> <mi>d</mi> <mi>t</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \int f_{j}(t)x_{j}(t)dt}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/f1c7d249b426decc08e4c525f74e3821491b63c4" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -2.338ex; width:14.222ex; height:5.676ex;" alt="{\displaystyle \int 
f_{j}(t)x_{j}(t)dt}"></span> (sometimes known as a signal regression term). <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> could also be a simple parametric function as might be used in any generalized linear model. The model class has been generalized in several directions, notably beyond exponential family response distributions, beyond modelling of only the mean and beyond univariate data.<sup id="cite_ref-Wood2016_3-0" class="reference"><a href="#cite_note-Wood2016-3"><span class="cite-bracket">&#91;</span>3<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Yee2015_4-0" class="reference"><a href="#cite_note-Yee2015-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Rigby2005_5-0" class="reference"><a href="#cite_note-Rigby2005-5"><span class="cite-bracket">&#91;</span>5<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="GAM_fitting_methods">GAM fitting methods</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=3" title="Edit section: GAM fitting methods"><span>edit</span></a><span 
class="mw-editsection-bracket">]</span></span></div> <p>The original GAM fitting method estimated the smooth components of the model using non-parametric smoothers (for example smoothing splines or local linear regression smoothers) via the <a href="/wiki/Backfitting_algorithm" title="Backfitting algorithm">backfitting algorithm</a>.<sup id="cite_ref-Hastie1990_1-3" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup> Backfitting works by iterative smoothing of partial residuals and provides a very general modular estimation method capable of using a wide variety of smoothing methods to estimate the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b2ad05d2a5db540d7100ae5a2d4b5ac032216317" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:6.098ex; height:3.009ex;" alt="{\displaystyle f_{j}(x_{j})}"></span> terms. A disadvantage of backfitting is that it is difficult to integrate with the estimation of the degree of smoothness of the model terms, so that in practice the user must set these, or select between a modest set of pre-defined smoothing levels. 
</p><p>If the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b2ad05d2a5db540d7100ae5a2d4b5ac032216317" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:6.098ex; height:3.009ex;" alt="{\displaystyle f_{j}(x_{j})}"></span> are represented using <a href="/wiki/Smoothing_spline" title="Smoothing spline">smoothing splines</a><sup id="cite_ref-Wahba1990_6-0" class="reference"><a href="#cite_note-Wahba1990-6"><span class="cite-bracket">&#91;</span>6<span class="cite-bracket">&#93;</span></a></sup> then the degree of smoothness can be estimated as part of model fitting using generalized cross validation, or by <a href="/wiki/Restricted_maximum_likelihood" title="Restricted maximum likelihood">restricted maximum likelihood</a> (REML, sometimes known as 'GML') which exploits the duality between spline smoothers and Gaussian random effects.<sup id="cite_ref-Gu1991_7-0" class="reference"><a href="#cite_note-Gu1991-7"><span class="cite-bracket">&#91;</span>7<span class="cite-bracket">&#93;</span></a></sup> This full spline approach carries an <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle O(n^{3})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> 
<mstyle displaystyle="true" scriptlevel="0"> <mi>O</mi> <mo stretchy="false">(</mo> <msup> <mi>n</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>3</mn> </mrow> </msup> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle O(n^{3})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/6b04f5c5cfea38f43406d9442387ad28555e2609" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:6.032ex; height:3.176ex;" alt="{\displaystyle O(n^{3})}"></span> computational cost, where <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle n}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>n</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle n}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/a601995d55609f2d9f5e233e36fbe9ea26011b3b" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.395ex; height:1.676ex;" alt="{\displaystyle n}"></span> is the number of observations for the response variable, rendering it somewhat impractical for moderately large datasets. 
More recent methods have addressed this computational cost either by up front reduction of the size of the basis used for smoothing (rank reduction)<sup id="cite_ref-Wood2000_8-0" class="reference"><a href="#cite_note-Wood2000-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Fahrmeier2001_9-0" class="reference"><a href="#cite_note-Fahrmeier2001-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-kim2004_10-0" class="reference"><a href="#cite_note-kim2004-10"><span class="cite-bracket">&#91;</span>10<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Wood2017_11-0" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Ruppert2003_12-0" class="reference"><a href="#cite_note-Ruppert2003-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup> or by finding sparse representations of the smooths using <a href="/wiki/Markov_random_field" title="Markov random field">Markov random fields</a>, which are amenable to the use of <a href="/wiki/Sparse_matrix" title="Sparse matrix">sparse matrix</a> methods for computation.<sup id="cite_ref-Rue2009_13-0" class="reference"><a href="#cite_note-Rue2009-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> These more computationally efficient methods use GCV (or AIC or similar) or REML or take a fully Bayesian approach for inference about the degree of smoothness of the model components. Estimating the degree of smoothness via REML can be viewed as an <a href="/wiki/Empirical_Bayes_method" title="Empirical Bayes method">empirical Bayes method</a>. 
</p><p>An alternative approach with particular advantages in high dimensional settings is to use <a href="/wiki/Boosting_(machine_learning)" title="Boosting (machine learning)">boosting</a>, although this typically requires bootstrapping for uncertainty quantification.<sup id="cite_ref-mboost_14-0" class="reference"><a href="#cite_note-mboost-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-mayr2012_15-0" class="reference"><a href="#cite_note-mayr2012-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup> GAMs fit using bagging and boosting have been found to generally outperform GAMs fit using spline methods.<sup id="cite_ref-LouCaruana2012_16-0" class="reference"><a href="#cite_note-LouCaruana2012-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="The_rank_reduced_framework">The rank reduced framework</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=4" title="Edit section: The rank reduced framework"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Many modern implementations of GAMs and their extensions are built around the reduced rank smoothing approach, because it allows well founded estimation of the smoothness of the component smooths at comparatively modest computational cost, and also facilitates implementation of a number of model extensions in a way that is more difficult with other methods. 
At its simplest the idea is to replace the unknown smooth functions in the model with basis expansions </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}(x_{j})=\sum _{k=1}^{K_{j}}\beta _{jk}b_{jk}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>=</mo> <munderover> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>k</mi> <mo>=</mo> <mn>1</mn> </mrow> <mrow class="MJX-TeXAtom-ORD"> <msub> <mi>K</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mrow> </munderover> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> <mi>k</mi> </mrow> </msub> <msub> <mi>b</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> <mi>k</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}(x_{j})=\sum _{k=1}^{K_{j}}\beta _{jk}b_{jk}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/a581e68a10fd0d010d42c1ed9151efaad64915f6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.005ex; width:22.833ex; height:7.676ex;" alt="{\displaystyle f_{j}(x_{j})=\sum _{k=1}^{K_{j}}\beta _{jk}b_{jk}(x_{j})}"></span></dd></dl> <p>where the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle b_{jk}(x_{j})}"> <semantics> <mrow 
class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>b</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> <mi>k</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle b_{jk}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/4dcabade97378bdd7934f9de298c7d9313cd9a68" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:6.812ex; height:3.009ex;" alt="{\displaystyle b_{jk}(x_{j})}"></span> are known basis functions, usually chosen for good approximation theoretic properties (for example <a href="/wiki/B_spline" class="mw-redirect" title="B spline">B splines</a> or reduced rank <a href="/wiki/Thin_plate_spline" title="Thin plate spline">thin plate splines</a>), and the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta _{jk}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> <mi>k</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta _{jk}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/8392b21e3788b15d0367c1ad9d80ad0b8581cbc1" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:3.082ex; height:2.843ex;" alt="{\displaystyle \beta _{jk}}"></span> are coefficients to be estimated as part of model fitting. 
The basis dimension <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle K_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>K</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle K_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/f1d33b5cc4537e660f8ae20089fcab7f25936705" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.883ex; height:2.843ex;" alt="{\displaystyle K_{j}}"></span> is chosen to be sufficiently large that we expect it to overfit the data to hand (thereby avoiding bias from model over-simplification), but small enough to retain computational efficiency. If <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle p=\sum _{j}K_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>p</mi> <mo>=</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>K</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle p=\sum _{j}K_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/361031d9d6c79b488d8e388fce030dcc230ddbc3" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; margin-left: -0.089ex; width:10.982ex; height:5.843ex;" alt="{\displaystyle p=\sum _{j}K_{j}}"></span> then the computational cost of model estimation this way will be <span 
class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle O(np^{2})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>O</mi> <mo stretchy="false">(</mo> <mi>n</mi> <msup> <mi>p</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msup> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle O(np^{2})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/d6cbdf14949c0fd309b0b9e87edf9f26ff8697bd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:7.201ex; height:3.176ex;" alt="{\displaystyle O(np^{2})}"></span>. </p><p>Notice that the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> are only identifiable to within an intercept term (we could add any constant to <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{1}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" 
scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{1}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/50dfd257a51e037112c917f8a9e47c9c053466df" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.193ex; height:2.509ex;" alt="{\displaystyle f_{1}}"></span> while subtracting it from <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{2}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{2}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/cc886fdaa7adc9be11ff4a5076da5e0943bcff58" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.193ex; height:2.509ex;" alt="{\displaystyle f_{2}}"></span> without changing the model predictions at all), so identifiability constraints have to be imposed on the smooth terms to remove this ambiguity. 
Sharpest inference about the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> is generally obtained by using the sum-to-zero constraints </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \sum _{i}f_{j}(x_{ji})=0}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </munder> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>=</mo> <mn>0</mn> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \sum _{i}f_{j}(x_{ji})=0}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/a8ae6c6906e14b67a846dff5c9ab82f3c2860ee7" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.005ex; width:14.668ex; height:5.509ex;" alt="{\displaystyle \sum _{i}f_{j}(x_{ji})=0}"></span></dd></dl> <p>i.e. 
by insisting that the sum of each of the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> evaluated at its observed covariate values should be zero. Such linear constraints can most easily be imposed by reparametrization at the basis setup stage,<sup id="cite_ref-Wood2017_11-1" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> so below it is assumed that this has been done. 
</p><p>Having replaced all the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> in the model with such basis expansions we have turned the GAM into a <a href="/wiki/Generalized_linear_model" title="Generalized linear model">generalized linear model</a> (GLM), with a model matrix that simply contains the basis functions evaluated at the observed <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5db47cb3d2f9496205a17a6856c91c1d3d363ccd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.239ex; height:2.343ex;" alt="{\displaystyle x_{j}}"></span> values. 
However, because the basis dimensions, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle K_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>K</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle K_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/f1d33b5cc4537e660f8ae20089fcab7f25936705" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.883ex; height:2.843ex;" alt="{\displaystyle K_{j}}"></span>, have been chosen to be somewhat larger than is believed to be necessary for the data, the model is over-parameterized and will overfit the data if estimated as a regular GLM. The solution to this problem is to penalize departure from smoothness in the model fitting process, controlling the weight given to the smoothing penalties using smoothing parameters. For example, consider the situation in which all the smooths are univariate functions. 
Writing all the parameters in one vector, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; height:2.509ex;" alt="{\displaystyle \beta }"></span>, suppose that <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle D(\beta )}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>D</mi> <mo stretchy="false">(</mo> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle D(\beta )}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/20ea525675001bb1e9428f0d2f10a2364ca272c2" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:5.066ex; height:2.843ex;" alt="{\displaystyle D(\beta )}"></span> is the deviance (twice the difference between saturated log likelihood and the model log likelihood) for the model. 
Minimizing the deviance by the usual iteratively re-weighted least squares would result in overfit, so we seek <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; height:2.509ex;" alt="{\displaystyle \beta }"></span> to minimize </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle D(\beta )+\sum _{j}\lambda _{j}\int f_{j}^{\prime \prime }(x)^{2}dx}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>D</mi> <mo stretchy="false">(</mo> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">)</mo> <mo>+</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo>&#x222B;<!-- ∫ --></mo> <msubsup> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi class="MJX-variant" mathvariant="normal">&#x2032;<!-- ′ --></mi> <mi class="MJX-variant" mathvariant="normal">&#x2032;<!-- ′ --></mi> </mrow> </msubsup> <mo stretchy="false">(</mo> <mi>x</mi> <msup> <mo stretchy="false">)</mo> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msup> <mi>d</mi> <mi>x</mi> </mstyle> </mrow> <annotation 
encoding="application/x-tex">{\displaystyle D(\beta )+\sum _{j}\lambda _{j}\int f_{j}^{\prime \prime }(x)^{2}dx}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/de0300fef115584cb7d4ce4b8f5f1144f5c96e42" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:26.077ex; height:6.676ex;" alt="{\displaystyle D(\beta )+\sum _{j}\lambda _{j}\int f_{j}^{\prime \prime }(x)^{2}dx}"></span></dd></dl> <p>where the integrated square second derivative penalties serve to penalize wiggliness (lack of smoothness) of the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> during fitting, and the smoothing parameters <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \lambda _{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \lambda _{j}}</annotation> </semantics> </math></span><img 
src="https://wikimedia.org/api/rest_v1/media/math/render/svg/fa91daf9145f27bb95746fd2a37537342d587b77" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.265ex; height:2.843ex;" alt="{\displaystyle \lambda _{j}}"></span> control the tradeoff between model goodness of fit and model smoothness. In the example <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \lambda _{j}\to \infty }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">&#x2192;<!-- → --></mo> <mi mathvariant="normal">&#x221E;<!-- ∞ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \lambda _{j}\to \infty }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/8067058c8cd712a39e87bf90a92690bfcc920e76" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:8.203ex; height:2.843ex;" alt="{\displaystyle \lambda _{j}\to \infty }"></span> would ensure that the estimate of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}(x_{j})}</annotation> </semantics> </math></span><img 
src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b2ad05d2a5db540d7100ae5a2d4b5ac032216317" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:6.098ex; height:3.009ex;" alt="{\displaystyle f_{j}(x_{j})}"></span> would be a straight line in <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5db47cb3d2f9496205a17a6856c91c1d3d363ccd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.239ex; height:2.343ex;" alt="{\displaystyle x_{j}}"></span>. 
</p><p>Given the basis expansion for each <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> the wiggliness penalties can be expressed as <a href="/wiki/Quadratic_form" title="Quadratic form">quadratic forms</a> in the model coefficients.<sup id="cite_ref-Wood2017_11-2" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> That is we can write </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \int f_{j}^{\prime \prime }(x)^{2}dx=\beta _{j}^{T}{\bar {S}}_{j}\beta _{j}=\beta ^{T}S_{j}\beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mo>&#x222B;<!-- ∫ --></mo> <msubsup> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi class="MJX-variant" mathvariant="normal">&#x2032;<!-- ′ --></mi> <mi class="MJX-variant" mathvariant="normal">&#x2032;<!-- ′ --></mi> </mrow> </msubsup> <mo stretchy="false">(</mo> <mi>x</mi> <msup> <mo stretchy="false">)</mo> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msup> <mi>d</mi> <mi>x</mi> <mo>=</mo> <msubsup> 
<mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msubsup> <msub> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>S</mi> <mo stretchy="false">&#x00AF;<!-- ¯ --></mo> </mover> </mrow> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo>=</mo> <msup> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \int f_{j}^{\prime \prime }(x)^{2}dx=\beta _{j}^{T}{\bar {S}}_{j}\beta _{j}=\beta ^{T}S_{j}\beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/9d52089fc9a302cc888a83b1c27b78306f0969a6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -2.338ex; width:31.801ex; height:5.676ex;" alt="{\displaystyle \int f_{j}^{\prime \prime }(x)^{2}dx=\beta _{j}^{T}{\bar {S}}_{j}\beta _{j}=\beta ^{T}S_{j}\beta }"></span>,</dd></dl> <p>where <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\bar {S}}_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>S</mi> <mo stretchy="false">&#x00AF;<!-- ¯ --></mo> </mover> </mrow> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\bar {S}}_{j}}</annotation> </semantics> </math></span><img 
src="https://wikimedia.org/api/rest_v1/media/math/render/svg/41bed21373e38c0229a70c2aed6771a726f51aa5" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.482ex; height:3.343ex;" alt="{\displaystyle {\bar {S}}_{j}}"></span> is a matrix of known coefficients computable from the penalty and basis, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta _{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta _{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/83edf0558c67ad56ca5c05096b550bd733d62c4b" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.225ex; height:2.843ex;" alt="{\displaystyle \beta _{j}}"></span> is the vector of coefficients for <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span>, and <span 
class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle S_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle S_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/222db49df2eefdb67737ba2d2dbd221a1bae0bf0" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.335ex; height:2.843ex;" alt="{\displaystyle S_{j}}"></span> is just <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\bar {S}}_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>S</mi> <mo stretchy="false">&#x00AF;<!-- ¯ --></mo> </mover> </mrow> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\bar {S}}_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/41bed21373e38c0229a70c2aed6771a726f51aa5" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.482ex; height:3.343ex;" alt="{\displaystyle {\bar {S}}_{j}}"></span> padded with zeros so that the second equality holds and we can write the penalty in terms of the full coefficient vector <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" 
alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; height:2.509ex;" alt="{\displaystyle \beta }"></span>. Many other smoothing penalties can be written in the same way, and given the smoothing parameters the model fitting problem now becomes </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {\beta }}={\text{argmin}}_{\beta }\{D(\beta )+\sum _{j}\lambda _{j}\beta ^{T}S_{j}\beta \}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> <mo>=</mo> <msub> <mrow class="MJX-TeXAtom-ORD"> <mtext>argmin</mtext> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03B2;<!-- β --></mi> </mrow> </msub> <mo fence="false" stretchy="false">{</mo> <mi>D</mi> <mo stretchy="false">(</mo> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">)</mo> <mo>+</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msup> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mi>&#x03B2;<!-- β --></mi> <mo fence="false" stretchy="false">}</mo> </mstyle> 
</mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {\beta }}={\text{argmin}}_{\beta }\{D(\beta )+\sum _{j}\lambda _{j}\beta ^{T}S_{j}\beta \}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/0877606222a5688cec48063b9006a44d7cb750ab" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:35.466ex; height:5.843ex;" alt="{\displaystyle {\hat {\beta }}={\text{argmin}}_{\beta }\{D(\beta )+\sum _{j}\lambda _{j}\beta ^{T}S_{j}\beta \}}"></span>,</dd></dl> <p>which can be found using a penalized version of the usual <a href="/wiki/Iteratively_reweighted_least_squares" title="Iteratively reweighted least squares">iteratively reweighted least squares</a> (IRLS) algorithm for GLMs: the algorithm is unchanged except that the sum of quadratic penalties is added to the working least squares objective at each iteration of the algorithm. </p><p>Penalization has several effects on inference, relative to a regular GLM. For one thing, the estimates are subject to some smoothing bias, which is the price that must be paid for limiting estimator variance by penalization. However, if smoothing parameters are selected appropriately the (squared) smoothing bias introduced by penalization should be less than the reduction in variance that it produces, so that the net effect is a reduction in mean square estimation error, relative to not penalizing. A related effect of penalization is that the notion of degrees of freedom of a model has to be modified to account for the penalties' action in reducing the coefficients' freedom to vary. 
For example, if <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle W}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>W</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle W}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/54a9c4c547f4d6111f81946cad242b18298d70b7" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:2.435ex; height:2.176ex;" alt="{\displaystyle W}"></span> is the diagonal matrix of IRLS weights at convergence, and <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle X}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>X</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle X}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/68baa052181f707c662844a465bfeeb135e82bab" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.98ex; height:2.176ex;" alt="{\displaystyle X}"></span> is the GAM model matrix, then the model effective degrees of freedom is given by <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\text{trace}}(F)}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mtext>trace</mtext> </mrow> <mo stretchy="false">(</mo> <mi>F</mi> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation 
encoding="application/x-tex">{\displaystyle {\text{trace}}(F)}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/edefd97b6b8c77eb916ac2a38b459c2b38312e01" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:8.594ex; height:2.843ex;" alt="{\displaystyle {\text{trace}}(F)}"></span> where </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle F=(X^{T}WX+\sum _{j}\lambda _{j}S_{j})^{-1}X^{T}WX}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>F</mi> <mo>=</mo> <mo stretchy="false">(</mo> <msup> <mi>X</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <mi>W</mi> <mi>X</mi> <mo>+</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msup> <mo stretchy="false">)</mo> <mrow class="MJX-TeXAtom-ORD"> <mo>&#x2212;<!-- − --></mo> <mn>1</mn> </mrow> </msup> <msup> <mi>X</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <mi>W</mi> <mi>X</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle F=(X^{T}WX+\sum _{j}\lambda _{j}S_{j})^{-1}X^{T}WX}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/56d7379aea04175d8e7df280986c9a909e037404" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:35.766ex; height:5.843ex;" alt="{\displaystyle F=(X^{T}WX+\sum _{j}\lambda _{j}S_{j})^{-1}X^{T}WX}"></span>,</dd></dl> <p>is the effective degrees of freedom matrix.<sup id="cite_ref-Wood2017_11-3" 
class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> In fact summing just the diagonal elements of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle F}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>F</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle F}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/545fd099af8541605f7ee55f08225526be88ce57" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.741ex; height:2.176ex;" alt="{\displaystyle F}"></span> corresponding to the coefficients of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span> gives the effective degrees of freedom for the estimate of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle 
displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span>. </p> <div class="mw-heading mw-heading3"><h3 id="Bayesian_smoothing_priors">Bayesian smoothing priors</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=5" title="Edit section: Bayesian smoothing priors"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Smoothing bias complicates interval estimation for these models, and the simplest approach turns out to involve a Bayesian approach.<sup id="cite_ref-Wahba83_17-0" class="reference"><a href="#cite_note-Wahba83-17"><span class="cite-bracket">&#91;</span>17<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Nychka88_18-0" class="reference"><a href="#cite_note-Nychka88-18"><span class="cite-bracket">&#91;</span>18<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-silverman85_19-0" class="reference"><a href="#cite_note-silverman85-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-marra12_20-0" class="reference"><a href="#cite_note-marra12-20"><span class="cite-bracket">&#91;</span>20<span class="cite-bracket">&#93;</span></a></sup> Understanding this Bayesian view of smoothing also helps to understand the REML and full Bayes approaches to smoothing parameter estimation. 
At some level smoothing penalties are imposed because we believe smooth functions to be more probable than wiggly ones, and if that is true then we might as well formalize this notion by placing a prior on model wiggliness. A very simple prior might be </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \pi (\beta )\propto \exp\{-\beta ^{T}\sum _{j}\lambda _{j}S_{j}\beta /(2\phi )\}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03C0;<!-- π --></mi> <mo stretchy="false">(</mo> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">)</mo> <mo>&#x221D;<!-- ∝ --></mo> <mi>exp</mi> <mo>&#x2061;<!-- ⁡ --></mo> <mo fence="false" stretchy="false">{</mo> <mo>&#x2212;<!-- − --></mo> <msup> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mo>/</mo> </mrow> <mo stretchy="false">(</mo> <mn>2</mn> <mi>&#x03D5;<!-- ϕ --></mi> <mo stretchy="false">)</mo> <mo fence="false" stretchy="false">}</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \pi (\beta )\propto \exp\{-\beta ^{T}\sum _{j}\lambda _{j}S_{j}\beta /(2\phi )\}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/684181c93847b4adbe6f79b809ef31e5ebbe6453" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:33.564ex; height:5.843ex;" alt="{\displaystyle \pi (\beta )\propto \exp\{-\beta ^{T}\sum _{j}\lambda _{j}S_{j}\beta /(2\phi 
)\}}"></span></dd></dl> <p>(where <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \phi }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03D5;<!-- ϕ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \phi }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/72b1f30316670aee6270a28334bdf4f5072cdde4" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.385ex; height:2.509ex;" alt="{\displaystyle \phi }"></span> is the GLM scale parameter introduced only for later convenience), but we can immediately recognize this as a <a href="/wiki/Multivariate_normal" class="mw-redirect" title="Multivariate normal">multivariate normal</a> prior with mean <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle 0}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mn>0</mn> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle 0}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/2aae8864a3c1fec9585261791a809ddec1489950" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.162ex; height:2.176ex;" alt="{\displaystyle 0}"></span> and precision matrix <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle S_{\lambda }=\sum _{j}\lambda _{j}S_{j}/\phi }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle 
displaystyle="true" scriptlevel="0"> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03BB;<!-- λ --></mi> </mrow> </msub> <mo>=</mo> <munder> <mo>&#x2211;<!-- ∑ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </munder> <msub> <mi>&#x03BB;<!-- λ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mrow class="MJX-TeXAtom-ORD"> <mo>/</mo> </mrow> <mi>&#x03D5;<!-- ϕ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle S_{\lambda }=\sum _{j}\lambda _{j}S_{j}/\phi }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/2a2e4e52a0dae10fc3b8b8bbbdfa4d3c5e93e5b8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -3.338ex; width:16.603ex; height:5.843ex;" alt="{\displaystyle S_{\lambda }=\sum _{j}\lambda _{j}S_{j}/\phi }"></span>. Since the penalty allows some functions through unpenalized (straight lines, given the example penalties), <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle S_{\lambda }}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03BB;<!-- λ --></mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle S_{\lambda }}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/4fbc4b9359b89574a56d634084c2f424f96c4b0c" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.615ex; height:2.509ex;" alt="{\displaystyle S_{\lambda }}"></span> is rank deficient, and the prior is actually improper, with a covariance matrix given by the <a 
href="/wiki/Moore%E2%80%93Penrose_pseudoinverse" class="mw-redirect" title="Moore–Penrose pseudoinverse">Moore–Penrose pseudoinverse</a> of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle S_{\lambda }}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03BB;<!-- λ --></mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle S_{\lambda }}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/4fbc4b9359b89574a56d634084c2f424f96c4b0c" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.615ex; height:2.509ex;" alt="{\displaystyle S_{\lambda }}"></span> (the impropriety corresponds to ascribing infinite variance to the unpenalized components of a smooth).<sup id="cite_ref-silverman85_19-1" class="reference"><a href="#cite_note-silverman85-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup> </p><p>Now if this prior is combined with the GLM likelihood, we find that the posterior mode for <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; 
height:2.509ex;" alt="{\displaystyle \beta }"></span> is exactly the <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {\beta }}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {\beta }}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/efdb50e00928e4013750a476dab75eeb3cbd5799" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.451ex; height:3.176ex;" alt="{\displaystyle {\hat {\beta }}}"></span> found above by penalized IRLS.<sup id="cite_ref-silverman85_19-2" class="reference"><a href="#cite_note-silverman85-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Wood2017_11-4" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> Furthermore, we have the large sample result that </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta |y\sim N({\hat {\beta }},(X^{T}WX+S_{\lambda })^{-1}\phi ).}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mo stretchy="false">|</mo> </mrow> <mi>y</mi> <mo>&#x223C;<!-- ∼ --></mo> <mi>N</mi> <mo stretchy="false">(</mo> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> 
<mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> <mo>,</mo> <mo stretchy="false">(</mo> <msup> <mi>X</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>T</mi> </mrow> </msup> <mi>W</mi> <mi>X</mi> <mo>+</mo> <msub> <mi>S</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03BB;<!-- λ --></mi> </mrow> </msub> <msup> <mo stretchy="false">)</mo> <mrow class="MJX-TeXAtom-ORD"> <mo>&#x2212;<!-- − --></mo> <mn>1</mn> </mrow> </msup> <mi>&#x03D5;<!-- ϕ --></mi> <mo stretchy="false">)</mo> <mo>.</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta |y\sim N({\hat {\beta }},(X^{T}WX+S_{\lambda })^{-1}\phi ).}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/36ed3c6fe89496d5937c3eae7fba860bb81fb5d6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:32.023ex; height:3.343ex;" alt="{\displaystyle \beta |y\sim N({\hat {\beta }},(X^{T}WX+S_{\lambda })^{-1}\phi ).}"></span></dd></dl> <p>which can be used to produce confidence/credible intervals for the smooth components, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/acc195ab3f9d65994b47774eb013601d09217aee" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:2.049ex; height:2.843ex;" alt="{\displaystyle f_{j}}"></span>. 
The Gaussian smoothness priors are also the basis for fully Bayesian inference with GAMs,<sup id="cite_ref-Fahrmeier2001_9-1" class="reference"><a href="#cite_note-Fahrmeier2001-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup> as well as methods estimating GAMs as mixed models<sup id="cite_ref-Ruppert2003_12-1" class="reference"><a href="#cite_note-Ruppert2003-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Wood2011_21-0" class="reference"><a href="#cite_note-Wood2011-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup> that are essentially <a href="/wiki/Empirical_Bayes_method" title="Empirical Bayes method">empirical Bayes methods</a>. </p> <div class="mw-heading mw-heading3"><h3 id="Smoothing_parameter_estimation">Smoothing parameter estimation</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=6" title="Edit section: Smoothing parameter estimation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>So far we have treated estimation and inference given the smoothing parameters, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \lambda }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03BB;<!-- λ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \lambda }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b43d0ea3c9c025af1be9128e62a18fa74bedda2a" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.355ex; height:2.176ex;" alt="{\displaystyle 
\lambda }"></span>, but these also need to be estimated. One approach is to take a fully Bayesian approach, defining priors on the (log) smoothing parameters, and using stochastic simulation or high order approximation methods to obtain information about the posterior of the model coefficients.<sup id="cite_ref-Fahrmeier2001_9-2" class="reference"><a href="#cite_note-Fahrmeier2001-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Rue2009_13-1" class="reference"><a href="#cite_note-Rue2009-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> An alternative is to select the smoothing parameters to optimize a prediction error criterion such as Generalized <a href="/wiki/Cross-validation_(statistics)" title="Cross-validation (statistics)">cross validation</a> (GCV) or the <a href="/wiki/Akaike_information_criterion" title="Akaike information criterion">Akaike information criterion</a> (AIC).<sup id="cite_ref-Wood2008_22-0" class="reference"><a href="#cite_note-Wood2008-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup> Finally we may choose to maximize the Marginal Likelihood (REML) obtained by integrating the model coefficients, <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; height:2.509ex;" alt="{\displaystyle 
\beta }"></span> out of the joint density of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta ,y}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> <mo>,</mo> <mi>y</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta ,y}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7eba39b8e5df59fe3f7afe5088cb7dc433593df2" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:3.521ex; height:2.509ex;" alt="{\displaystyle \beta ,y}"></span>, </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {\lambda }}={\text{argmax}}_{\lambda }\int f(y|\beta ,\lambda )\pi (\beta |\lambda )d\beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>&#x03BB;<!-- λ --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> <mo>=</mo> <msub> <mrow class="MJX-TeXAtom-ORD"> <mtext>argmax</mtext> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>&#x03BB;<!-- λ --></mi> </mrow> </msub> <mo>&#x222B;<!-- ∫ --></mo> <mi>f</mi> <mo stretchy="false">(</mo> <mi>y</mi> <mrow class="MJX-TeXAtom-ORD"> <mo stretchy="false">|</mo> </mrow> <mi>&#x03B2;<!-- β --></mi> <mo>,</mo> <mi>&#x03BB;<!-- λ --></mi> <mo stretchy="false">)</mo> <mi>&#x03C0;<!-- π --></mi> <mo stretchy="false">(</mo> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mo stretchy="false">|</mo> </mrow> <mi>&#x03BB;<!-- λ --></mi> <mo stretchy="false">)</mo> <mi>d</mi> <mi>&#x03B2;<!-- β --></mi> 
</mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {\lambda }}={\text{argmax}}_{\lambda }\int f(y|\beta ,\lambda )\pi (\beta |\lambda )d\beta }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/3d10cd7cce9ef169fbb5963f326ad0b6f12bd407" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -2.338ex; width:33.809ex; height:5.676ex;" alt="{\displaystyle {\hat {\lambda }}={\text{argmax}}_{\lambda }\int f(y|\beta ,\lambda )\pi (\beta |\lambda )d\beta }"></span>.</dd></dl> <p>Since <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f(y|\beta ,\lambda )}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>f</mi> <mo stretchy="false">(</mo> <mi>y</mi> <mrow class="MJX-TeXAtom-ORD"> <mo stretchy="false">|</mo> </mrow> <mi>&#x03B2;<!-- β --></mi> <mo>,</mo> <mi>&#x03BB;<!-- λ --></mi> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f(y|\beta ,\lambda )}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/78b1385bf89c5350d388a15ac33077fbda9fd4db" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:8.611ex; height:2.843ex;" alt="{\displaystyle f(y|\beta ,\lambda )}"></span> is just the likelihood of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \beta }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03B2;<!-- β --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \beta }</annotation> 
</semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/7ed48a5e36207156fb792fa79d29925d2f7901e8" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.332ex; height:2.509ex;" alt="{\displaystyle \beta }"></span>, we can view this as choosing <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle \lambda }"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>&#x03BB;<!-- λ --></mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle \lambda }</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b43d0ea3c9c025af1be9128e62a18fa74bedda2a" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.355ex; height:2.176ex;" alt="{\displaystyle \lambda }"></span> to maximize the average likelihood of random draws from the prior. The preceding integral is usually analytically intractable but can be approximated to quite high accuracy using <a href="/wiki/Laplace%27s_method" title="Laplace&#39;s method">Laplace's method</a>.<sup id="cite_ref-Wood2011_21-1" class="reference"><a href="#cite_note-Wood2011-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup> </p><p>Smoothing parameter inference is the most computationally taxing part of model estimation/inference. 
For example, to optimize a GCV or marginal likelihood typically requires numerical optimization via a Newton or Quasi-Newton method, with each trial value for the (log) smoothing parameter vector requiring a penalized IRLS iteration to evaluate the corresponding <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {\beta }}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {\beta }}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/efdb50e00928e4013750a476dab75eeb3cbd5799" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.451ex; height:3.176ex;" alt="{\displaystyle {\hat {\beta }}}"></span> alongside the other ingredients of the GCV score or Laplace approximate marginal likelihood (LAML). 
Furthermore, to obtain the derivatives of the GCV or LAML, required for optimization, involves implicit differentiation to obtain the derivatives of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {\beta }}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>&#x03B2;<!-- β --></mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {\beta }}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/efdb50e00928e4013750a476dab75eeb3cbd5799" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:1.451ex; height:3.176ex;" alt="{\displaystyle {\hat {\beta }}}"></span> w.r.t. 
the log smoothing parameters, and this requires some care if efficiency and numerical stability are to be maintained.<sup id="cite_ref-Wood2011_21-2" class="reference"><a href="#cite_note-Wood2011-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Software">Software</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=7" title="Edit section: Software"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Backfit GAMs were originally provided by the <code>gam</code> function in S,<sup id="cite_ref-whitebook_23-0" class="reference"><a href="#cite_note-whitebook-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> now ported to the <a href="/wiki/R_language" class="mw-redirect" title="R language">R language</a> as the <code>gam</code> package. The SAS proc <code>GAM</code> also provides backfit GAMs. The recommended package in R for GAMs is <code>mgcv</code>, which stands for <i>mixed GAM computational vehicle</i>,<sup id="cite_ref-Wood2017_11-5" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> which is based on the reduced rank approach with automatic smoothing parameter selection. The SAS proc <code>GAMPL</code> is an alternative implementation. In Python, there is the PyGAM package, with similar features to R's mgcv. Alternatively, there is the <code>InterpretML</code> package, which implements a bagging and boosting approach.<sup id="cite_ref-Nori2019_24-0" class="reference"><a href="#cite_note-Nori2019-24"><span class="cite-bracket">&#91;</span>24<span class="cite-bracket">&#93;</span></a></sup> There are many alternative packages. 
Examples include the R packages <code>mboost</code>,<sup id="cite_ref-mboost_14-1" class="reference"><a href="#cite_note-mboost-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup> which implements a boosting approach; <code>gss</code>, which provides the full spline smoothing methods;<sup id="cite_ref-Gu2013_25-0" class="reference"><a href="#cite_note-Gu2013-25"><span class="cite-bracket">&#91;</span>25<span class="cite-bracket">&#93;</span></a></sup> <code>VGAM</code> which provides vector GAMs;<sup id="cite_ref-Yee2015_4-1" class="reference"><a href="#cite_note-Yee2015-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup> and <code>gamlss</code>, which provides <a href="/wiki/Generalized_additive_model_for_location,_scale_and_shape" title="Generalized additive model for location, scale and shape">Generalized additive model for location, scale and shape</a>. <code>BayesX</code> and its R interface provides GAMs and extensions via MCMC and penalized likelihood methods.<sup id="cite_ref-bayesx_26-0" class="reference"><a href="#cite_note-bayesx-26"><span class="cite-bracket">&#91;</span>26<span class="cite-bracket">&#93;</span></a></sup> The <code>INLA</code> software implements a fully Bayesian approach based on Markov random field representations exploiting sparse matrix methods.<sup id="cite_ref-Rue2009_13-2" class="reference"><a href="#cite_note-Rue2009-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> </p><p>As an example of how models can be estimated in practice with software, consider R package <code>mgcv</code>. 
Suppose that our R workspace contains vectors <i>y</i>, <i>x</i> and <i>z</i> and we want to estimate the model </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle y_{i}=\beta _{0}+f_{1}(x_{i})+f_{2}(z_{i})+\epsilon _{i}{\text{ where }}\epsilon _{i}\sim N(0,\sigma ^{2}).}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>y</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>=</mo> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>0</mn> </mrow> </msub> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>z</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <msub> <mi>&#x03F5;<!-- ϵ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mrow class="MJX-TeXAtom-ORD"> <mtext>&#xA0;where&#xA0;</mtext> </mrow> <msub> <mi>&#x03F5;<!-- ϵ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>&#x223C;<!-- ∼ --></mo> <mi>N</mi> <mo stretchy="false">(</mo> <mn>0</mn> <mo>,</mo> <msup> <mi>&#x03C3;<!-- σ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msup> <mo stretchy="false">)</mo> <mo>.</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle y_{i}=\beta _{0}+f_{1}(x_{i})+f_{2}(z_{i})+\epsilon _{i}{\text{ where }}\epsilon _{i}\sim N(0,\sigma ^{2}).}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/f53e6f4fed2bec3cc6e5ed9e7cb9388a572026c7" class="mwe-math-fallback-image-inline mw-invert 
skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:50.74ex; height:3.176ex;" alt="{\displaystyle y_{i}=\beta _{0}+f_{1}(x_{i})+f_{2}(z_{i})+\epsilon _{i}{\text{ where }}\epsilon _{i}\sim N(0,\sigma ^{2}).}"></span></dd></dl> <p>Within R we could issue the commands </p> <pre>library(mgcv) # load the package b = gam(y ~ s(x) + s(z)) </pre> <p>In common with most R modelling functions <code>gam</code> expects a model formula to be supplied, specifying the model structure to fit. The response variable is given to the left of the <code>~</code> while the specification of the linear predictor is given to the right. <code>gam</code> sets up bases and penalties for the smooth terms, estimates the model including its smoothing parameters and, in standard R fashion, returns a <i>fitted model object</i>, which can then be interrogated using various helper functions, such as <code>summary</code>, <code>plot</code>, <code>predict</code>, and <code>AIC</code>. </p><p>This simple example has used several default settings which it is important to be aware of. For example a Gaussian distribution and identity link has been assumed, and the smoothing parameter selection criterion was GCV. Also the smooth terms were represented using `penalized thin plate regression splines', and the basis dimension for each was set to 10 (implying a maximum of 9 degrees of freedom after identifiability constraints have been imposed). A second example illustrates how we can control these things. 
Suppose that we want to estimate the model </p> <dl><dd><span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle y_{i}\sim {\text{Poi}}(\mu _{i}){\text{ where }}\log \mu _{i}=\beta _{0}+\beta _{1}x_{i}+f_{1}(t_{i})+f_{2}(v_{i},w_{i}).}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>y</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>&#x223C;<!-- ∼ --></mo> <mrow class="MJX-TeXAtom-ORD"> <mtext>Poi</mtext> </mrow> <mo stretchy="false">(</mo> <msub> <mi>&#x03BC;<!-- μ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mrow class="MJX-TeXAtom-ORD"> <mtext>&#xA0;where&#xA0;</mtext> </mrow> <mi>log</mi> <mo>&#x2061;<!-- ⁡ --></mo> <msub> <mi>&#x03BC;<!-- μ --></mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>=</mo> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>0</mn> </mrow> </msub> <mo>+</mo> <msub> <mi>&#x03B2;<!-- β --></mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>t</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>+</mo> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>v</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo>,</mo> <msub> <mi>w</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>i</mi> </mrow> </msub> <mo stretchy="false">)</mo> <mo>.</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle y_{i}\sim {\text{Poi}}(\mu _{i}){\text{ where }}\log \mu _{i}=\beta _{0}+\beta 
_{1}x_{i}+f_{1}(t_{i})+f_{2}(v_{i},w_{i}).}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/dee3c6badb8b0cda96062d6daf119db0f48f3f3f" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.838ex; width:59.701ex; height:2.843ex;" alt="{\displaystyle y_{i}\sim {\text{Poi}}(\mu _{i}){\text{ where }}\log \mu _{i}=\beta _{0}+\beta _{1}x_{i}+f_{1}(t_{i})+f_{2}(v_{i},w_{i}).}"></span></dd></dl> <p>using REML smoothing parameter selection, and we expect <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{1}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>1</mn> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{1}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/50dfd257a51e037112c917f8a9e47c9c053466df" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.193ex; height:2.509ex;" alt="{\displaystyle f_{1}}"></span> to be a relatively complicated function which we would like to model with a penalized cubic regression spline. 
For <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{2}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mn>2</mn> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{2}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/cc886fdaa7adc9be11ff4a5076da5e0943bcff58" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.671ex; width:2.193ex; height:2.509ex;" alt="{\displaystyle f_{2}}"></span> we also have to decide whether <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle v}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>v</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle v}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/e07b00e7fc0847fbd16391c778d65bc25c452597" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.128ex; height:1.676ex;" alt="{\displaystyle v}"></span> and <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle w}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>w</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle w}</annotation> </semantics> </math></span><img 
src="https://wikimedia.org/api/rest_v1/media/math/render/svg/88b1e0c8e1be5ebe69d18a8010676fa42d7961e6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.664ex; height:1.676ex;" alt="{\displaystyle w}"></span> are naturally on the same scale so that an isotropic smoother such as <a href="/wiki/Thin_plate_spline" title="Thin plate spline">thin plate spline</a> is appropriate (specified via `s(v,w)'), or whether they are really on different scales so that we need separate smoothing penalties and smoothing parameters for <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle v}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>v</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle v}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/e07b00e7fc0847fbd16391c778d65bc25c452597" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.128ex; height:1.676ex;" alt="{\displaystyle v}"></span> and <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle w}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>w</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle w}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/88b1e0c8e1be5ebe69d18a8010676fa42d7961e6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:1.664ex; height:1.676ex;" alt="{\displaystyle w}"></span> as provided by a 
tensor product smoother. Suppose we opted for the latter in this case, then the following R code would estimate the model </p> <pre>b1 = gam(y ~ x + s(t,bs="cr",k=100) + te(v,w),family=poisson,method="REML") </pre> <p>which uses a basis size of 100 for the smooth of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle t}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <mi>t</mi> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle t}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/65658b7b223af9e1acc877d848888ecdb4466560" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -0.338ex; width:0.84ex; height:2.009ex;" alt="{\displaystyle t}"></span>. The specification of distribution and link function uses the `family' objects that are standard when fitting GLMs in R or S. Note that Gaussian random effects can also be added to the linear predictor. 
</p><p>These examples are only intended to give a very basic flavour of the way that GAM software is used, for more detail refer to the software documentation for the various packages and the references below.<sup id="cite_ref-Wood2017_11-6" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Gu2013_25-1" class="reference"><a href="#cite_note-Gu2013-25"><span class="cite-bracket">&#91;</span>25<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Yee2015_4-2" class="reference"><a href="#cite_note-Yee2015-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-whitebook_23-1" class="reference"><a href="#cite_note-whitebook-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-mboost_14-2" class="reference"><a href="#cite_note-mboost-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-bayesx_26-1" class="reference"><a href="#cite_note-bayesx-26"><span class="cite-bracket">&#91;</span>26<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Model_checking">Model checking</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=8" title="Edit section: Model checking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>As with any statistical model it is important to check the model assumptions of a GAM. Residual plots should be examined in the same way as for any GLM. That is deviance residuals (or other standardized residuals) should be examined for patterns that might suggest a substantial violation of the independence or mean-variance assumptions of the model. 
This will usually involve plotting the standardized residuals against fitted values and covariates to look for mean-variance problems or missing pattern, and may also involve examining <a href="/wiki/Correlogram" title="Correlogram">Correlograms</a> (ACFs) and/or <a href="/wiki/Variogram" title="Variogram">Variograms</a> of the residuals to check for violation of independence. If the model mean-variance relationship is correct then scaled residuals should have roughly constant variance. Note that since GLMs and GAMs can be estimated using <a href="/wiki/Quasi-likelihood" title="Quasi-likelihood">Quasi-likelihood</a>, it follows that details of the distribution of the residuals beyond the mean-variance relationship are of relatively minor importance. </p><p>One issue that is more common with GAMs than with other GLMs is a danger of falsely concluding that data are zero inflated. The difficulty arises when data contain many zeroes that can be modelled by a Poisson or binomial with a very low expected value: the flexibility of the GAM structure will often allow representation of a very low mean over some region of covariate space, but the distribution of standardized residuals will fail to look anything like the approximate normality that introductory GLM classes teach us to expect, even if the model is perfectly correct.<sup id="cite_ref-Augustin2012_27-0" class="reference"><a href="#cite_note-Augustin2012-27"><span class="cite-bracket">&#91;</span>27<span class="cite-bracket">&#93;</span></a></sup> </p><p>The one extra check that GAMs introduce is the need to check that the degrees of freedom chosen are appropriate. This is particularly acute when using methods that do not automatically estimate the smoothness of model components. 
When using methods with automatic smoothing parameter selection then it is still necessary to check that the choice of basis dimension was not restrictively small, although if the effective degrees of freedom of a term estimate is comfortably below its basis dimension then this is unlikely. In any case, checking <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle f_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>f</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle f_{j}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/b2ad05d2a5db540d7100ae5a2d4b5ac032216317" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; width:6.098ex; height:3.009ex;" alt="{\displaystyle f_{j}(x_{j})}"></span> is based on examining pattern in the residuals with respect to <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle x_{j}}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle x_{j}}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/5db47cb3d2f9496205a17a6856c91c1d3d363ccd" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.005ex; 
width:2.239ex; height:2.343ex;" alt="{\displaystyle x_{j}}"></span>. This can be done using partial residuals overlaid on the plot of <span class="mwe-math-element"><span class="mwe-math-mathml-inline mwe-math-mathml-a11y" style="display: none;"><math xmlns="http://www.w3.org/1998/Math/MathML" alttext="{\displaystyle {\hat {f}}_{j}(x_{j})}"> <semantics> <mrow class="MJX-TeXAtom-ORD"> <mstyle displaystyle="true" scriptlevel="0"> <msub> <mrow class="MJX-TeXAtom-ORD"> <mrow class="MJX-TeXAtom-ORD"> <mover> <mi>f</mi> <mo stretchy="false">&#x005E;<!-- ^ --></mo> </mover> </mrow> </mrow> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">(</mo> <msub> <mi>x</mi> <mrow class="MJX-TeXAtom-ORD"> <mi>j</mi> </mrow> </msub> <mo stretchy="false">)</mo> </mstyle> </mrow> <annotation encoding="application/x-tex">{\displaystyle {\hat {f}}_{j}(x_{j})}</annotation> </semantics> </math></span><img src="https://wikimedia.org/api/rest_v1/media/math/render/svg/1eda161f92fb997c9864b1bf32fb7684b9870ac6" class="mwe-math-fallback-image-inline mw-invert skin-invert" aria-hidden="true" style="vertical-align: -1.171ex; width:6.657ex; height:3.676ex;" alt="{\displaystyle {\hat {f}}_{j}(x_{j})}"></span>, or using permutation of the residuals to construct tests for residual pattern. </p> <div class="mw-heading mw-heading2"><h2 id="Model_selection">Model selection</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=9" title="Edit section: Model selection"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>When smoothing parameters are estimated as part of model fitting then much of what would traditionally count as model selection has been absorbed into the fitting process: the smoothing parameters estimation has already selected between a rich family of models of different functional complexity. 
However smoothing parameter estimation does not typically remove a smooth term from the model altogether, because most penalties leave some functions un-penalized (e.g. straight lines are unpenalized by the spline derivative penalty given above). So the question of whether a term should be in the model at all remains. One simple approach to this issue is to add an extra penalty to each smooth term in the GAM, which penalizes the components of the smooth that would otherwise be unpenalized (and only those). Each extra penalty has its own smoothing parameter and estimation then proceeds as before, but now with the possibility that terms will be completely penalized to zero.<sup id="cite_ref-Marra2011_28-0" class="reference"><a href="#cite_note-Marra2011-28"><span class="cite-bracket">&#91;</span>28<span class="cite-bracket">&#93;</span></a></sup> In high dimensional settings then it may make more sense to attempt this task using the <a href="/wiki/Lasso_(statistics)" title="Lasso (statistics)">lasso</a> or <a href="/wiki/Elastic_net_regularization" title="Elastic net regularization">elastic net regularization</a>. Boosting also performs term selection automatically as part of fitting.<sup id="cite_ref-mboost_14-3" class="reference"><a href="#cite_note-mboost-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup> </p><p>An alternative is to use traditional <a href="/wiki/Stepwise_regression" title="Stepwise regression">stepwise regression</a> methods for model selection. This is also the default method when smoothing parameters are not estimated as part of fitting, in which case each smooth term is usually allowed to take one of a small set of pre-defined smoothness levels within the model, and these are selected between in a stepwise fashion. 
Stepwise methods operate by iteratively comparing models with or without particular model terms (or possibly with different levels of term complexity), and require measures of model fit or term significance in order to decide which model to select at each stage. For example, we might use <a href="/wiki/P-value" title="P-value">p-values</a> for testing each term for equality to zero to decide on candidate terms for removal from a model, and we might compare <a href="/wiki/Akaike_information_criterion" title="Akaike information criterion">Akaike information criterion</a> (AIC) values for alternative models. </p><p>P-value computation for smooths is not straightforward, because of the effects of penalization, but approximations are available.<sup id="cite_ref-Hastie1990_1-4" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Wood2017_11-7" class="reference"><a href="#cite_note-Wood2017-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> AIC can be computed in two ways for GAMs. The marginal AIC is based on the Marginal Likelihood (see above) with the model coefficients integrated out. In this case the AIC penalty is based on the number of smoothing parameters (and any variance parameters) in the model. However, because of the well known fact that REML is not comparable between models with different fixed effects structures, we cannot usually use such an AIC to compare models with different smooth terms (since their un-penalized components act like fixed effects). Basing AIC on the marginal likelihood in which only the penalized effects are integrated out is possible (the number of un-penalized coefficients now gets added to the parameter count for the AIC penalty), but this version of the marginal likelihood suffers from the tendency to oversmooth that provided the original motivation for developing REML.
Given these problems GAMs are often compared using the conditional AIC, in which the model likelihood (not marginal likelihood) is used in the AIC, and the parameter count is taken as the effective degrees of freedom of the model.<sup id="cite_ref-Hastie1990_1-5" class="reference"><a href="#cite_note-Hastie1990-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Wood2008_22-1" class="reference"><a href="#cite_note-Wood2008-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup> </p><p>Naive versions of the conditional AIC have been shown to be much too likely to select larger models in some circumstances, a difficulty attributable to neglect of smoothing parameter uncertainty when computing the effective degrees of freedom,<sup id="cite_ref-Greven2010_29-0" class="reference"><a href="#cite_note-Greven2010-29"><span class="cite-bracket">&#91;</span>29<span class="cite-bracket">&#93;</span></a></sup> however correcting the effective degrees of freedom for this problem restores reasonable performance.<sup id="cite_ref-Wood2016_3-1" class="reference"><a href="#cite_note-Wood2016-3"><span class="cite-bracket">&#91;</span>3<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Caveats">Caveats</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=10" title="Edit section: Caveats"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Overfitting" title="Overfitting">Overfitting</a> can be a problem with GAMs,<sup id="cite_ref-Wood2008_22-2" class="reference"><a href="#cite_note-Wood2008-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup> especially if there is un-modelled residual auto-correlation or un-modelled <a href="/wiki/Overdispersion" 
title="Overdispersion">overdispersion</a>. <a href="/wiki/Cross-validation_(statistics)" title="Cross-validation (statistics)">Cross-validation</a> can be used to detect and/or reduce overfitting problems with GAMs (or other statistical methods),<sup id="cite_ref-30" class="reference"><a href="#cite_note-30"><span class="cite-bracket">&#91;</span>30<span class="cite-bracket">&#93;</span></a></sup> and software often allows the level of penalization to be increased to force smoother fits. Estimating very large numbers of smoothing parameters is also likely to be statistically challenging, and there are known tendencies for prediction error criteria (GCV, AIC etc.) to occasionally undersmooth substantially, particularly at moderate sample sizes, with REML being somewhat less problematic in this regard.<sup id="cite_ref-Reiss2009_31-0" class="reference"><a href="#cite_note-Reiss2009-31"><span class="cite-bracket">&#91;</span>31<span class="cite-bracket">&#93;</span></a></sup> </p><p>Where appropriate, simpler models such as <a href="/wiki/Generalized_linear_model" title="Generalized linear model">GLMs</a> may be preferable to GAMs unless GAMs improve predictive ability substantially (in validation sets) for the application in question. 
</p> <div class="mw-heading mw-heading2"><h2 id="See_also">See also</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=11" title="Edit section: See also"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><a href="/wiki/Additive_model" title="Additive model">Additive model</a></li> <li><a href="/wiki/Backfitting_algorithm" title="Backfitting algorithm">Backfitting algorithm</a></li> <li><a href="/wiki/Generalized_additive_model_for_location,_scale_and_shape" title="Generalized additive model for location, scale and shape">Generalized additive model for location, scale and shape</a> (GAMLSS)</li> <li><a href="/wiki/Residual_effective_degrees_of_freedom" class="mw-redirect" title="Residual effective degrees of freedom">Residual effective degrees of freedom</a></li> <li><a href="/wiki/Semiparametric_regression" title="Semiparametric regression">Semiparametric regression</a></li></ul> <div class="mw-heading mw-heading2"><h2 id="References">References</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=12" title="Edit section: References"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1239543626">.mw-parser-output .reflist{margin-bottom:0.5em;list-style-type:decimal}@media screen{.mw-parser-output .reflist{font-size:90%}}.mw-parser-output .reflist .references{font-size:100%;margin-bottom:0;list-style-type:inherit}.mw-parser-output .reflist-columns-2{column-width:30em}.mw-parser-output .reflist-columns-3{column-width:25em}.mw-parser-output .reflist-columns{margin-top:0.3em}.mw-parser-output .reflist-columns ol{margin-top:0}.mw-parser-output .reflist-columns li{page-break-inside:avoid;break-inside:avoid-column}.mw-parser-output 
.reflist-upper-alpha{list-style-type:upper-alpha}.mw-parser-output .reflist-upper-roman{list-style-type:upper-roman}.mw-parser-output .reflist-lower-alpha{list-style-type:lower-alpha}.mw-parser-output .reflist-lower-greek{list-style-type:lower-greek}.mw-parser-output .reflist-lower-roman{list-style-type:lower-roman}</style><div class="reflist"> <div class="mw-references-wrap mw-references-columns"><ol class="references"> <li id="cite_note-Hastie1990-1"><span class="mw-cite-backlink">^ <a href="#cite_ref-Hastie1990_1-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Hastie1990_1-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Hastie1990_1-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-Hastie1990_1-3"><sup><i><b>d</b></i></sup></a> <a href="#cite_ref-Hastie1990_1-4"><sup><i><b>e</b></i></sup></a> <a href="#cite_ref-Hastie1990_1-5"><sup><i><b>f</b></i></sup></a></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1238218222">.mw-parser-output cite.citation{font-style:inherit;word-wrap:break-word}.mw-parser-output .citation q{quotes:"\"""\"""'""'"}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}.mw-parser-output .id-lock-free.id-lock-free a{background:url("//upload.wikimedia.org/wikipedia/commons/6/65/Lock-green.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-limited.id-lock-limited a,.mw-parser-output .id-lock-registration.id-lock-registration a{background:url("//upload.wikimedia.org/wikipedia/commons/d/d6/Lock-gray-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-subscription.id-lock-subscription a{background:url("//upload.wikimedia.org/wikipedia/commons/a/aa/Lock-red-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .cs1-ws-icon a{background:url("//upload.wikimedia.org/wikipedia/commons/4/4c/Wikisource-logo.svg")right 0.1em center/12px no-repeat}body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-free 
a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-limited a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-registration a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-subscription a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .cs1-ws-icon a{background-size:contain;padding:0 1em 0 0}.mw-parser-output .cs1-code{color:inherit;background:inherit;border:none;padding:inherit}.mw-parser-output .cs1-hidden-error{display:none;color:var(--color-error,#d33)}.mw-parser-output .cs1-visible-error{color:var(--color-error,#d33)}.mw-parser-output .cs1-maint{display:none;color:#085;margin-left:0.3em}.mw-parser-output .cs1-kern-left{padding-left:0.2em}.mw-parser-output .cs1-kern-right{padding-right:0.2em}.mw-parser-output .citation .mw-selflink{font-weight:inherit}@media screen{.mw-parser-output .cs1-format{font-size:95%}html.skin-theme-clientpref-night .mw-parser-output .cs1-maint{color:#18911f}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .cs1-maint{color:#18911f}}</style><cite id="CITEREFHastieTibshirani1990" class="citation book cs1">Hastie, T. J.; Tibshirani, R. J. (1990). <i>Generalized Additive Models</i>. Chapman &amp; Hall/CRC. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-0-412-34390-2" title="Special:BookSources/978-0-412-34390-2"><bdi>978-0-412-34390-2</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Generalized+Additive+Models&amp;rft.pub=Chapman+%26+Hall%2FCRC&amp;rft.date=1990&amp;rft.isbn=978-0-412-34390-2&amp;rft.aulast=Hastie&amp;rft.aufirst=T.+J.&amp;rft.au=Tibshirani%2C+R.+J.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-2"><span class="mw-cite-backlink"><b><a href="#cite_ref-2">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRubinsteinHastie1997" class="citation journal cs1">Rubinstein, Y. Dan; Hastie, Trevor (1997-08-14). <a rel="nofollow" class="external text" href="https://dl.acm.org/doi/abs/10.5555/3001392.3001401">"Discriminative vs informative learning"</a>. <i>Proceedings of the Third International Conference on Knowledge Discovery and Data Mining</i>. KDD'97. 
Newport Beach, CA: AAAI Press: 49–53.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+the+Third+International+Conference+on+Knowledge+Discovery+and+Data+Mining&amp;rft.atitle=Discriminative+vs+informative+learning&amp;rft.pages=49-53&amp;rft.date=1997-08-14&amp;rft.aulast=Rubinstein&amp;rft.aufirst=Y.+Dan&amp;rft.au=Hastie%2C+Trevor&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fdoi%2Fabs%2F10.5555%2F3001392.3001401&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wood2016-3"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wood2016_3-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wood2016_3-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWoodPyaSaefken2016" class="citation journal cs1">Wood, S. N.; Pya, N.; Saefken, B. (2016). "Smoothing parameter and model selection for general smooth models (with discussion)". <i><a href="/wiki/Journal_of_the_American_Statistical_Association" title="Journal of the American Statistical Association">Journal of the American Statistical Association</a></i>. <b>111</b> (516): 1548–1575. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1511.03864">1511.03864</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F01621459.2016.1180986">10.1080/01621459.2016.1180986</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:54802107">54802107</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+American+Statistical+Association&amp;rft.atitle=Smoothing+parameter+and+model+selection+for+general+smooth+models+%28with+discussion%29&amp;rft.volume=111&amp;rft.issue=516&amp;rft.pages=1548-1575&amp;rft.date=2016&amp;rft_id=info%3Aarxiv%2F1511.03864&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A54802107%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1080%2F01621459.2016.1180986&amp;rft.aulast=Wood&amp;rft.aufirst=S.+N.&amp;rft.au=Pya%2C+N.&amp;rft.au=Saefken%2C+B.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Yee2015-4"><span class="mw-cite-backlink">^ <a href="#cite_ref-Yee2015_4-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Yee2015_4-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Yee2015_4-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFYee2015" class="citation book cs1">Yee, Thomas (2015). <i>Vector generalized linear and additive models</i>. Springer. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-4939-2817-0" title="Special:BookSources/978-1-4939-2817-0"><bdi>978-1-4939-2817-0</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Vector+generalized+linear+and+additive+models&amp;rft.pub=Springer&amp;rft.date=2015&amp;rft.isbn=978-1-4939-2817-0&amp;rft.aulast=Yee&amp;rft.aufirst=Thomas&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Rigby2005-5"><span class="mw-cite-backlink"><b><a href="#cite_ref-Rigby2005_5-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRigbyStasinopoulos2005" class="citation journal cs1">Rigby, R.A.; Stasinopoulos, D.M. (2005). <a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9876.2005.00510.x">"Generalized additive models for location, scale and shape (with discussion)"</a>. <i>Journal of the Royal Statistical Society, Series C</i>. <b>54</b> (3): 507–554. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9876.2005.00510.x">10.1111/j.1467-9876.2005.00510.x</a></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+C&amp;rft.atitle=Generalized+additive+models+for+location%2C+scale+and+shape+%28with+discussion%29&amp;rft.volume=54&amp;rft.issue=3&amp;rft.pages=507-554&amp;rft.date=2005&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9876.2005.00510.x&amp;rft.aulast=Rigby&amp;rft.aufirst=R.A.&amp;rft.au=Stasinopoulos%2C+D.M.&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1111%252Fj.1467-9876.2005.00510.x&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wahba1990-6"><span class="mw-cite-backlink"><b><a href="#cite_ref-Wahba1990_6-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWahba" class="citation book cs1">Wahba, Grace. <i>Spline Models for Observational Data</i>. SIAM.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Spline+Models+for+Observational+Data&amp;rft.pub=SIAM&amp;rft.aulast=Wahba&amp;rft.aufirst=Grace&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Gu1991-7"><span class="mw-cite-backlink"><b><a href="#cite_ref-Gu1991_7-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGuWahba1991" class="citation journal cs1">Gu, C.; Wahba, G. (1991). 
<a rel="nofollow" class="external text" href="http://pages.stat.wisc.edu/~wahba/ftp1/oldie/gu.wahba.mult.1991.pdf">"Minimizing GCV/GML scores with multiple smoothing parameters via the Newton method"</a> <span class="cs1-format">(PDF)</span>. <i>SIAM Journal on Scientific and Statistical Computing</i>. <b>12</b> (2): 383–398. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1137%2F0912021">10.1137/0912021</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=SIAM+Journal+on+Scientific+and+Statistical+Computing&amp;rft.atitle=Minimizing+GCV%2FGML+scores+with+multiple+smoothing+parameters+via+the+Newton+method&amp;rft.volume=12&amp;rft.issue=2&amp;rft.pages=383-398&amp;rft.date=1991&amp;rft_id=info%3Adoi%2F10.1137%2F0912021&amp;rft.aulast=Gu&amp;rft.aufirst=C.&amp;rft.au=Wahba%2C+G.&amp;rft_id=http%3A%2F%2Fpages.stat.wisc.edu%2F~wahba%2Fftp1%2Foldie%2Fgu.wahba.mult.1991.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wood2000-8"><span class="mw-cite-backlink"><b><a href="#cite_ref-Wood2000_8-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWood2000" class="citation journal cs1">Wood, S. N. (2000). <a rel="nofollow" class="external text" href="http://opus.bath.ac.uk/16633/1/mspfinal.pdf">"Modelling and smoothing parameter estimation with multiple quadratic penalties"</a> <span class="cs1-format">(PDF)</span>. <i><a href="/wiki/Journal_of_the_Royal_Statistical_Society" title="Journal of the Royal Statistical Society">Journal of the Royal Statistical Society</a></i>. Series B. <b>62</b> (2): 413–428. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2F1467-9868.00240">10.1111/1467-9868.00240</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:15500664">15500664</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society&amp;rft.atitle=Modelling+and+smoothing+parameter+estimation+with+multiple+quadratic+penalties&amp;rft.volume=62&amp;rft.issue=2&amp;rft.pages=413-428&amp;rft.date=2000&amp;rft_id=info%3Adoi%2F10.1111%2F1467-9868.00240&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A15500664%23id-name%3DS2CID&amp;rft.aulast=Wood&amp;rft.aufirst=S.+N.&amp;rft_id=http%3A%2F%2Fopus.bath.ac.uk%2F16633%2F1%2Fmspfinal.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Fahrmeier2001-9"><span class="mw-cite-backlink">^ <a href="#cite_ref-Fahrmeier2001_9-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Fahrmeier2001_9-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Fahrmeier2001_9-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFahrmeierLang2001" class="citation journal cs1">Fahrmeier, L.; Lang, S. (2001). "Bayesian Inference for Generalized Additive Mixed Models based on Markov Random Field Priors". <i>Journal of the Royal Statistical Society, Series C</i>. <b>50</b> (2): 201–220. 
<a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.304.8706">10.1.1.304.8706</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2F1467-9876.00229">10.1111/1467-9876.00229</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:18074478">18074478</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+C&amp;rft.atitle=Bayesian+Inference+for+Generalized+Additive+Mixed+Models+based+on+Markov+Random+Field+Priors&amp;rft.volume=50&amp;rft.issue=2&amp;rft.pages=201-220&amp;rft.date=2001&amp;rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.304.8706%23id-name%3DCiteSeerX&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A18074478%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1111%2F1467-9876.00229&amp;rft.aulast=Fahrmeier&amp;rft.aufirst=L.&amp;rft.au=Lang%2C+S.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-kim2004-10"><span class="mw-cite-backlink"><b><a href="#cite_ref-kim2004_10-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKimGu2004" class="citation journal cs1">Kim, Y.J.; Gu, C. (2004). 
<a rel="nofollow" class="external text" href="https://doi.org/10.1046%2Fj.1369-7412.2003.05316.x">"Smoothing spline Gaussian regression: more scalable computation via efficient approximation"</a>. <i>Journal of the Royal Statistical Society, Series B</i>. <b>66</b> (2): 337–356. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1046%2Fj.1369-7412.2003.05316.x">10.1046/j.1369-7412.2003.05316.x</a></span>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:41334749">41334749</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Smoothing+spline+Gaussian+regression%3A+more+scalable+computation+via+efficient+approximation&amp;rft.volume=66&amp;rft.issue=2&amp;rft.pages=337-356&amp;rft.date=2004&amp;rft_id=info%3Adoi%2F10.1046%2Fj.1369-7412.2003.05316.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A41334749%23id-name%3DS2CID&amp;rft.aulast=Kim&amp;rft.aufirst=Y.J.&amp;rft.au=Gu%2C+C.&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1046%252Fj.1369-7412.2003.05316.x&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wood2017-11"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wood2017_11-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wood2017_11-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Wood2017_11-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-Wood2017_11-3"><sup><i><b>d</b></i></sup></a> <a href="#cite_ref-Wood2017_11-4"><sup><i><b>e</b></i></sup></a> <a href="#cite_ref-Wood2017_11-5"><sup><i><b>f</b></i></sup></a> <a 
href="#cite_ref-Wood2017_11-6"><sup><i><b>g</b></i></sup></a> <a href="#cite_ref-Wood2017_11-7"><sup><i><b>h</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWood,_S._N.2017" class="citation book cs1">Wood, S. N. (2017). <i>Generalized Additive Models: An Introduction with R (2nd ed)</i>. Chapman &amp; Hall/CRC. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-58488-474-3" title="Special:BookSources/978-1-58488-474-3"><bdi>978-1-58488-474-3</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Generalized+Additive+Models%3A+An+Introduction+with+R+%282nd+ed%29&amp;rft.pub=Chapman+%26+Hall%2FCRC&amp;rft.date=2017&amp;rft.isbn=978-1-58488-474-3&amp;rft.au=Wood%2C+S.+N.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Ruppert2003-12"><span class="mw-cite-backlink">^ <a href="#cite_ref-Ruppert2003_12-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Ruppert2003_12-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRuppertWandCarroll2003" class="citation book cs1">Ruppert, D.; Wand, M.P.; Carroll, R.J. (2003). <i>Semiparametric Regression</i>. 
Cambridge University Press.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Semiparametric+Regression&amp;rft.pub=Cambridge+University+Press&amp;rft.date=2003&amp;rft.aulast=Ruppert&amp;rft.aufirst=D.&amp;rft.au=Wand%2C+M.P.&amp;rft.au=Carroll%2C+R.J.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Rue2009-13"><span class="mw-cite-backlink">^ <a href="#cite_ref-Rue2009_13-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Rue2009_13-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Rue2009_13-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRueMartinoChopin2009" class="citation journal cs1">Rue, H.; Martino, Sara; Chopin, Nicolas (2009). <a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2008.00700.x">"Approximate Bayesian inference for latent Gaussian models by using integrated nested Laplace approximations (with discussion)"</a>. <i>Journal of the Royal Statistical Society, Series B</i>. <b>71</b> (2): 319–392. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2008.00700.x">10.1111/j.1467-9868.2008.00700.x</a></span>. 
<a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/2066%2F75507">2066/75507</a></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Approximate+Bayesian+inference+for+latent+Gaussian+models+by+using+integrated+nested+Laplace+approximations+%28with+discussion%29&amp;rft.volume=71&amp;rft.issue=2&amp;rft.pages=319-392&amp;rft.date=2009&amp;rft_id=info%3Ahdl%2F2066%2F75507&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9868.2008.00700.x&amp;rft.aulast=Rue&amp;rft.aufirst=H.&amp;rft.au=Martino%2C+Sara&amp;rft.au=Chopin%2C+Nicolas&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1111%252Fj.1467-9868.2008.00700.x&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-mboost-14"><span class="mw-cite-backlink">^ <a href="#cite_ref-mboost_14-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-mboost_14-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-mboost_14-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-mboost_14-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchmidHothorn2008" class="citation journal cs1">Schmid, M.; Hothorn, T. (2008). "Boosting additive models using component-wise P-splines". <i>Computational Statistics and Data Analysis</i>. <b>53</b> (2): 298–311. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.csda.2008.09.009">10.1016/j.csda.2008.09.009</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Computational+Statistics+and+Data+Analysis&amp;rft.atitle=Boosting+additive+models+using+component-wise+P-splines&amp;rft.volume=53&amp;rft.issue=2&amp;rft.pages=298-311&amp;rft.date=2008&amp;rft_id=info%3Adoi%2F10.1016%2Fj.csda.2008.09.009&amp;rft.aulast=Schmid&amp;rft.aufirst=M.&amp;rft.au=Hothorn%2C+T.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-mayr2012-15"><span class="mw-cite-backlink"><b><a href="#cite_ref-mayr2012_15-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMayrFenskeHofnerKneib2012" class="citation journal cs1">Mayr, A.; Fenske, N.; Hofner, B.; Kneib, T.; Schmid, M. (2012). "Generalized additive models for location, scale and shape for high dimensional data - a flexible approach based on boosting". <i>Journal of the Royal Statistical Society, Series C</i>. <b>61</b> (3): 403–427. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9876.2011.01033.x">10.1111/j.1467-9876.2011.01033.x</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:123646605">123646605</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+C&amp;rft.atitle=Generalized+additive+models+for+location%2C+scale+and+shape+for+high+dimensional+data+-+a+flexible+approach+based+on+boosting&amp;rft.volume=61&amp;rft.issue=3&amp;rft.pages=403-427&amp;rft.date=2012&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9876.2011.01033.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A123646605%23id-name%3DS2CID&amp;rft.aulast=Mayr&amp;rft.aufirst=A.&amp;rft.au=Fenske%2C+N.&amp;rft.au=Hofner%2C+B.&amp;rft.au=Kneib%2C+T.&amp;rft.au=Schmid%2C+M.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-LouCaruana2012-16"><span class="mw-cite-backlink"><b><a href="#cite_ref-LouCaruana2012_16-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLouCaruanaGehrke2012" class="citation book cs1">Lou, Yin; Caruana, Rich; Gehrke, Johannes (2012). "Intelligible models for classification and regression". <i>Proceedings of the 18th ACM SIGKDD international conference on Knowledge discovery and data mining - KDD '12</i>. p.&#160;150. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F2339530.2339556">10.1145/2339530.2339556</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781450314626" title="Special:BookSources/9781450314626"><bdi>9781450314626</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:7715182">7715182</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Intelligible+models+for+classification+and+regression&amp;rft.btitle=Proceedings+of+the+18th+ACM+SIGKDD+international+conference+on+Knowledge+discovery+and+data+mining+-+KDD+%2712&amp;rft.pages=150&amp;rft.date=2012&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A7715182%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F2339530.2339556&amp;rft.isbn=9781450314626&amp;rft.aulast=Lou&amp;rft.aufirst=Yin&amp;rft.au=Caruana%2C+Rich&amp;rft.au=Gehrke%2C+Johannes&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wahba83-17"><span class="mw-cite-backlink"><b><a href="#cite_ref-Wahba83_17-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWahba1983" class="citation journal cs1">Wahba, G. (1983). <a rel="nofollow" class="external text" href="http://www.stat.ucla.edu/~cocteau/stat204/readings/wahba1.pdf">"Bayesian Confidence Intervals for the Cross Validated Smoothing Spline"</a> <span class="cs1-format">(PDF)</span>. <i>Journal of the Royal Statistical Society, Series B</i>. 
<b>45</b>: 133–150.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Bayesian+Confidence+Intervals+for+the+Cross+Validated+Smoothing+Spline&amp;rft.volume=45&amp;rft.pages=133-150&amp;rft.date=1983&amp;rft.aulast=Wahba&amp;rft.aufirst=G.&amp;rft_id=http%3A%2F%2Fwww.stat.ucla.edu%2F~cocteau%2Fstat204%2Freadings%2Fwahba1.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Nychka88-18"><span class="mw-cite-backlink"><b><a href="#cite_ref-Nychka88_18-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNychka1988" class="citation journal cs1">Nychka, D. (1988). "Bayesian confidence intervals for smoothing splines". <i>Journal of the American Statistical Association</i>. <b>83</b> (404): 1134–1143. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F01621459.1988.10478711">10.1080/01621459.1988.10478711</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+American+Statistical+Association&amp;rft.atitle=Bayesian+confidence+intervals+for+smoothing+splines&amp;rft.volume=83&amp;rft.issue=404&amp;rft.pages=1134-1143&amp;rft.date=1988&amp;rft_id=info%3Adoi%2F10.1080%2F01621459.1988.10478711&amp;rft.aulast=Nychka&amp;rft.aufirst=D.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-silverman85-19"><span class="mw-cite-backlink">^ <a href="#cite_ref-silverman85_19-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-silverman85_19-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-silverman85_19-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSilverman1985" class="citation journal cs1">Silverman, B.W. (1985). <a rel="nofollow" class="external text" href="http://www-personal.umich.edu/~jizhu/jizhu/wuke/Silverman-JRSSB85.pdf">"Some Aspects of the Spline Smoothing Approach to Non-Parametric Regression Curve Fitting (with discussion)"</a> <span class="cs1-format">(PDF)</span>. <i>Journal of the Royal Statistical Society, Series B</i>. 
<b>47</b>: 1–53.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Some+Aspects+of+the+Spline+Smoothing+Approach+to+Non-Parametric+Regression+Curve+Fitting+%28with+discussion%29&amp;rft.volume=47&amp;rft.pages=1-53&amp;rft.date=1985&amp;rft.aulast=Silverman&amp;rft.aufirst=B.W.&amp;rft_id=http%3A%2F%2Fwww-personal.umich.edu%2F~jizhu%2Fjizhu%2Fwuke%2FSilverman-JRSSB85.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-marra12-20"><span class="mw-cite-backlink"><b><a href="#cite_ref-marra12_20-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMarraWood2012" class="citation journal cs1">Marra, G.; Wood, S.N. (2012). <a rel="nofollow" class="external text" href="http://opus.bath.ac.uk/26662/1/MarraWoodCI.pdf">"Coverage properties of confidence intervals for generalized additive model components"</a> <span class="cs1-format">(PDF)</span>. <i>Scandinavian Journal of Statistics</i>. <b>39</b>: 53–74. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9469.2011.00760.x">10.1111/j.1467-9469.2011.00760.x</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:49393564">49393564</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Scandinavian+Journal+of+Statistics&amp;rft.atitle=Coverage+properties+of+confidence+intervals+for+generalized+additive+model+components&amp;rft.volume=39&amp;rft.pages=53-74&amp;rft.date=2012&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9469.2011.00760.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A49393564%23id-name%3DS2CID&amp;rft.aulast=Marra&amp;rft.aufirst=G.&amp;rft.au=Wood%2C+S.N.&amp;rft_id=http%3A%2F%2Fopus.bath.ac.uk%2F26662%2F1%2FMarraWoodCI.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wood2011-21"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wood2011_21-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wood2011_21-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Wood2011_21-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWood2011" class="citation journal cs1">Wood, S.N. (2011). <a rel="nofollow" class="external text" href="http://opus.bath.ac.uk/22707/1/Wood_JRSSB_2011_73_1_3.pdf">"Fast stable restricted maximum likelihood and marginal likelihood estimation of semiparametric generalized linear models"</a> <span class="cs1-format">(PDF)</span>. <i>Journal of the Royal Statistical Society, Series B</i>. <b>73</b>: 3–36. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2010.00749.x">10.1111/j.1467-9868.2010.00749.x</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:123001831">123001831</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Fast+stable+restricted+maximum+likelihood+and+marginal+likelihood+estimation+of+semiparametric+generalized+linear+models&amp;rft.volume=73&amp;rft.pages=3-36&amp;rft.date=2011&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9868.2010.00749.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A123001831%23id-name%3DS2CID&amp;rft.aulast=Wood&amp;rft.aufirst=S.N.&amp;rft_id=http%3A%2F%2Fopus.bath.ac.uk%2F22707%2F1%2FWood_JRSSB_2011_73_1_3.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Wood2008-22"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wood2008_22-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wood2008_22-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Wood2008_22-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWood2008" class="citation journal cs1">Wood, Simon N. (2008). "Fast stable direct fitting and smoothness selection for generalized additive models". <i>Journal of the Royal Statistical Society, Series B</i>. <b>70</b> (3): 495–518. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/0709.3906">0709.3906</a></span>. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2007.00646.x">10.1111/j.1467-9868.2007.00646.x</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:17511583">17511583</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Fast+stable+direct+fitting+and+smoothness+selection+for+generalized+additive+models&amp;rft.volume=70&amp;rft.issue=3&amp;rft.pages=495-518&amp;rft.date=2008&amp;rft_id=info%3Aarxiv%2F0709.3906&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A17511583%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9868.2007.00646.x&amp;rft.aulast=Wood&amp;rft.aufirst=Simon+N.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-whitebook-23"><span class="mw-cite-backlink">^ <a href="#cite_ref-whitebook_23-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-whitebook_23-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChambersHastie1993" class="citation book cs1">Chambers, J.M.; Hastie, T. (1993). <i>Statistical Models in S</i>. 
Chapman and Hall.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Statistical+Models+in+S&amp;rft.pub=Chapman+and+Hall&amp;rft.date=1993&amp;rft.aulast=Chambers&amp;rft.aufirst=J.M.&amp;rft.au=Hastie%2C+T.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Nori2019-24"><span class="mw-cite-backlink"><b><a href="#cite_ref-Nori2019_24-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNoriJenkinsKochCaruana2019" class="citation arxiv cs1">Nori, Harsha; Jenkins, Samuel; Koch, Paul; Caruana, Rich (2019). "InterpretML: A Unified Framework for Machine Learning Interpretability". <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1909.09223">1909.09223</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.LG">cs.LG</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=InterpretML%3A+A+Unified+Framework+for+Machine+Learning+Interpretability&amp;rft.date=2019&amp;rft_id=info%3Aarxiv%2F1909.09223&amp;rft.aulast=Nori&amp;rft.aufirst=Harsha&amp;rft.au=Jenkins%2C+Samuel&amp;rft.au=Koch%2C+Paul&amp;rft.au=Caruana%2C+Rich&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Gu2013-25"><span class="mw-cite-backlink">^ <a href="#cite_ref-Gu2013_25-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Gu2013_25-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite 
id="CITEREFGu2013" class="citation book cs1">Gu, Chong (2013). <i>Smoothing Spline ANOVA Models (2nd ed.)</i>. Springer.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Smoothing+Spline+ANOVA+Models+%282nd+ed.%29&amp;rft.pub=Springer&amp;rft.date=2013&amp;rft.aulast=Gu&amp;rft.aufirst=Chong&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-bayesx-26"><span class="mw-cite-backlink">^ <a href="#cite_ref-bayesx_26-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-bayesx_26-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFUmlaufAdlerKneibLang" class="citation journal cs1">Umlauf, Nikolaus; Adler, Daniel; Kneib, Thomas; Lang, Stefan; Zeileis, Achim. <a rel="nofollow" class="external text" href="https://www.econstor.eu/bitstream/10419/73863/1/2012-10.pdf">"Structured Additive Regression Models: An R Interface to BayesX"</a> <span class="cs1-format">(PDF)</span>. <i>Journal of Statistical Software</i>. 
<b>63</b> (21): 1–46.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+Statistical+Software&amp;rft.atitle=Structured+Additive+Regression+Models%3A+An+R+Interface+to+BayesX&amp;rft.volume=63&amp;rft.issue=21&amp;rft.pages=1-46&amp;rft.aulast=Umlauf&amp;rft.aufirst=Nikolaus&amp;rft.au=Adler%2C+Daniel&amp;rft.au=Kneib%2C+Thomas&amp;rft.au=Lang%2C+Stefan&amp;rft.au=Zeileis%2C+Achim&amp;rft_id=https%3A%2F%2Fwww.econstor.eu%2Fbitstream%2F10419%2F73863%2F1%2F2012-10.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Augustin2012-27"><span class="mw-cite-backlink"><b><a href="#cite_ref-Augustin2012_27-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAugustinSauleauWood2012" class="citation journal cs1">Augustin, N.H.; Sauleau, E-A; Wood, S.N. (2012). <a rel="nofollow" class="external text" href="http://opus.bath.ac.uk/27091/1/qq_gam_resub.pdf">"On quantile quantile plots for generalized linear models"</a> <span class="cs1-format">(PDF)</span>. <i>Computational Statistics and Data Analysis</i>. <b>56</b> (8): 2404–2409. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.csda.2012.01.026">10.1016/j.csda.2012.01.026</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:2960406">2960406</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Computational+Statistics+and+Data+Analysis&amp;rft.atitle=On+quantile+quantile+plots+for+generalized+linear+models&amp;rft.volume=56&amp;rft.issue=8&amp;rft.pages=2404-2409&amp;rft.date=2012&amp;rft_id=info%3Adoi%2F10.1016%2Fj.csda.2012.01.026&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A2960406%23id-name%3DS2CID&amp;rft.aulast=Augustin&amp;rft.aufirst=N.H.&amp;rft.au=Sauleau%2C+E-A&amp;rft.au=Wood%2C+S.N.&amp;rft_id=http%3A%2F%2Fopus.bath.ac.uk%2F27091%2F1%2Fqq_gam_resub.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Marra2011-28"><span class="mw-cite-backlink"><b><a href="#cite_ref-Marra2011_28-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMarraWood2011" class="citation journal cs1">Marra, G.; Wood, S.N. (2011). "Practical Variable Selection for Generalized Additive Models". <i>Computational Statistics and Data Analysis</i>. <b>55</b> (7): 2372–2387. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.csda.2011.02.004">10.1016/j.csda.2011.02.004</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Computational+Statistics+and+Data+Analysis&amp;rft.atitle=Practical+Variable+Selection+for+Generalized+Additive+Models&amp;rft.volume=55&amp;rft.issue=7&amp;rft.pages=2372-2387&amp;rft.date=2011&amp;rft_id=info%3Adoi%2F10.1016%2Fj.csda.2011.02.004&amp;rft.aulast=Marra&amp;rft.aufirst=G.&amp;rft.au=Wood%2C+S.N.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Greven2010-29"><span class="mw-cite-backlink"><b><a href="#cite_ref-Greven2010_29-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGrevenKneib2010" class="citation journal cs1">Greven, Sonja; Kneib, Thomas (2010). "On the behaviour of marginal and conditional AIC in linear mixed models". <i>Biometrika</i>. <b>97</b> (4): 773–789. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1093%2Fbiomet%2Fasq042">10.1093/biomet/asq042</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Biometrika&amp;rft.atitle=On+the+behaviour+of+marginal+and+conditional+AIC+in+linear+mixed+models&amp;rft.volume=97&amp;rft.issue=4&amp;rft.pages=773-789&amp;rft.date=2010&amp;rft_id=info%3Adoi%2F10.1093%2Fbiomet%2Fasq042&amp;rft.aulast=Greven&amp;rft.aufirst=Sonja&amp;rft.au=Kneib%2C+Thomas&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-30"><span class="mw-cite-backlink"><b><a href="#cite_ref-30">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBrian_Junker2010" class="citation web cs1">Brian Junker (March 22, 2010). <a rel="nofollow" class="external text" href="http://www.stat.cmu.edu/~cshalizi/490/10/xval-and-additive/xval-and-additive2.pdf">"Additive models and cross-validation"</a> <span class="cs1-format">(PDF)</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Additive+models+and+cross-validation&amp;rft.date=2010-03-22&amp;rft.au=Brian+Junker&amp;rft_id=http%3A%2F%2Fwww.stat.cmu.edu%2F~cshalizi%2F490%2F10%2Fxval-and-additive%2Fxval-and-additive2.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> <li id="cite_note-Reiss2009-31"><span class="mw-cite-backlink"><b><a href="#cite_ref-Reiss2009_31-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFReissOgden2009" class="citation journal cs1">Reiss, P.T.; Ogden, T.R. (2009). 
<a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2008.00695.x">"Smoothing parameter selection for a class of semiparametric linear models"</a>. <i>Journal of the Royal Statistical Society, Series B</i>. <b>71</b> (2): 505–523. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1467-9868.2008.00695.x">10.1111/j.1467-9868.2008.00695.x</a></span>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:51945597">51945597</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+the+Royal+Statistical+Society%2C+Series+B&amp;rft.atitle=Smoothing+parameter+selection+for+a+class+of+semiparametric+linear+models&amp;rft.volume=71&amp;rft.issue=2&amp;rft.pages=505-523&amp;rft.date=2009&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1467-9868.2008.00695.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A51945597%23id-name%3DS2CID&amp;rft.aulast=Reiss&amp;rft.aufirst=P.T.&amp;rft.au=Ogden%2C+T.R.&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1111%252Fj.1467-9868.2008.00695.x&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AGeneralized+additive+model" class="Z3988"></span></span> </li> </ol></div></div> <div class="mw-heading mw-heading2"><h2 id="External_links">External links</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Generalized_additive_model&amp;action=edit&amp;section=13" title="Edit section: External links"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><a rel="nofollow" class="external text" href="https://cran.r-project.org/web/packages/gam/index.html">gam</a>, an R package 
for GAMs by backfitting.</li> <li><a rel="nofollow" class="external text" href="https://www.statsmodels.org/devel/gam.html">gam</a>, a Python module in statsmodels.gam.</li> <li><a rel="nofollow" class="external text" href="https://github.com/interpretml/interpret">InterpretML</a>, a Python package for fitting GAMs via bagging and boosting.</li> <li><a rel="nofollow" class="external text" href="https://cran.r-project.org/web/packages/mgcv/index.html">mgcv</a>, an R package for GAMs using penalized regression splines.</li> <li><a rel="nofollow" class="external text" href="https://cran.r-project.org/web/packages/mboost/index.html">mboost</a>, an R package for boosting including additive models.</li> <li><a rel="nofollow" class="external text" href="https://cran.r-project.org/web/packages/gss/index.html">gss</a>, an R package for smoothing spline ANOVA.</li> <li><a rel="nofollow" class="external text" href="http://www.r-inla.org/">INLA</a> software for Bayesian Inference with GAMs and more.</li> <li><a rel="nofollow" class="external text" href="http://www.uni-goettingen.de/en/bayesx/550513.html">BayesX</a> software for MCMC and penalized likelihood approaches to GAMs.</li> <li><a rel="nofollow" class="external text" href="https://petolau.github.io/Analyzing-double-seasonal-time-series-with-GAM-in-R/">Doing magic and analyzing seasonal time series with GAM in R</a></li> <li><a rel="nofollow" class="external text" href="http://multithreaded.stitchfix.com/blog/2015/07/30/gam/">GAM: The Predictive Modeling Silver Bullet</a></li> <li><a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=eS_k6L638k0">Building GAM by projection descent</a></li></ul> <!-- NewPP limit report Parsed by mw‐web.codfw.main‐f69cdc8f6‐mcbjw Cached time: 20241122142000 Cache expiry: 2592000 Reduced expiry: false Complications: [vary‐revision‐sha1, show‐toc] CPU time usage: 0.528 seconds Real time usage: 0.713 seconds Preprocessor visited node count: 2612/1000000 
Post‐expand include size: 64863/2097152 bytes Template argument size: 926/2097152 bytes Highest expansion depth: 12/100 Expensive parser function count: 2/500 Unstrip recursion depth: 1/20 Unstrip post‐expand size: 125006/5000000 bytes Lua time usage: 0.269/10.000 seconds Lua memory usage: 5627285/52428800 bytes Number of Wikibase entities loaded: 0/400 --> <!-- Transclusion expansion time report (%,ms,calls,template) 100.00% 490.154 1 -total 60.77% 297.865 1 Template:Reflist 25.32% 124.100 21 Template:Cite_journal 23.62% 115.769 8 Template:Cite_book 21.16% 103.732 1 Template:Short_description 14.40% 70.576 2 Template:Pagetype 9.66% 47.373 1 Template:How? 7.83% 38.392 1 Template:Fix 5.36% 26.251 1 Template:Category_handler 4.40% 21.550 3 Template:Main_other --> <!-- Saved in parser cache with key enwiki:pcache:idhash:3608284-0!canonical and timestamp 20241122142000 and revision id 1256715804. Rendering was triggered because: page-view --> </div><!--esi <esi:include src="/esitest-fa8a495983347898/content" /> --><noscript><img src="https://login.wikimedia.org/wiki/Special:CentralAutoLogin/start?type=1x1" alt="" width="1" height="1" style="border: none; position: absolute;"></noscript> <div class="printfooter" data-nosnippet="">Retrieved from "<a dir="ltr" href="https://en.wikipedia.org/w/index.php?title=Generalized_additive_model&amp;oldid=1256715804">https://en.wikipedia.org/w/index.php?title=Generalized_additive_model&amp;oldid=1256715804</a>"</div></div> <div id="catlinks" class="catlinks" data-mw="interface"><div id="mw-normal-catlinks" class="mw-normal-catlinks"><a href="/wiki/Help:Category" title="Help:Category">Categories</a>: <ul><li><a href="/wiki/Category:Generalized_linear_models" title="Category:Generalized linear models">Generalized linear models</a></li><li><a href="/wiki/Category:Nonparametric_regression" title="Category:Nonparametric regression">Nonparametric regression</a></li><li><a href="/wiki/Category:Regression_models" title="Category:Regression 
models">Regression models</a></li></ul></div><div id="mw-hidden-catlinks" class="mw-hidden-catlinks mw-hidden-cats-hidden">Hidden categories: <ul><li><a href="/wiki/Category:Articles_with_short_description" title="Category:Articles with short description">Articles with short description</a></li><li><a href="/wiki/Category:Short_description_is_different_from_Wikidata" title="Category:Short description is different from Wikidata">Short description is different from Wikidata</a></li><li><a href="/wiki/Category:Wikipedia_articles_needing_clarification_from_July_2019" title="Category:Wikipedia articles needing clarification from July 2019">Wikipedia articles needing clarification from July 2019</a></li><li><a href="/wiki/Category:Articles_with_example_R_code" title="Category:Articles with example R code">Articles with example R code</a></li></ul></div></div> </div> </main> </div> <div class="mw-footer-container"> <footer id="footer" class="mw-footer" > <ul id="footer-info"> <li id="footer-info-lastmod"> This page was last edited on 11 November 2024, at 06:42<span class="anonymous-show">&#160;(UTC)</span>.</li> <li id="footer-info-copyright">Text is available under the <a href="/wiki/Wikipedia:Text_of_the_Creative_Commons_Attribution-ShareAlike_4.0_International_License" title="Wikipedia:Text of the Creative Commons Attribution-ShareAlike 4.0 International License">Creative Commons Attribution-ShareAlike 4.0 License</a>; additional terms may apply. By using this site, you agree to the <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Terms_of_Use" class="extiw" title="foundation:Special:MyLanguage/Policy:Terms of Use">Terms of Use</a> and <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy" class="extiw" title="foundation:Special:MyLanguage/Policy:Privacy policy">Privacy Policy</a>. 
Wikipedia® is a registered trademark of the <a rel="nofollow" class="external text" href="https://wikimediafoundation.org/">Wikimedia Foundation, Inc.</a>, a non-profit organization.</li> </ul> <ul id="footer-places"> <li id="footer-places-privacy"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy">Privacy policy</a></li> <li id="footer-places-about"><a href="/wiki/Wikipedia:About">About Wikipedia</a></li> <li id="footer-places-disclaimers"><a href="/wiki/Wikipedia:General_disclaimer">Disclaimers</a></li> <li id="footer-places-contact"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us">Contact Wikipedia</a></li> <li id="footer-places-wm-codeofconduct"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Universal_Code_of_Conduct">Code of Conduct</a></li> <li id="footer-places-developers"><a href="https://developer.wikimedia.org">Developers</a></li> <li id="footer-places-statslink"><a href="https://stats.wikimedia.org/#/en.wikipedia.org">Statistics</a></li> <li id="footer-places-cookiestatement"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Cookie_statement">Cookie statement</a></li> <li id="footer-places-mobileview"><a href="//en.m.wikipedia.org/w/index.php?title=Generalized_additive_model&amp;mobileaction=toggle_view_mobile" class="noprint stopMobileRedirectToggle">Mobile view</a></li> </ul> <ul id="footer-icons" class="noprint"> <li id="footer-copyrightico"><a href="https://wikimediafoundation.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img src="/static/images/footer/wikimedia-button.svg" width="84" height="29" alt="Wikimedia Foundation" loading="lazy"></a></li> <li id="footer-poweredbyico"><a href="https://www.mediawiki.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img src="/w/resources/assets/poweredby_mediawiki.svg" alt="Powered by MediaWiki" width="88" 
height="31" loading="lazy"></a></li> </ul> </footer> </div> </div> </div> <div class="vector-settings" id="p-dock-bottom"> <ul></ul> </div><script>(RLQ=window.RLQ||[]).push(function(){mw.config.set({"wgHostname":"mw-web.codfw.main-f69cdc8f6-twlxc","wgBackendResponseTime":157,"wgPageParseReport":{"limitreport":{"cputime":"0.528","walltime":"0.713","ppvisitednodes":{"value":2612,"limit":1000000},"postexpandincludesize":{"value":64863,"limit":2097152},"templateargumentsize":{"value":926,"limit":2097152},"expansiondepth":{"value":12,"limit":100},"expensivefunctioncount":{"value":2,"limit":500},"unstrip-depth":{"value":1,"limit":20},"unstrip-size":{"value":125006,"limit":5000000},"entityaccesscount":{"value":0,"limit":400},"timingprofile":["100.00% 490.154 1 -total"," 60.77% 297.865 1 Template:Reflist"," 25.32% 124.100 21 Template:Cite_journal"," 23.62% 115.769 8 Template:Cite_book"," 21.16% 103.732 1 Template:Short_description"," 14.40% 70.576 2 Template:Pagetype"," 9.66% 47.373 1 Template:How?"," 7.83% 38.392 1 Template:Fix"," 5.36% 26.251 1 Template:Category_handler"," 4.40% 21.550 3 Template:Main_other"]},"scribunto":{"limitreport-timeusage":{"value":"0.269","limit":"10.000"},"limitreport-memusage":{"value":5627285,"limit":52428800}},"cachereport":{"origin":"mw-web.codfw.main-f69cdc8f6-mcbjw","timestamp":"20241122142000","ttl":2592000,"transientcontent":false}}});});</script> <script type="application/ld+json">{"@context":"https:\/\/schema.org","@type":"Article","name":"Generalized additive model","url":"https:\/\/en.wikipedia.org\/wiki\/Generalized_additive_model","sameAs":"http:\/\/www.wikidata.org\/entity\/Q3318054","mainEntity":"http:\/\/www.wikidata.org\/entity\/Q3318054","author":{"@type":"Organization","name":"Contributors to Wikimedia projects"},"publisher":{"@type":"Organization","name":"Wikimedia Foundation, 
Inc.","logo":{"@type":"ImageObject","url":"https:\/\/www.wikimedia.org\/static\/images\/wmf-hor-googpub.png"}},"datePublished":"2006-01-03T18:17:31Z","dateModified":"2024-11-11T06:42:53Z","headline":"class of statistical models"}</script> </body> </html>

<!-- Pages: 1 2 3 4 5 6 7 8 9 10 (scraper pagination artifact; bare text after the closing html tag is invalid, so it is preserved here as a comment) -->