GitHub - ymcui/Chinese-LLaMA-Alpaca-2: Chinese LLaMA-2 & Alpaca-2 LLMs, phase 2 of the Chinese LLaMA & Alpaca large-model project, plus 64K long-context models
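
The repository distributes Chinese-adapted LLaMA-2 and instruction-tuned Alpaca-2 checkpoints, including 64K long-context variants. A minimal sketch of how such a checkpoint might be loaded with Hugging Face transformers follows; the model ID "hfl/chinese-alpaca-2-7b" and the "-64k" suffix for long-context variants are assumptions based on the project's naming and are not confirmed by this page, so check the repo's README for the exact IDs.

```python
# Minimal sketch: load an Alpaca-2 checkpoint and generate a reply.
# Assumes transformers, accelerate, and torch are installed; the model ID
# below is an assumption, not taken from this page.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "hfl/chinese-alpaca-2-7b"  # assumed ID; 64K-context variants likely add "-64k"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # half precision to fit consumer GPUs
    device_map="auto",          # let accelerate place the weights
)

# Alpaca-2 is instruction-tuned, so pass the request as a plain prompt.
prompt = "请用中文介绍一下大语言模型。"  # "Introduce large language models, in Chinese."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```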

<svg aria-hidden="true" height="24" viewBox="0 0 24 24" version="1.1" width="24" data-view-component="true" class="octicon octicon-shield-check color-fg-subtle mr-3"> <path d="M16.53 9.78a.75.75 0 0 0-1.06-1.06L11 13.19l-1.97-1.97a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l5-5Z"></path><path d="m12.54.637 8.25 2.675A1.75 1.75 0 0 1 22 4.976V10c0 6.19-3.771 10.704-9.401 12.83a1.704 1.704 0 0 1-1.198 0C5.77 20.705 2 16.19 2 10V4.976c0-.758.489-1.43 1.21-1.664L11.46.637a1.748 1.748 0 0 1 1.08 0Zm-.617 1.426-8.25 2.676a.249.249 0 0 0-.173.237V10c0 5.46 3.28 9.483 8.43 11.426a.199.199 0 0 0 .14 0C17.22 19.483 20.5 15.461 20.5 10V4.976a.25.25 0 0 0-.173-.237l-8.25-2.676a.253.253 0 0 0-.154 0Z"></path> </svg> <div> <div class="color-fg-default h4">Advanced Security</div> Enterprise-grade security features </div> </a></li> <li> <a class="HeaderMenu-dropdown-link d-block no-underline position-relative py-2 Link--secondary d-flex flex-items-center Link--has-description pb-lg-3" data-analytics-event="{&quot;location&quot;:&quot;navbar&quot;,&quot;action&quot;:&quot;copilot_for_business&quot;,&quot;context&quot;:&quot;enterprise&quot;,&quot;tag&quot;:&quot;link&quot;,&quot;label&quot;:&quot;copilot_for_business_link_enterprise_navbar&quot;}" href="/features/copilot/copilot-business"> <svg aria-hidden="true" height="24" viewBox="0 0 24 24" version="1.1" width="24" data-view-component="true" class="octicon octicon-copilot color-fg-subtle mr-3"> <path d="M23.922 16.992c-.861 1.495-5.859 5.023-11.922 5.023-6.063 0-11.061-3.528-11.922-5.023A.641.641 0 0 1 0 16.736v-2.869a.841.841 0 0 1 .053-.22c.372-.935 1.347-2.292 2.605-2.656.167-.429.414-1.055.644-1.517a10.195 10.195 0 0 1-.052-1.086c0-1.331.282-2.499 1.132-3.368.397-.406.89-.717 1.474-.952 1.399-1.136 3.392-2.093 6.122-2.093 2.731 0 4.767.957 6.166 2.093.584.235 1.077.546 1.474.952.85.869 1.132 2.037 1.132 3.368 0 .368-.014.733-.052 1.086.23.462.477 1.088.644 1.517 1.258.364 2.233 1.721 2.605 2.656a.832.832 0 0 1 .053.22v2.869a.641.641 0 0 1-.078.256ZM12.172 11h-.344a4.323 4.323 0 0 1-.355.508C10.703 12.455 9.555 13 7.965 13c-1.725 0-2.989-.359-3.782-1.259a2.005 2.005 0 0 1-.085-.104L4 11.741v6.585c1.435.779 4.514 2.179 8 2.179 3.486 0 6.565-1.4 8-2.179v-6.585l-.098-.104s-.033.045-.085.104c-.793.9-2.057 1.259-3.782 1.259-1.59 0-2.738-.545-3.508-1.492a4.323 4.323 0 0 1-.355-.508h-.016.016Zm.641-2.935c.136 1.057.403 1.913.878 2.497.442.544 1.134.938 2.344.938 1.573 0 2.292-.337 2.657-.751.384-.435.558-1.15.558-2.361 0-1.14-.243-1.847-.705-2.319-.477-.488-1.319-.862-2.824-1.025-1.487-.161-2.192.138-2.533.529-.269.307-.437.808-.438 1.578v.021c0 .265.021.562.063.893Zm-1.626 0c.042-.331.063-.628.063-.894v-.02c-.001-.77-.169-1.271-.438-1.578-.341-.391-1.046-.69-2.533-.529-1.505.163-2.347.537-2.824 1.025-.462.472-.705 1.179-.705 2.319 0 1.211.175 1.926.558 2.361.365.414 1.084.751 2.657.751 1.21 0 1.902-.394 2.344-.938.475-.584.742-1.44.878-2.497Z"></path><path d="M14.5 14.25a1 1 0 0 1 1 1v2a1 1 0 0 1-2 0v-2a1 1 0 0 1 1-1Zm-5 0a1 1 0 0 1 1 1v2a1 1 0 0 1-2 0v-2a1 1 0 0 1 1-1Z"></path> </svg> <div> <div class="color-fg-default h4">Copilot for business</div> Enterprise-grade AI features </div> </a></li> <li> <a class="HeaderMenu-dropdown-link d-block no-underline position-relative py-2 Link--secondary d-flex flex-items-center Link--has-description" 
data-analytics-event="{&quot;location&quot;:&quot;navbar&quot;,&quot;action&quot;:&quot;premium_support&quot;,&quot;context&quot;:&quot;enterprise&quot;,&quot;tag&quot;:&quot;link&quot;,&quot;label&quot;:&quot;premium_support_link_enterprise_navbar&quot;}" href="/premium-support"> <svg aria-hidden="true" height="24" viewBox="0 0 24 24" version="1.1" width="24" data-view-component="true" class="octicon octicon-comment-discussion color-fg-subtle mr-3"> <path d="M1.75 1h12.5c.966 0 1.75.784 1.75 1.75v9.5A1.75 1.75 0 0 1 14.25 14H8.061l-2.574 2.573A1.458 1.458 0 0 1 3 15.543V14H1.75A1.75 1.75 0 0 1 0 12.25v-9.5C0 1.784.784 1 1.75 1ZM1.5 2.75v9.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h6.5a.25.25 0 0 0 .25-.25v-9.5a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25Z"></path><path d="M22.5 8.75a.25.25 0 0 0-.25-.25h-3.5a.75.75 0 0 1 0-1.5h3.5c.966 0 1.75.784 1.75 1.75v9.5A1.75 1.75 0 0 1 22.25 20H21v1.543a1.457 1.457 0 0 1-2.487 1.03L15.939 20H10.75A1.75 1.75 0 0 1 9 18.25v-1.465a.75.75 0 0 1 1.5 0v1.465c0 .138.112.25.25.25h5.5a.75.75 0 0 1 .53.22l2.72 2.72v-2.19a.75.75 0 0 1 .75-.75h2a.25.25 0 0 0 .25-.25v-9.5Z"></path> </svg> <div> <div class="color-fg-default h4">Premium Support</div> Enterprise-grade 24/7 support </div> </a></li> </ul> </div> </div> </div> </li> <li class="HeaderMenu-item position-relative flex-wrap flex-justify-between flex-items-center d-block d-lg-flex flex-lg-nowrap flex-lg-items-center js-details-container js-header-menu-item"> <a class="HeaderMenu-link no-underline px-0 px-lg-2 py-3 py-lg-2 d-block d-lg-inline-block" data-analytics-event="{&quot;location&quot;:&quot;navbar&quot;,&quot;action&quot;:&quot;pricing&quot;,&quot;context&quot;:&quot;global&quot;,&quot;tag&quot;:&quot;link&quot;,&quot;label&quot;:&quot;pricing_link_global_navbar&quot;}" href="https://github.com/pricing">Pricing</a> </li> </ul> </nav> <div class="d-flex flex-column flex-lg-row width-full flex-justify-end flex-lg-items-center text-center mt-3 mt-lg-0 text-lg-left ml-lg-3"> <qbsearch-input class="search-input" data-scope="repo:ymcui/Chinese-LLaMA-Alpaca-2" data-custom-scopes-path="/search/custom_scopes" data-delete-custom-scopes-csrf="wTjxOJGkG0fERl-a0b0Kno5oO0kijJM_DUUeslptdWIgGF3D5lmMs8NnaH4RMhnpGxULqNb1ZiExV07OFWQWEw" data-max-custom-scopes="10" data-header-redesign-enabled="false" data-initial-value="" data-blackbird-suggestions-path="/search/suggestions" data-jump-to-suggestions-path="/_graphql/GetSuggestedNavigationDestinations" data-current-repository="ymcui/Chinese-LLaMA-Alpaca-2" data-current-org="" data-current-owner="ymcui" data-logged-in="false" data-copilot-chat-enabled="false" data-nl-search-enabled="false" data-retain-scroll-position="true"> <div class="search-input-container search-with-dialog position-relative d-flex flex-row flex-items-center mr-4 rounded" data-action="click:qbsearch-input#searchInputContainerClicked" > <button type="button" class="header-search-button placeholder input-button form-control d-flex flex-1 flex-self-stretch flex-items-center no-wrap width-full py-0 pl-2 pr-0 text-left border-0 box-shadow-none" data-target="qbsearch-input.inputButton" aria-label="Search or jump to…" aria-haspopup="dialog" placeholder="Search or jump to..." 
data-hotkey=s,/ autocapitalize="off" data-analytics-event="{&quot;location&quot;:&quot;navbar&quot;,&quot;action&quot;:&quot;searchbar&quot;,&quot;context&quot;:&quot;global&quot;,&quot;tag&quot;:&quot;input&quot;,&quot;label&quot;:&quot;searchbar_input_global_navbar&quot;}" data-action="click:qbsearch-input#handleExpand" > <div class="mr-2 color-fg-muted"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search"> <path d="M10.68 11.74a6 6 0 0 1-7.922-8.982 6 6 0 0 1 8.982 7.922l3.04 3.04a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215ZM11.5 7a4.499 4.499 0 1 0-8.997 0A4.499 4.499 0 0 0 11.5 7Z"></path> </svg> </div> <span class="flex-1" data-target="qbsearch-input.inputButtonText">Search or jump to...</span> <div class="d-flex" data-target="qbsearch-input.hotkeyIndicator"> <svg xmlns="http://www.w3.org/2000/svg" width="22" height="20" aria-hidden="true" class="mr-1"><path fill="none" stroke="#979A9C" opacity=".4" d="M3.5.5h12c1.7 0 3 1.3 3 3v13c0 1.7-1.3 3-3 3h-12c-1.7 0-3-1.3-3-3v-13c0-1.7 1.3-3 3-3z"></path><path fill="#979A9C" d="M11.8 6L8 15.1h-.9L10.8 6h1z"></path></svg> </div> </button> <input type="hidden" name="type" class="js-site-search-type-field"> <div class="Overlay--hidden " data-modal-dialog-overlay> <modal-dialog data-action="close:qbsearch-input#handleClose cancel:qbsearch-input#handleClose" data-target="qbsearch-input.searchSuggestionsDialog" role="dialog" id="search-suggestions-dialog" aria-modal="true" aria-labelledby="search-suggestions-dialog-header" data-view-component="true" class="Overlay Overlay--width-large Overlay--height-auto"> <h1 id="search-suggestions-dialog-header" class="sr-only">Search code, repositories, users, issues, pull requests...</h1> <div class="Overlay-body Overlay-body--paddingNone"> <div data-view-component="true"> <div class="search-suggestions position-fixed width-full color-shadow-large border color-fg-default color-bg-default overflow-hidden d-flex flex-column query-builder-container" style="border-radius: 12px;" data-target="qbsearch-input.queryBuilderContainer" hidden > <!-- '"` --><!-- </textarea></xmp> --></option></form><form id="query-builder-test-form" action="" accept-charset="UTF-8" method="get"> <query-builder data-target="qbsearch-input.queryBuilder" id="query-builder-query-builder-test" data-filter-key=":" data-view-component="true" class="QueryBuilder search-query-builder"> <div class="FormControl FormControl--fullWidth"> <label id="query-builder-test-label" for="query-builder-test" class="FormControl-label sr-only"> Search </label> <div class="QueryBuilder-StyledInput width-fit " data-target="query-builder.styledInput" > <span id="query-builder-test-leadingvisual-wrap" class="FormControl-input-leadingVisualWrap QueryBuilder-leadingVisualWrap"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search FormControl-input-leadingVisual"> <path d="M10.68 11.74a6 6 0 0 1-7.922-8.982 6 6 0 0 1 8.982 7.922l3.04 3.04a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215ZM11.5 7a4.499 4.499 0 1 0-8.997 0A4.499 4.499 0 0 0 11.5 7Z"></path> </svg> </span> <div data-target="query-builder.styledInputContainer" class="QueryBuilder-StyledInputContainer"> <div aria-hidden="true" class="QueryBuilder-StyledInputContent" data-target="query-builder.styledInputContent" ></div> <div class="QueryBuilder-InputWrapper"> <div aria-hidden="true" class="QueryBuilder-Sizer" 
data-target="query-builder.sizer"></div> <input id="query-builder-test" name="query-builder-test" value="" autocomplete="off" type="text" role="combobox" spellcheck="false" aria-expanded="false" aria-describedby="validation-54ded3be-8fdb-4580-86b0-4948ae2b30fd" data-target="query-builder.input" data-action=" input:query-builder#inputChange blur:query-builder#inputBlur keydown:query-builder#inputKeydown focus:query-builder#inputFocus " data-view-component="true" class="FormControl-input QueryBuilder-Input FormControl-medium" /> </div> </div> <span class="sr-only" id="query-builder-test-clear">Clear</span> <button role="button" id="query-builder-test-clear-button" aria-labelledby="query-builder-test-clear query-builder-test-label" data-target="query-builder.clearButton" data-action=" click:query-builder#clear focus:query-builder#clearButtonFocus blur:query-builder#clearButtonBlur " variant="small" hidden="hidden" type="button" data-view-component="true" class="Button Button--iconOnly Button--invisible Button--medium mr-1 px-2 py-0 d-flex flex-items-center rounded-1 color-fg-muted"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x-circle-fill Button-visual"> <path d="M2.343 13.657A8 8 0 1 1 13.658 2.343 8 8 0 0 1 2.343 13.657ZM6.03 4.97a.751.751 0 0 0-1.042.018.751.751 0 0 0-.018 1.042L6.94 8 4.97 9.97a.749.749 0 0 0 .326 1.275.749.749 0 0 0 .734-.215L8 9.06l1.97 1.97a.749.749 0 0 0 1.275-.326.749.749 0 0 0-.215-.734L9.06 8l1.97-1.97a.749.749 0 0 0-.326-1.275.749.749 0 0 0-.734.215L8 6.94Z"></path> </svg> </button> </div> <template id="search-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search"> <path d="M10.68 11.74a6 6 0 0 1-7.922-8.982 6 6 0 0 1 8.982 7.922l3.04 3.04a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215ZM11.5 7a4.499 4.499 0 1 0-8.997 0A4.499 4.499 0 0 0 11.5 7Z"></path> </svg> </template> <template id="code-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code"> <path d="m11.28 3.22 4.25 4.25a.75.75 0 0 1 0 1.06l-4.25 4.25a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L13.94 8l-3.72-3.72a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215Zm-6.56 0a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042L2.06 8l3.72 3.72a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L.47 8.53a.75.75 0 0 1 0-1.06Z"></path> </svg> </template> <template id="file-code-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-file-code"> <path d="M4 1.75C4 .784 4.784 0 5.75 0h5.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v8.586A1.75 1.75 0 0 1 14.25 15h-9a.75.75 0 0 1 0-1.5h9a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 10 4.25V1.5H5.75a.25.25 0 0 0-.25.25v2.5a.75.75 0 0 1-1.5 0Zm1.72 4.97a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734l1.47-1.47-1.47-1.47a.75.75 0 0 1 0-1.06ZM3.28 7.78 1.81 9.25l1.47 1.47a.751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018l-2-2a.75.75 0 0 1 0-1.06l2-2a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Zm8.22-6.218V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path> </svg> </template> <template id="history-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon 
octicon-history"> <path d="m.427 1.927 1.215 1.215a8.002 8.002 0 1 1-1.6 5.685.75.75 0 1 1 1.493-.154 6.5 6.5 0 1 0 1.18-4.458l1.358 1.358A.25.25 0 0 1 3.896 6H.25A.25.25 0 0 1 0 5.75V2.104a.25.25 0 0 1 .427-.177ZM7.75 4a.75.75 0 0 1 .75.75v2.992l2.028.812a.75.75 0 0 1-.557 1.392l-2.5-1A.751.751 0 0 1 7 8.25v-3.5A.75.75 0 0 1 7.75 4Z"></path> </svg> </template> <template id="repo-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo"> <path d="M2 2.5A2.5 2.5 0 0 1 4.5 0h8.75a.75.75 0 0 1 .75.75v12.5a.75.75 0 0 1-.75.75h-2.5a.75.75 0 0 1 0-1.5h1.75v-2h-8a1 1 0 0 0-.714 1.7.75.75 0 1 1-1.072 1.05A2.495 2.495 0 0 1 2 11.5Zm10.5-1h-8a1 1 0 0 0-1 1v6.708A2.486 2.486 0 0 1 4.5 9h8ZM5 12.25a.25.25 0 0 1 .25-.25h3.5a.25.25 0 0 1 .25.25v3.25a.25.25 0 0 1-.4.2l-1.45-1.087a.249.249 0 0 0-.3 0L5.4 15.7a.25.25 0 0 1-.4-.2Z"></path> </svg> </template> <template id="bookmark-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bookmark"> <path d="M3 2.75C3 1.784 3.784 1 4.75 1h6.5c.966 0 1.75.784 1.75 1.75v11.5a.75.75 0 0 1-1.227.579L8 11.722l-3.773 3.107A.751.751 0 0 1 3 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v9.91l3.023-2.489a.75.75 0 0 1 .954 0l3.023 2.49V2.75a.25.25 0 0 0-.25-.25Z"></path> </svg> </template> <template id="plus-circle-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-plus-circle"> <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm7.25-3.25v2.5h2.5a.75.75 0 0 1 0 1.5h-2.5v2.5a.75.75 0 0 1-1.5 0v-2.5h-2.5a.75.75 0 0 1 0-1.5h2.5v-2.5a.75.75 0 0 1 1.5 0Z"></path> </svg> </template> <template id="circle-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill"> <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path> </svg> </template> <template id="trash-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-trash"> <path d="M11 1.75V3h2.25a.75.75 0 0 1 0 1.5H2.75a.75.75 0 0 1 0-1.5H5V1.75C5 .784 5.784 0 6.75 0h2.5C10.216 0 11 .784 11 1.75ZM4.496 6.675l.66 6.6a.25.25 0 0 0 .249.225h5.19a.25.25 0 0 0 .249-.225l.66-6.6a.75.75 0 0 1 1.492.149l-.66 6.6A1.748 1.748 0 0 1 10.595 15h-5.19a1.75 1.75 0 0 1-1.741-1.575l-.66-6.6a.75.75 0 1 1 1.492-.15ZM6.5 1.75V3h3V1.75a.25.25 0 0 0-.25-.25h-2.5a.25.25 0 0 0-.25.25Z"></path> </svg> </template> <template id="team-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-people"> <path d="M2 5.5a3.5 3.5 0 1 1 5.898 2.549 5.508 5.508 0 0 1 3.034 4.084.75.75 0 1 1-1.482.235 4 4 0 0 0-7.9 0 .75.75 0 0 1-1.482-.236A5.507 5.507 0 0 1 3.102 8.05 3.493 3.493 0 0 1 2 5.5ZM11 4a3.001 3.001 0 0 1 2.22 5.018 5.01 5.01 0 0 1 2.56 3.012.749.749 0 0 1-.885.954.752.752 0 0 1-.549-.514 3.507 3.507 0 0 0-2.522-2.372.75.75 0 0 1-.574-.73v-.352a.75.75 0 0 1 .416-.672A1.5 1.5 0 0 0 11 5.5.75.75 0 0 1 11 4Zm-5.5-.5a2 2 0 1 0-.001 3.999A2 2 0 0 0 5.5 3.5Z"></path> </svg> </template> <template id="project-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project"> <path d="M1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 
1.75 0 0 1 0 14.25V1.75C0 .784.784 0 1.75 0ZM1.5 1.75v12.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25ZM11.75 3a.75.75 0 0 1 .75.75v7.5a.75.75 0 0 1-1.5 0v-7.5a.75.75 0 0 1 .75-.75Zm-8.25.75a.75.75 0 0 1 1.5 0v5.5a.75.75 0 0 1-1.5 0ZM8 3a.75.75 0 0 1 .75.75v3.5a.75.75 0 0 1-1.5 0v-3.5A.75.75 0 0 1 8 3Z"></path> </svg> </template> <template id="pencil-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pencil"> <path d="M11.013 1.427a1.75 1.75 0 0 1 2.474 0l1.086 1.086a1.75 1.75 0 0 1 0 2.474l-8.61 8.61c-.21.21-.47.364-.756.445l-3.251.93a.75.75 0 0 1-.927-.928l.929-3.25c.081-.286.235-.547.445-.758l8.61-8.61Zm.176 4.823L9.75 4.81l-6.286 6.287a.253.253 0 0 0-.064.108l-.558 1.953 1.953-.558a.253.253 0 0 0 .108-.064Zm1.238-3.763a.25.25 0 0 0-.354 0L10.811 3.75l1.439 1.44 1.263-1.263a.25.25 0 0 0 0-.354Z"></path> </svg> </template> <template id="copilot-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copilot"> <path d="M7.998 15.035c-4.562 0-7.873-2.914-7.998-3.749V9.338c.085-.628.677-1.686 1.588-2.065.013-.07.024-.143.036-.218.029-.183.06-.384.126-.612-.201-.508-.254-1.084-.254-1.656 0-.87.128-1.769.693-2.484.579-.733 1.494-1.124 2.724-1.261 1.206-.134 2.262.034 2.944.765.05.053.096.108.139.165.044-.057.094-.112.143-.165.682-.731 1.738-.899 2.944-.765 1.23.137 2.145.528 2.724 1.261.566.715.693 1.614.693 2.484 0 .572-.053 1.148-.254 1.656.066.228.098.429.126.612.012.076.024.148.037.218.924.385 1.522 1.471 1.591 2.095v1.872c0 .766-3.351 3.795-8.002 3.795Zm0-1.485c2.28 0 4.584-1.11 5.002-1.433V7.862l-.023-.116c-.49.21-1.075.291-1.727.291-1.146 0-2.059-.327-2.71-.991A3.222 3.222 0 0 1 8 6.303a3.24 3.24 0 0 1-.544.743c-.65.664-1.563.991-2.71.991-.652 0-1.236-.081-1.727-.291l-.023.116v4.255c.419.323 2.722 1.433 5.002 1.433ZM6.762 2.83c-.193-.206-.637-.413-1.682-.297-1.019.113-1.479.404-1.713.7-.247.312-.369.789-.369 1.554 0 .793.129 1.171.308 1.371.162.181.519.379 1.442.379.853 0 1.339-.235 1.638-.54.315-.322.527-.827.617-1.553.117-.935-.037-1.395-.241-1.614Zm4.155-.297c-1.044-.116-1.488.091-1.681.297-.204.219-.359.679-.242 1.614.091.726.303 1.231.618 1.553.299.305.784.54 1.638.54.922 0 1.28-.198 1.442-.379.179-.2.308-.578.308-1.371 0-.765-.123-1.242-.37-1.554-.233-.296-.693-.587-1.713-.7Z"></path><path d="M6.25 9.037a.75.75 0 0 1 .75.75v1.501a.75.75 0 0 1-1.5 0V9.787a.75.75 0 0 1 .75-.75Zm4.25.75v1.501a.75.75 0 0 1-1.5 0V9.787a.75.75 0 0 1 1.5 0Z"></path> </svg> </template> <template id="copilot-error-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copilot-error"> <path d="M16 11.24c0 .112-.072.274-.21.467L13 9.688V7.862l-.023-.116c-.49.21-1.075.291-1.727.291-.198 0-.388-.009-.571-.029L6.833 5.226a4.01 4.01 0 0 0 .17-.782c.117-.935-.037-1.395-.241-1.614-.193-.206-.637-.413-1.682-.297-.683.076-1.115.231-1.395.415l-1.257-.91c.579-.564 1.413-.877 2.485-.996 1.206-.134 2.262.034 2.944.765.05.053.096.108.139.165.044-.057.094-.112.143-.165.682-.731 1.738-.899 2.944-.765 1.23.137 2.145.528 2.724 1.261.566.715.693 1.614.693 2.484 0 .572-.053 1.148-.254 1.656.066.228.098.429.126.612.012.076.024.148.037.218.924.385 1.522 1.471 1.591 2.095Zm-5.083-8.707c-1.044-.116-1.488.091-1.681.297-.204.219-.359.679-.242 1.614.091.726.303 1.231.618 1.553.299.305.784.54 1.638.54.922 0 1.28-.198 
1.442-.379.179-.2.308-.578.308-1.371 0-.765-.123-1.242-.37-1.554-.233-.296-.693-.587-1.713-.7Zm2.511 11.074c-1.393.776-3.272 1.428-5.43 1.428-4.562 0-7.873-2.914-7.998-3.749V9.338c.085-.628.677-1.686 1.588-2.065.013-.07.024-.143.036-.218.029-.183.06-.384.126-.612-.18-.455-.241-.963-.252-1.475L.31 4.107A.747.747 0 0 1 0 3.509V3.49a.748.748 0 0 1 .625-.73c.156-.026.306.047.435.139l14.667 10.578a.592.592 0 0 1 .227.264.752.752 0 0 1 .046.249v.022a.75.75 0 0 1-1.19.596Zm-1.367-.991L5.635 7.964a5.128 5.128 0 0 1-.889.073c-.652 0-1.236-.081-1.727-.291l-.023.116v4.255c.419.323 2.722 1.433 5.002 1.433 1.539 0 3.089-.505 4.063-.934Z"></path> </svg> </template> <template id="workflow-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-workflow"> <path d="M0 1.75C0 .784.784 0 1.75 0h3.5C6.216 0 7 .784 7 1.75v3.5A1.75 1.75 0 0 1 5.25 7H4v4a1 1 0 0 0 1 1h4v-1.25C9 9.784 9.784 9 10.75 9h3.5c.966 0 1.75.784 1.75 1.75v3.5A1.75 1.75 0 0 1 14.25 16h-3.5A1.75 1.75 0 0 1 9 14.25v-.75H5A2.5 2.5 0 0 1 2.5 11V7h-.75A1.75 1.75 0 0 1 0 5.25Zm1.75-.25a.25.25 0 0 0-.25.25v3.5c0 .138.112.25.25.25h3.5a.25.25 0 0 0 .25-.25v-3.5a.25.25 0 0 0-.25-.25Zm9 9a.25.25 0 0 0-.25.25v3.5c0 .138.112.25.25.25h3.5a.25.25 0 0 0 .25-.25v-3.5a.25.25 0 0 0-.25-.25Z"></path> </svg> </template> <template id="book-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book"> <path d="M0 1.75A.75.75 0 0 1 .75 1h4.253c1.227 0 2.317.59 3 1.501A3.743 3.743 0 0 1 11.006 1h4.245a.75.75 0 0 1 .75.75v10.5a.75.75 0 0 1-.75.75h-4.507a2.25 2.25 0 0 0-1.591.659l-.622.621a.75.75 0 0 1-1.06 0l-.622-.621A2.25 2.25 0 0 0 5.258 13H.75a.75.75 0 0 1-.75-.75Zm7.251 10.324.004-5.073-.002-2.253A2.25 2.25 0 0 0 5.003 2.5H1.5v9h3.757a3.75 3.75 0 0 1 1.994.574ZM8.755 4.75l-.004 7.322a3.752 3.752 0 0 1 1.992-.572H14.5v-9h-3.495a2.25 2.25 0 0 0-2.25 2.25Z"></path> </svg> </template> <template id="code-review-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code-review"> <path d="M1.75 1h12.5c.966 0 1.75.784 1.75 1.75v8.5A1.75 1.75 0 0 1 14.25 13H8.061l-2.574 2.573A1.458 1.458 0 0 1 3 14.543V13H1.75A1.75 1.75 0 0 1 0 11.25v-8.5C0 1.784.784 1 1.75 1ZM1.5 2.75v8.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h6.5a.25.25 0 0 0 .25-.25v-8.5a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25Zm5.28 1.72a.75.75 0 0 1 0 1.06L5.31 7l1.47 1.47a.751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018l-2-2a.75.75 0 0 1 0-1.06l2-2a.75.75 0 0 1 1.06 0Zm2.44 0a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L10.69 7 9.22 5.53a.75.75 0 0 1 0-1.06Z"></path> </svg> </template> <template id="codespaces-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-codespaces"> <path d="M0 11.25c0-.966.784-1.75 1.75-1.75h12.5c.966 0 1.75.784 1.75 1.75v3A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25Zm2-9.5C2 .784 2.784 0 3.75 0h8.5C13.216 0 14 .784 14 1.75v5a1.75 1.75 0 0 1-1.75 1.75h-8.5A1.75 1.75 0 0 1 2 6.75Zm1.75-.25a.25.25 0 0 0-.25.25v5c0 .138.112.25.25.25h8.5a.25.25 0 0 0 .25-.25v-5a.25.25 0 0 0-.25-.25Zm-2 9.5a.25.25 0 0 0-.25.25v3c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-3a.25.25 0 0 0-.25-.25Z"></path><path d="M7 12.75a.75.75 0 0 1 .75-.75h4.5a.75.75 0 0 1 0 
1.5h-4.5a.75.75 0 0 1-.75-.75Zm-4 0a.75.75 0 0 1 .75-.75h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1-.75-.75Z"></path> </svg> </template> <template id="comment-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-comment"> <path d="M1 2.75C1 1.784 1.784 1 2.75 1h10.5c.966 0 1.75.784 1.75 1.75v7.5A1.75 1.75 0 0 1 13.25 12H9.06l-2.573 2.573A1.458 1.458 0 0 1 4 13.543V12H2.75A1.75 1.75 0 0 1 1 10.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h4.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path> </svg> </template> <template id="comment-discussion-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-comment-discussion"> <path d="M1.75 1h8.5c.966 0 1.75.784 1.75 1.75v5.5A1.75 1.75 0 0 1 10.25 10H7.061l-2.574 2.573A1.458 1.458 0 0 1 2 11.543V10h-.25A1.75 1.75 0 0 1 0 8.25v-5.5C0 1.784.784 1 1.75 1ZM1.5 2.75v5.5c0 .138.112.25.25.25h1a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h3.5a.25.25 0 0 0 .25-.25v-5.5a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25Zm13 2a.25.25 0 0 0-.25-.25h-.5a.75.75 0 0 1 0-1.5h.5c.966 0 1.75.784 1.75 1.75v5.5A1.75 1.75 0 0 1 14.25 12H14v1.543a1.458 1.458 0 0 1-2.487 1.03L9.22 12.28a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215l2.22 2.22v-2.19a.75.75 0 0 1 .75-.75h1a.25.25 0 0 0 .25-.25Z"></path> </svg> </template> <template id="organization-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-organization"> <path d="M1.75 16A1.75 1.75 0 0 1 0 14.25V1.75C0 .784.784 0 1.75 0h8.5C11.216 0 12 .784 12 1.75v12.5c0 .085-.006.168-.018.25h2.268a.25.25 0 0 0 .25-.25V8.285a.25.25 0 0 0-.111-.208l-1.055-.703a.749.749 0 1 1 .832-1.248l1.055.703c.487.325.779.871.779 1.456v5.965A1.75 1.75 0 0 1 14.25 16h-3.5a.766.766 0 0 1-.197-.026c-.099.017-.2.026-.303.026h-3a.75.75 0 0 1-.75-.75V14h-1v1.25a.75.75 0 0 1-.75.75Zm-.25-1.75c0 .138.112.25.25.25H4v-1.25a.75.75 0 0 1 .75-.75h2.5a.75.75 0 0 1 .75.75v1.25h2.25a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25ZM3.75 6h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 3.75A.75.75 0 0 1 3.75 3h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 3.75Zm4 3A.75.75 0 0 1 7.75 6h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 7 6.75ZM7.75 3h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 9.75A.75.75 0 0 1 3.75 9h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 9.75ZM7.75 9h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5Z"></path> </svg> </template> <template id="rocket-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-rocket"> <path d="M14.064 0h.186C15.216 0 16 .784 16 1.75v.186a8.752 8.752 0 0 1-2.564 6.186l-.458.459c-.314.314-.641.616-.979.904v3.207c0 .608-.315 1.172-.833 1.49l-2.774 1.707a.749.749 0 0 1-1.11-.418l-.954-3.102a1.214 1.214 0 0 1-.145-.125L3.754 9.816a1.218 1.218 0 0 1-.124-.145L.528 8.717a.749.749 0 0 1-.418-1.11l1.71-2.774A1.748 1.748 0 0 1 3.31 4h3.204c.288-.338.59-.665.904-.979l.459-.458A8.749 8.749 0 0 1 14.064 0ZM8.938 3.623h-.002l-.458.458c-.76.76-1.437 1.598-2.02 2.5l-1.5 2.317 2.143 2.143 2.317-1.5c.902-.583 1.74-1.26 2.499-2.02l.459-.458a7.25 7.25 0 0 0 2.123-5.127V1.75a.25.25 0 0 0-.25-.25h-.186a7.249 7.249 0 0 0-5.125 2.123ZM3.56 14.56c-.732.732-2.334 1.045-3.005 1.148a.234.234 0 0 1-.201-.064.234.234 0 0 
1-.064-.201c.103-.671.416-2.273 1.15-3.003a1.502 1.502 0 1 1 2.12 2.12Zm6.94-3.935c-.088.06-.177.118-.266.175l-2.35 1.521.548 1.783 1.949-1.2a.25.25 0 0 0 .119-.213ZM3.678 8.116 5.2 5.766c.058-.09.117-.178.176-.266H3.309a.25.25 0 0 0-.213.119l-1.2 1.95ZM12 5a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path> </svg> </template> <template id="shield-check-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield-check"> <path d="m8.533.133 5.25 1.68A1.75 1.75 0 0 1 15 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.697 1.697 0 0 1-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 0 1 1.217-1.667l5.25-1.68a1.748 1.748 0 0 1 1.066 0Zm-.61 1.429.001.001-5.25 1.68a.251.251 0 0 0-.174.237V7c0 1.36.275 2.666 1.057 3.859.784 1.194 2.121 2.342 4.366 3.298a.196.196 0 0 0 .154 0c2.245-.957 3.582-2.103 4.366-3.297C13.225 9.666 13.5 8.358 13.5 7V3.48a.25.25 0 0 0-.174-.238l-5.25-1.68a.25.25 0 0 0-.153 0ZM11.28 6.28l-3.5 3.5a.75.75 0 0 1-1.06 0l-1.5-1.5a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215l.97.97 2.97-2.97a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path> </svg> </template> <template id="heart-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-heart"> <path d="m8 14.25.345.666a.75.75 0 0 1-.69 0l-.008-.004-.018-.01a7.152 7.152 0 0 1-.31-.17 22.055 22.055 0 0 1-3.434-2.414C2.045 10.731 0 8.35 0 5.5 0 2.836 2.086 1 4.25 1 5.797 1 7.153 1.802 8 3.02 8.847 1.802 10.203 1 11.75 1 13.914 1 16 2.836 16 5.5c0 2.85-2.045 5.231-3.885 6.818a22.066 22.066 0 0 1-3.744 2.584l-.018.01-.006.003h-.002ZM4.25 2.5c-1.336 0-2.75 1.164-2.75 3 0 2.15 1.58 4.144 3.365 5.682A20.58 20.58 0 0 0 8 13.393a20.58 20.58 0 0 0 3.135-2.211C12.92 9.644 14.5 7.65 14.5 5.5c0-1.836-1.414-3-2.75-3-1.373 0-2.609.986-3.029 2.456a.749.749 0 0 1-1.442 0C6.859 3.486 5.623 2.5 4.25 2.5Z"></path> </svg> </template> <template id="server-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-server"> <path d="M1.75 1h12.5c.966 0 1.75.784 1.75 1.75v4c0 .372-.116.717-.314 1 .198.283.314.628.314 1v4a1.75 1.75 0 0 1-1.75 1.75H1.75A1.75 1.75 0 0 1 0 12.75v-4c0-.358.109-.707.314-1a1.739 1.739 0 0 1-.314-1v-4C0 1.784.784 1 1.75 1ZM1.5 2.75v4c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-4a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25Zm.25 5.75a.25.25 0 0 0-.25.25v4c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-4a.25.25 0 0 0-.25-.25ZM7 4.75A.75.75 0 0 1 7.75 4h4.5a.75.75 0 0 1 0 1.5h-4.5A.75.75 0 0 1 7 4.75ZM7.75 10h4.5a.75.75 0 0 1 0 1.5h-4.5a.75.75 0 0 1 0-1.5ZM3 4.75A.75.75 0 0 1 3.75 4h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 4.75ZM3.75 10h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5Z"></path> </svg> </template> <template id="globe-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-globe"> <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM5.78 8.75a9.64 9.64 0 0 0 1.363 4.177c.255.426.542.832.857 1.215.245-.296.551-.705.857-1.215A9.64 9.64 0 0 0 10.22 8.75Zm4.44-1.5a9.64 9.64 0 0 0-1.363-4.177c-.307-.51-.612-.919-.857-1.215a9.927 9.927 0 0 0-.857 1.215A9.64 9.64 0 0 0 5.78 7.25Zm-5.944 1.5H1.543a6.507 6.507 0 0 0 4.666 5.5c-.123-.181-.24-.365-.352-.552-.715-1.192-1.437-2.874-1.581-4.948Zm-2.733-1.5h2.733c.144-2.074.866-3.756 
1.58-4.948.12-.197.237-.381.353-.552a6.507 6.507 0 0 0-4.666 5.5Zm10.181 1.5c-.144 2.074-.866 3.756-1.58 4.948-.12.197-.237.381-.353.552a6.507 6.507 0 0 0 4.666-5.5Zm2.733-1.5a6.507 6.507 0 0 0-4.666-5.5c.123.181.24.365.353.552.714 1.192 1.436 2.874 1.58 4.948Z"></path> </svg> </template> <template id="issue-opened-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened"> <path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path><path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Z"></path> </svg> </template> <template id="device-mobile-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-mobile"> <path d="M3.75 0h8.5C13.216 0 14 .784 14 1.75v12.5A1.75 1.75 0 0 1 12.25 16h-8.5A1.75 1.75 0 0 1 2 14.25V1.75C2 .784 2.784 0 3.75 0ZM3.5 1.75v12.5c0 .138.112.25.25.25h8.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25ZM8 13a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"></path> </svg> </template> <template id="package-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-package"> <path d="m8.878.392 5.25 3.045c.54.314.872.89.872 1.514v6.098a1.75 1.75 0 0 1-.872 1.514l-5.25 3.045a1.75 1.75 0 0 1-1.756 0l-5.25-3.045A1.75 1.75 0 0 1 1 11.049V4.951c0-.624.332-1.201.872-1.514L7.122.392a1.75 1.75 0 0 1 1.756 0ZM7.875 1.69l-4.63 2.685L8 7.133l4.755-2.758-4.63-2.685a.248.248 0 0 0-.25 0ZM2.5 5.677v5.372c0 .09.047.171.125.216l4.625 2.683V8.432Zm6.25 8.271 4.625-2.683a.25.25 0 0 0 .125-.216V5.677L8.75 8.432Z"></path> </svg> </template> <template id="credit-card-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-credit-card"> <path d="M10.75 9a.75.75 0 0 0 0 1.5h1.5a.75.75 0 0 0 0-1.5h-1.5Z"></path><path d="M0 3.75C0 2.784.784 2 1.75 2h12.5c.966 0 1.75.784 1.75 1.75v8.5A1.75 1.75 0 0 1 14.25 14H1.75A1.75 1.75 0 0 1 0 12.25ZM14.5 6.5h-13v5.75c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25Zm0-2.75a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25V5h13Z"></path> </svg> </template> <template id="play-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play"> <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm4.879-2.773 4.264 2.559a.25.25 0 0 1 0 .428l-4.264 2.559A.25.25 0 0 1 6 10.559V5.442a.25.25 0 0 1 .379-.215Z"></path> </svg> </template> <template id="gift-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-gift"> <path d="M2 2.75A2.75 2.75 0 0 1 4.75 0c.983 0 1.873.42 2.57 1.232.268.318.497.668.68 1.042.183-.375.411-.725.68-1.044C9.376.42 10.266 0 11.25 0a2.75 2.75 0 0 1 2.45 4h.55c.966 0 1.75.784 1.75 1.75v2c0 .698-.409 1.301-1 1.582v4.918A1.75 1.75 0 0 1 13.25 16H2.75A1.75 1.75 0 0 1 1 14.25V9.332C.409 9.05 0 8.448 0 7.75v-2C0 4.784.784 4 1.75 4h.55c-.192-.375-.3-.8-.3-1.25ZM7.25 9.5H2.5v4.75c0 .138.112.25.25.25h4.5Zm1.5 0v5h4.5a.25.25 0 0 0 .25-.25V9.5Zm0-4V8h5.5a.25.25 0 0 0 .25-.25v-2a.25.25 0 0 0-.25-.25Zm-7 0a.25.25 0 0 0-.25.25v2c0 .138.112.25.25.25h5.5V5.5h-5.5Zm3-4a1.25 1.25 0 0 0 0 2.5h2.309c-.233-.818-.542-1.401-.878-1.793-.43-.502-.915-.707-1.431-.707ZM8.941 4h2.309a1.25 1.25 0 0 0 0-2.5c-.516 
0-1 .205-1.43.707-.337.392-.646.975-.879 1.793Z"></path> </svg> </template> <template id="code-square-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code-square"> <path d="M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25Zm7.47 3.97a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L10.69 8 9.22 6.53a.75.75 0 0 1 0-1.06ZM6.78 6.53 5.31 8l1.47 1.47a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215l-2-2a.75.75 0 0 1 0-1.06l2-2a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path> </svg> </template> <template id="device-desktop-icon"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-desktop"> <path d="M14.25 1c.966 0 1.75.784 1.75 1.75v7.5A1.75 1.75 0 0 1 14.25 12h-3.727c.099 1.041.52 1.872 1.292 2.757A.752.752 0 0 1 11.25 16h-6.5a.75.75 0 0 1-.565-1.243c.772-.885 1.192-1.716 1.292-2.757H1.75A1.75 1.75 0 0 1 0 10.25v-7.5C0 1.784.784 1 1.75 1ZM1.75 2.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25ZM9.018 12H6.982a5.72 5.72 0 0 1-.765 2.5h3.566a5.72 5.72 0 0 1-.765-2.5Z"></path> </svg> </template> <div class="position-relative"> <ul role="listbox" class="ActionListWrap QueryBuilder-ListWrap" aria-label="Suggestions" data-action=" combobox-commit:query-builder#comboboxCommit mousedown:query-builder#resultsMousedown " data-target="query-builder.resultsList" data-persist-list=false id="query-builder-test-results" ></ul> </div> <div class="FormControl-inlineValidation" id="validation-54ded3be-8fdb-4580-86b0-4948ae2b30fd" hidden="hidden"> <span class="FormControl-inlineValidation--visual"> <svg aria-hidden="true" height="12" viewBox="0 0 12 12" version="1.1" width="12" data-view-component="true" class="octicon octicon-alert-fill"> <path d="M4.855.708c.5-.896 1.79-.896 2.29 0l4.675 8.351a1.312 1.312 0 0 1-1.146 1.954H1.33A1.313 1.313 0 0 1 .183 9.058ZM7 7V3H5v4Zm-1 3a1 1 0 1 0 0-2 1 1 0 0 0 0 2Z"></path> </svg> </span> <span></span> </div> </div> <div data-target="query-builder.screenReaderFeedback" aria-live="polite" aria-atomic="true" class="sr-only"></div> </query-builder></form> <div class="d-flex flex-row color-fg-muted px-3 text-small color-bg-default search-feedback-prompt"> <a target="_blank" href="https://docs.github.com/search-github/github-code-search/understanding-github-code-search-syntax" data-view-component="true" class="Link color-fg-accent text-normal ml-2">Search syntax tips</a> <div class="d-flex flex-1"></div> </div> </div> </div> </div> </modal-dialog></div> </div> <div data-action="click:qbsearch-input#retract" class="dark-backdrop position-fixed" hidden data-target="qbsearch-input.darkBackdrop"></div> <div class="color-fg-default"> <dialog-helper> <dialog data-target="qbsearch-input.feedbackDialog" data-action="close:qbsearch-input#handleDialogClose cancel:qbsearch-input#handleDialogClose" id="feedback-dialog" aria-modal="true" aria-labelledby="feedback-dialog-title" aria-describedby="feedback-dialog-description" data-view-component="true" class="Overlay Overlay-whenNarrow Overlay--size-medium Overlay--motion-scaleFade Overlay--disableScroll"> <div data-view-component="true" class="Overlay-header"> <div 
class="Overlay-headerContentWrap"> <div class="Overlay-titleWrap"> <h1 class="Overlay-title " id="feedback-dialog-title"> Provide feedback </h1> </div> <div class="Overlay-actionWrap"> <button data-close-dialog-id="feedback-dialog" aria-label="Close" type="button" data-view-component="true" class="close-button Overlay-closeButton"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path> </svg></button> </div> </div> </div> <scrollable-region data-labelled-by="feedback-dialog-title"> <div data-view-component="true" class="Overlay-body"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form id="code-search-feedback-form" data-turbo="false" action="/search/feedback" accept-charset="UTF-8" method="post"><input type="hidden" data-csrf="true" name="authenticity_token" value="IWOa6oUoAte50J8upFBnQaqeSa4QEavaZh60IKZoG3E0gx3GB34W0MccSFOWiFuf2JhelT//JvLXXqKdSDuzvw==" /> <p>We read every piece of feedback, and take your input very seriously.</p> <textarea name="feedback" class="form-control width-full mb-2" style="height: 120px" id="feedback"></textarea> <input name="include_email" id="include_email" aria-label="Include my email address so I can be contacted" class="form-control mr-2" type="checkbox"> <label for="include_email" style="font-weight: normal">Include my email address so I can be contacted</label> </form></div> </scrollable-region> <div data-view-component="true" class="Overlay-footer Overlay-footer--alignEnd"> <button data-close-dialog-id="feedback-dialog" type="button" data-view-component="true" class="btn"> Cancel </button> <button form="code-search-feedback-form" data-action="click:qbsearch-input#submitFeedback" type="submit" data-view-component="true" class="btn-primary btn"> Submit feedback </button> </div> </dialog></dialog-helper> <custom-scopes data-target="qbsearch-input.customScopesManager"> <dialog-helper> <dialog data-target="custom-scopes.customScopesModalDialog" data-action="close:qbsearch-input#handleDialogClose cancel:qbsearch-input#handleDialogClose" id="custom-scopes-dialog" aria-modal="true" aria-labelledby="custom-scopes-dialog-title" aria-describedby="custom-scopes-dialog-description" data-view-component="true" class="Overlay Overlay-whenNarrow Overlay--size-medium Overlay--motion-scaleFade Overlay--disableScroll"> <div data-view-component="true" class="Overlay-header Overlay-header--divided"> <div class="Overlay-headerContentWrap"> <div class="Overlay-titleWrap"> <h1 class="Overlay-title " id="custom-scopes-dialog-title"> Saved searches </h1> <h2 id="custom-scopes-dialog-description" class="Overlay-description">Use saved searches to filter your results more quickly</h2> </div> <div class="Overlay-actionWrap"> <button data-close-dialog-id="custom-scopes-dialog" aria-label="Close" type="button" data-view-component="true" class="close-button Overlay-closeButton"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 
1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path> </svg></button> </div> </div> </div> <scrollable-region data-labelled-by="custom-scopes-dialog-title"> <div data-view-component="true" class="Overlay-body"> <div data-target="custom-scopes.customScopesModalDialogFlash"></div> <div hidden class="create-custom-scope-form" data-target="custom-scopes.createCustomScopeForm"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form id="custom-scopes-dialog-form" data-turbo="false" action="/search/custom_scopes" accept-charset="UTF-8" method="post"><input type="hidden" data-csrf="true" name="authenticity_token" value="RZTDt9mVc6J44J99UWkV9ETWfgZAaNlU77SAg2ffV4CLP8Oszbe6EvrLH/tiY6LEp71W2KfwXpXSZBkINM1NoQ==" /> <div data-target="custom-scopes.customScopesModalDialogFlash"></div> <input type="hidden" id="custom_scope_id" name="custom_scope_id" data-target="custom-scopes.customScopesIdField"> <div class="form-group"> <label for="custom_scope_name">Name</label> <auto-check src="/search/custom_scopes/check_name" required only-validate-on-blur="false"> <input type="text" name="custom_scope_name" id="custom_scope_name" data-target="custom-scopes.customScopesNameField" class="form-control" autocomplete="off" placeholder="github-ruby" required maxlength="50"> <input type="hidden" data-csrf="true" value="rpnBSWcZg/AxaFl6XadHMmrYmnBAKqY2rO7fWIymdDfKrC4aefUG503IWDXYWPRlfzaG1QDapM29DKl9iyIzIQ==" /> </auto-check> </div> <div class="form-group"> <label for="custom_scope_query">Query</label> <input type="text" name="custom_scope_query" id="custom_scope_query" data-target="custom-scopes.customScopesQueryField" class="form-control" autocomplete="off" placeholder="(repo:mona/a OR repo:mona/b) AND lang:python" required maxlength="500"> </div> <p class="text-small color-fg-muted"> To see all available qualifiers, see our <a class="Link--inTextBlock" href="https://docs.github.com/search-github/github-code-search/understanding-github-code-search-syntax">documentation</a>. 
ymcui/Chinese-LLaMA-Alpaca-2","defaultProtocol":"http","newSshKeyUrl":"/settings/ssh/new","setProtocolPath":"/users/set_protocol"},"platformInfo":{"cloneUrl":"https://desktop.github.com","showVisualStudioCloneButton":false,"visualStudioCloneUrl":"https://windows.github.com","showXcodeCloneButton":false,"xcodeCloneUrl":"xcode://clone?repo=https%3A%2F%2Fgithub.com%2Fymcui%2FChinese-LLaMA-Alpaca-2","zipballUrl":"/ymcui/Chinese-LLaMA-Alpaca-2/archive/refs/heads/main.zip"}},"newCodespacePath":"/codespaces/new?hide_repo_select=true\u0026repo=668052031"},"popovers":{"rename":null,"renamedParentRepo":null},"commitCount":"264","overviewFiles":[{"displayName":"README.md","repoName":"Chinese-LLaMA-Alpaca-2","refName":"main","path":"README.md","preferredFileType":"readme","tabName":"README","richText":"\u003carticle class=\"markdown-body entry-content container-lg\" itemprop=\"text\"\u003e\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch1 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-3\"\u003eChinese-LLaMA-Alpaca-3\u003c/a\u003e项目启动!\u003c/h1\u003e\u003ca id=\"user-content-chinese-llama-alpaca-3项目启动\" class=\"anchor\" aria-label=\"Permalink: Chinese-LLaMA-Alpaca-3项目启动!\" href=\"#chinese-llama-alpaca-3项目启动\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e\u003ca href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README.md\"\u003e\u003cstrong\u003e🇨🇳中文\u003c/strong\u003e\u003c/a\u003e | \u003ca href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README_EN.md\"\u003e\u003cstrong\u003e🌐English\u003c/strong\u003e\u003c/a\u003e | \u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki\"\u003e\u003cstrong\u003e📖文档/Docs\u003c/strong\u003e\u003c/a\u003e | \u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/issues\"\u003e\u003cstrong\u003e❓提问/Issues\u003c/strong\u003e\u003c/a\u003e | \u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/discussions\"\u003e\u003cstrong\u003e💬讨论/Discussions\u003c/strong\u003e\u003c/a\u003e | \u003ca href=\"http://llm-arena.ymcui.com/\" rel=\"nofollow\"\u003e\u003cstrong\u003e⚔️竞技场/Arena\u003c/strong\u003e\u003c/a\u003e\u003c/p\u003e\n\u003cp align=\"center\" dir=\"auto\"\u003e\n \u003cbr\u003e\n \u003ca target=\"_blank\" rel=\"noopener noreferrer\" href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/pics/banner.png\"\u003e\u003cimg src=\"/ymcui/Chinese-LLaMA-Alpaca-2/raw/main/pics/banner.png\" width=\"800\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n \u003cbr\u003e\n\u003c/p\u003e\n\u003cp align=\"center\" dir=\"auto\"\u003e\n \u003ca target=\"_blank\" rel=\"noopener noreferrer nofollow\" 
href=\"https://camo.githubusercontent.com/31cd0ec9b466a08fc3b292763f2130bb9367df59508160e848f3a17bd058e595/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c6963656e73652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d322e7376673f636f6c6f723d626c7565267374796c653d666c61742d737175617265\"\u003e\u003cimg alt=\"GitHub\" src=\"https://camo.githubusercontent.com/31cd0ec9b466a08fc3b292763f2130bb9367df59508160e848f3a17bd058e595/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c6963656e73652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d322e7376673f636f6c6f723d626c7565267374796c653d666c61742d737175617265\" data-canonical-src=\"https://img.shields.io/github/license/ymcui/Chinese-LLaMA-Alpaca-2.svg?color=blue\u0026amp;style=flat-square\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n \u003ca target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https://camo.githubusercontent.com/24dcc1a4b8254cbc773e6f7a3860e3481282ffc8f863f09ea0766cb580782d6f/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f762f72656c656173652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32\"\u003e\u003cimg alt=\"GitHub release (latest by date)\" src=\"https://camo.githubusercontent.com/24dcc1a4b8254cbc773e6f7a3860e3481282ffc8f863f09ea0766cb580782d6f/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f762f72656c656173652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32\" data-canonical-src=\"https://img.shields.io/github/v/release/ymcui/Chinese-LLaMA-Alpaca-2\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n \u003ca target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https://camo.githubusercontent.com/ae50bb200144d74e4430372f7bd93305547d17775caaab21b3e8aa541b157d20/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c616e6775616765732f746f702f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32\"\u003e\u003cimg alt=\"GitHub top language\" src=\"https://camo.githubusercontent.com/ae50bb200144d74e4430372f7bd93305547d17775caaab21b3e8aa541b157d20/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c616e6775616765732f746f702f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32\" data-canonical-src=\"https://img.shields.io/github/languages/top/ymcui/Chinese-LLaMA-Alpaca-2\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n \u003ca href=\"https://app.codacy.com/gh/ymcui/Chinese-LLaMA-Alpaca-2/dashboard?utm_source=gh\u0026amp;utm_medium=referral\u0026amp;utm_content=\u0026amp;utm_campaign=Badge_grade\" rel=\"nofollow\"\u003e\u003cimg src=\"https://camo.githubusercontent.com/805d9d39942744f73da9db6bfc252b112e3f52673a42cdc0491db3bed02f524f/68747470733a2f2f6170702e636f646163792e636f6d2f70726f6a6563742f62616467652f47726164652f3137313066616163356536333461636161626663323662306137373863646465\" data-canonical-src=\"https://app.codacy.com/project/badge/Grade/1710faac5e634acaabfc26b0a778cdde\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e本项目基于Meta发布的可商用大模型\u003ca href=\"https://github.com/facebookresearch/llama\"\u003eLlama-2\u003c/a\u003e开发,是\u003ca 
href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e中文LLaMA\u0026amp;Alpaca大模型\u003c/a\u003e的第二期项目,开源了\u003cstrong\u003e中文LLaMA-2基座模型和Alpaca-2指令精调大模型\u003c/strong\u003e。这些模型\u003cstrong\u003e在原版Llama-2的基础上扩充并优化了中文词表\u003c/strong\u003e,使用了大规模中文数据进行增量预训练,进一步提升了中文基础语义和指令理解能力,相比一代相关模型获得了显著性能提升。相关模型\u003cstrong\u003e支持FlashAttention-2训练\u003c/strong\u003e。标准版模型支持4K上下文长度,\u003cstrong\u003e长上下文版模型支持16K、64k上下文长度\u003c/strong\u003e。\u003cstrong\u003eRLHF系列模型\u003c/strong\u003e为标准版模型基础上进行人类偏好对齐精调,相比标准版模型在\u003cstrong\u003e正确价值观体现\u003c/strong\u003e方面获得了显著性能提升。\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e本项目主要内容\u003c/h4\u003e\u003ca id=\"user-content-本项目主要内容\" class=\"anchor\" aria-label=\"Permalink: 本项目主要内容\" href=\"#本项目主要内容\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e🚀 针对Llama-2模型扩充了\u003cstrong\u003e新版中文词表\u003c/strong\u003e,开源了中文LLaMA-2和Alpaca-2大模型\u003c/li\u003e\n\u003cli\u003e🚀 开源了预训练脚本、指令精调脚本,用户可根据需要进一步训练模型\u003c/li\u003e\n\u003cli\u003e🚀 使用个人电脑的CPU/GPU快速在本地进行大模型量化和部署体验\u003c/li\u003e\n\u003cli\u003e🚀 支持\u003ca href=\"https://github.com/huggingface/transformers\"\u003e🤗transformers\u003c/a\u003e, \u003ca href=\"https://github.com/ggerganov/llama.cpp\"\u003ellama.cpp\u003c/a\u003e, \u003ca href=\"https://github.com/oobabooga/text-generation-webui\"\u003etext-generation-webui\u003c/a\u003e, \u003ca href=\"https://github.com/hwchase17/langchain\"\u003eLangChain\u003c/a\u003e, \u003ca href=\"https://github.com/imartinez/privateGPT\"\u003eprivateGPT\u003c/a\u003e, \u003ca href=\"https://github.com/vllm-project/vllm\"\u003evLLM\u003c/a\u003e等LLaMA生态\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e已开源的模型\u003c/h4\u003e\u003ca id=\"user-content-已开源的模型\" class=\"anchor\" aria-label=\"Permalink: 已开源的模型\" href=\"#已开源的模型\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e基座模型(4K上下文):Chinese-LLaMA-2 (1.3B, 7B, 
#### Open-sourced models

- Foundation models (4K context): Chinese-LLaMA-2 (1.3B, 7B, 13B)
- Chat models (4K context): Chinese-Alpaca-2 (1.3B, 7B, 13B)
- Long-context models (16K/64K):
  - Chinese-LLaMA-2-16K (7B, 13B), Chinese-Alpaca-2-16K (7B, 13B)
  - Chinese-LLaMA-2-64K (7B), Chinese-Alpaca-2-64K (7B)
- Preference-aligned models: Chinese-Alpaca-2-RLHF (1.3B, 7B)

![](./pics/screencast.gif)

---

[Chinese LLaMA & Alpaca LLMs](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | [Multimodal Chinese LLaMA & Alpaca LLMs](https://github.com/airaria/Visual-Chinese-LLaMA-Alpaca) | [Multimodal VLE](https://github.com/iflytek/VLE) | [Chinese MiniRBT](https://github.com/iflytek/MiniRBT) | [Chinese LERT](https://github.com/ymcui/LERT) | [Chinese/English PERT](https://github.com/ymcui/PERT) | [Chinese MacBERT](https://github.com/ymcui/MacBERT) | [Chinese ELECTRA](https://github.com/ymcui/Chinese-ELECTRA) | [Chinese XLNet](https://github.com/ymcui/Chinese-XLNet) | [Chinese BERT](https://github.com/ymcui/Chinese-BERT-wwm) | [Knowledge distillation tool TextBrewer](https://github.com/airaria/TextBrewer) | [Model pruning tool TextPruner](https://github.com/airaria/TextPruner) | [Distillation-and-pruning toolkit GRAIN](https://github.com/airaria/GRAIN)
rel=\"nofollow\"\u003ehttps://sota.jiqizhixin.com/project/chinese-llama-alpaca-2\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2024/01/23] 添加新版GGUF模型(imatrix量化)、AWQ量化模型,支持vLLM下加载YaRN长上下文模型。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v4.1\"\u003e📚 v4.1版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/12/29] 发布长上下文模型Chinese-LLaMA-2-7B-64K和Chinese-Alpaca-2-7B-64K,同时发布经过人类偏好对齐(RLHF)的Chinese-Alpaca-2-RLHF(1.3B/7B)。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v4.0\"\u003e📚 v4.0版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/09/01] 发布长上下文模型Chinese-Alpaca-2-7B-16K和Chinese-Alpaca-2-13B-16K,该模型可直接应用于下游任务,例如privateGPT等。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v3.1\"\u003e📚 v3.1版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/08/25] 发布长上下文模型Chinese-LLaMA-2-7B-16K和Chinese-LLaMA-2-13B-16K,支持16K上下文,并可通过NTK方法进一步扩展至24K+。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v3.0\"\u003e📚 v3.0版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/08/14] 发布Chinese-LLaMA-2-13B和Chinese-Alpaca-2-13B,添加text-generation-webui/LangChain/privateGPT支持,添加CFG Sampling解码方法等。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v2.0\"\u003e📚 v2.0版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/08/02] 添加FlashAttention-2训练支持,基于vLLM的推理加速支持,提供长回复系统提示语模板等。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v1.1\"\u003e📚 v1.1版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/07/31] 正式发布Chinese-LLaMA-2-7B(基座模型),使用120G中文语料增量训练(与一代Plus系列相同);进一步通过5M条指令数据精调(相比一代略微增加),得到Chinese-Alpaca-2-7B(指令/chat模型)。详情查看\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v1.0\"\u003e📚 v1.0版本发布日志\u003c/a\u003e\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e[2023/07/19] 🚀启动\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2\"\u003e中文LLaMA-2、Alpaca-2开源大模型项目\u003c/a\u003e\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e内容导引\u003c/h2\u003e\u003ca id=\"user-content-内容导引\" class=\"anchor\" aria-label=\"Permalink: 内容导引\" href=\"#内容导引\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003e章节\u003c/th\u003e\n\u003cth\u003e描述\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"#%E6%A8%A1%E5%9E%8B%E7%AE%80%E4%BB%8B\"\u003e💁🏻‍♂️模型简介\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e简要介绍本项目相关模型的技术特点\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca 
href=\"#%E6%A8%A1%E5%9E%8B%E4%B8%8B%E8%BD%BD\"\u003e⏬模型下载\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e中文LLaMA-2、Alpaca-2大模型下载地址\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"#%E6%8E%A8%E7%90%86%E4%B8%8E%E9%83%A8%E7%BD%B2\"\u003e💻推理与部署\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e介绍了如何对模型进行量化并使用个人电脑部署并体验大模型\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"#%E7%B3%BB%E7%BB%9F%E6%95%88%E6%9E%9C\"\u003e💯系统效果\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e介绍了模型在部分任务上的效果\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"#%E8%AE%AD%E7%BB%83%E4%B8%8E%E7%B2%BE%E8%B0%83\"\u003e📝训练与精调\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e介绍了如何训练和精调中文LLaMA-2、Alpaca-2大模型\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"#%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98\"\u003e❓常见问题\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e一些常见问题的回复\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e模型简介\u003c/h2\u003e\u003ca id=\"user-content-模型简介\" class=\"anchor\" aria-label=\"Permalink: 模型简介\" href=\"#模型简介\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e本项目推出了基于Llama-2的中文LLaMA-2以及Alpaca-2系列模型,相比\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e其主要特点如下:\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e📖 经过优化的中文词表\u003c/h4\u003e\u003ca id=\"user-content--经过优化的中文词表\" class=\"anchor\" aria-label=\"Permalink: 📖 经过优化的中文词表\" href=\"#-经过优化的中文词表\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e在\u003ca 
href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e中,我们针对一代LLaMA模型的32K词表扩展了中文字词(LLaMA:49953,Alpaca:49954)\u003c/li\u003e\n\u003cli\u003e在本项目中,我们\u003cstrong\u003e重新设计了新词表\u003c/strong\u003e(大小:55296),进一步提升了中文字词的覆盖程度,同时统一了LLaMA/Alpaca的词表,避免了因混用词表带来的问题,以期进一步提升模型对中文文本的编解码效率\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e⚡ 基于FlashAttention-2的高效注意力\u003c/h4\u003e\u003ca id=\"user-content--基于flashattention-2的高效注意力\" class=\"anchor\" aria-label=\"Permalink: ⚡ 基于FlashAttention-2的高效注意力\" href=\"#-基于flashattention-2的高效注意力\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca href=\"https://github.com/Dao-AILab/flash-attention\"\u003eFlashAttention-2\u003c/a\u003e是高效注意力机制的一种实现,相比其一代技术具有\u003cstrong\u003e更快的速度和更优化的显存占用\u003c/strong\u003e\u003c/li\u003e\n\u003cli\u003e当上下文长度更长时,为了避免显存爆炸式的增长,使用此类高效注意力技术尤为重要\u003c/li\u003e\n\u003cli\u003e本项目的所有模型均使用了FlashAttention-2技术进行训练\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e🚄 基于PI和YaRN的超长上下文扩展技术\u003c/h4\u003e\u003ca id=\"user-content--基于pi和yarn的超长上下文扩展技术\" class=\"anchor\" aria-label=\"Permalink: 🚄 基于PI和YaRN的超长上下文扩展技术\" href=\"#-基于pi和yarn的超长上下文扩展技术\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e在\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e中,我们实现了\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca/pull/743\" data-hovercard-type=\"pull_request\" data-hovercard-url=\"/ymcui/Chinese-LLaMA-Alpaca/pull/743/hovercard\"\u003e基于NTK的上下文扩展技术\u003c/a\u003e,可在不继续训练模型的情况下支持更长的上下文\u003c/li\u003e\n\u003cli\u003e基于\u003ca href=\"https://arxiv.org/abs/2306.15595\" rel=\"nofollow\"\u003e位置插值PI\u003c/a\u003e和NTK等方法推出了16K长上下文版模型,支持16K上下文,并可通过NTK方法最高扩展至24K-32K\u003c/li\u003e\n\u003cli\u003e基于\u003ca href=\"https://arxiv.org/abs/2309.00071\" 
rel=\"nofollow\"\u003eYaRN\u003c/a\u003e方法进一步推出了64K长上下文版模型,支持64K上下文\u003c/li\u003e\n\u003cli\u003e进一步设计了\u003cstrong\u003e方便的自适应经验公式\u003c/strong\u003e,无需针对不同的上下文长度设置NTK超参,降低了使用难度\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e🤖 简化的中英双语系统提示语\u003c/h4\u003e\u003ca id=\"user-content--简化的中英双语系统提示语\" class=\"anchor\" aria-label=\"Permalink: 🤖 简化的中英双语系统提示语\" href=\"#-简化的中英双语系统提示语\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e在\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e中,中文Alpaca系列模型使用了\u003ca href=\"https://github.com/tatsu-lab/stanford_alpaca\"\u003eStanford Alpaca\u003c/a\u003e的指令模板和系统提示语\u003c/li\u003e\n\u003cli\u003e初步实验发现,Llama-2-Chat系列模型的默认系统提示语未能带来统计显著的性能提升,且其内容过于冗长\u003c/li\u003e\n\u003cli\u003e本项目中的Alpaca-2系列模型简化了系统提示语,同时遵循Llama-2-Chat指令模板,以便更好地适配相关生态\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e👮 人类偏好对齐\u003c/h4\u003e\u003ca id=\"user-content--人类偏好对齐\" class=\"anchor\" aria-label=\"Permalink: 👮 人类偏好对齐\" href=\"#-人类偏好对齐\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e在\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e中,中文Alpaca系列模型仅完成预训练和指令精调,获得了基本的对话能力\u003c/li\u003e\n\u003cli\u003e通过基于人类反馈的强化学习(RLHF)实验,发现可显著提升模型传递正确价值观的能力\u003c/li\u003e\n\u003cli\u003e本项目推出了Alpaca-2-RLHF系列模型,使用方式与SFT模型一致\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp dir=\"auto\"\u003e下图展示了本项目以及\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003e一期项目\u003c/a\u003e推出的所有大模型之间的关系。\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e\u003ca target=\"_blank\" rel=\"noopener noreferrer\" href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/pics/models.png\"\u003e\u003cimg src=\"/ymcui/Chinese-LLaMA-Alpaca-2/raw/main/pics/models.png\" alt=\"\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 
tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e模型下载\u003c/h2\u003e\u003ca id=\"user-content-模型下载\" class=\"anchor\" aria-label=\"Permalink: 模型下载\" href=\"#模型下载\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e模型选择指引\u003c/h3\u003e\u003ca id=\"user-content-模型选择指引\" class=\"anchor\" aria-label=\"Permalink: 模型选择指引\" href=\"#模型选择指引\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e以下是中文LLaMA-2和Alpaca-2模型的对比以及建议使用场景。\u003cstrong\u003e如需聊天交互,请选择Alpaca而不是LLaMA。\u003c/strong\u003e\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e对比项\u003c/th\u003e\n\u003cth align=\"center\"\u003e中文LLaMA-2\u003c/th\u003e\n\u003cth align=\"center\"\u003e中文Alpaca-2\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e模型类型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003cstrong\u003e基座模型\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003cstrong\u003e指令/Chat模型(类ChatGPT)\u003c/strong\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e已开源大小\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.3B、7B、13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.3B、7B、13B\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e训练类型\u003c/td\u003e\n\u003ctd align=\"center\"\u003eCausal-LM (CLM)\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令精调\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e训练方式\u003c/td\u003e\n\u003ctd align=\"center\"\u003e7B、13B:LoRA + 全量emb/lm-head\u003cbr\u003e1.3B:全量\u003c/td\u003e\n\u003ctd align=\"center\"\u003e7B、13B:LoRA + 全量emb/lm-head\u003cbr\u003e1.3B:全量\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e基于什么模型训练\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/facebookresearch/llama\"\u003e原版Llama-2\u003c/a\u003e(非chat版)\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e中文LLaMA-2\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e训练语料\u003c/td\u003e\n\u003ctd align=\"center\"\u003e无标注通用语料(120G纯文本)\u003c/td\u003e\n\u003ctd align=\"center\"\u003e有标注指令数据(500万条)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e词表大小\u003csup\u003e[1]\u003c/sup\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e55,296\u003c/td\u003e\n\u003ctd align=\"center\"\u003e55,296\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e上下文长度\u003csup\u003e[2]\u003c/sup\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e标准版:4K(12K-18K)\u003cbr\u003e长上下文版(PI):16K(24K-32K)\u003cbr\u003e长上下文版(YaRN):64K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e标准版:4K(12K-18K)\u003cbr\u003e长上下文版(PI):16K(24K-32K)\u003cbr\u003e长上下文版(YaRN):64K\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e输入模板\u003c/td\u003e\n\u003ctd align=\"center\"\u003e不需要\u003c/td\u003e\n\u003ctd align=\"center\"\u003e需要套用特定模板\u003csup\u003e[3]\u003c/sup\u003e,类似Llama-2-Chat\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e适用场景\u003c/td\u003e\n\u003ctd align=\"center\"\u003e文本续写:给定上文,让模型生成下文\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令理解:问答、写作、聊天、交互等\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e不适用场景\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令理解 、多轮聊天等\u003c/td\u003e\n\u003ctd align=\"center\"\u003e文本无限制自由生成\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e偏好对齐\u003c/td\u003e\n\u003ctd align=\"center\"\u003e无\u003c/td\u003e\n\u003ctd align=\"center\"\u003eRLHF版本(1.3B、7B)\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-alert markdown-alert-note\" dir=\"auto\"\u003e\u003cp class=\"markdown-alert-title\" dir=\"auto\"\u003e\u003csvg class=\"octicon octicon-info mr-2\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"M0 8a8 8 0 1 1 16 0A8 8 0 0 1 0 8Zm8-6.5a6.5 6.5 0 1 0 0 13 6.5 6.5 0 0 0 0-13ZM6.5 7.75A.75.75 0 0 1 7.25 7h1a.75.75 0 0 1 .75.75v2.75h.25a.75.75 0 0 1 0 1.5h-2a.75.75 0 0 1 0-1.5h.25v-2h-.25a.75.75 0 0 1-.75-.75ZM8 6a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z\"\u003e\u003c/path\u003e\u003c/svg\u003eNote\u003c/p\u003e\u003cp dir=\"auto\"\u003e[1] \u003cem\u003e本项目一代模型和二代模型的词表不同,请勿混用。二代LLaMA和Alpaca的词表相同。\u003c/em\u003e\u003cbr\u003e\n[2] \u003cem\u003e括号内表示基于NTK上下文扩展支持的最大长度。\u003c/em\u003e\u003cbr\u003e\n[3] \u003cem\u003eAlpaca-2采用了Llama-2-chat系列模板(格式相同,提示语不同),而不是一代Alpaca的模板,请勿混用。\u003c/em\u003e\u003cbr\u003e\n[4] \u003cem\u003e不建议单独使用1.3B模型,而是通过投机采样搭配更大的模型(7B、13B)使用。\u003c/em\u003e\u003cbr\u003e\u003c/p\u003e\n\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e完整模型下载\u003c/h3\u003e\u003ca id=\"user-content-完整模型下载\" class=\"anchor\" aria-label=\"Permalink: 完整模型下载\" href=\"#完整模型下载\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 
### Full Model Download

Below are the full models, which can be used directly after download without any merging steps. Recommended for users with sufficient network bandwidth.

| Model name | Type | Size | Download links | GGUF |
| :--- | :---: | :---: | :---: | :---: |
| Chinese-LLaMA-2-13B | Base model | 24.7 GB | [[Baidu]](https://pan.baidu.com/s/1T3RqEUSmyg6ZuBwMhwSmoQ?pwd=e9qy) [[Google]](https://drive.google.com/drive/folders/1YNa5qJ0x59OEOI7tNODxea-1YvMPoH05?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-13b) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-13b) | [[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-13b-gguf) |
| Chinese-LLaMA-2-7B | Base model | 12.9 GB | [[Baidu]](https://pan.baidu.com/s/1E5NI3nlQpx1j8z3eIzbIlg?pwd=n8k3) [[Google]](https://drive.google.com/drive/folders/18pp4I-mvQxRA7b8vF9gP-2cH_ocnXVKh?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-7b) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b) | [[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-7b-gguf) |
| Chinese-LLaMA-2-1.3B | Base model | 2.4 GB | [[Baidu]](https://pan.baidu.com/s/1hEuOCllnJJ5NMEZJf8OkRw?pwd=nwjg) [[Google]](https://drive.google.com/drive/folders/1Sd3PA_gs6JctXtBg5HwmHXh9GX93riMP?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-1.3b) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-1.3b) | [[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-1.3b-gguf) |
rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e24.7 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1MT_Zlap1OtdYMgoBNTS3dg?pwd=9xja\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1MTsKlzR61xmbTR4hBWzQas_MOpUZsogN?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-13b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-13b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-13b-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e12.9 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1wxx-CdgbMupXVRBcaN4Slw?pwd=kpn9\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1JsJDVs7tE2y31PBNleBlDPsB7S0ZrY8d?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e2.4 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1PD7Ng-ltOIdUGHNorveptA?pwd=ar1p\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1h6qOy-Unvqs1_CJ8uPp0eKC61Gbbn8n7?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-1.3b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-1.3b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-1.3b-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e长上下文版模型\u003c/h4\u003e\u003ca id=\"user-content-长上下文版模型\" class=\"anchor\" aria-label=\"Permalink: 长上下文版模型\" href=\"#长上下文版模型\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 
#### Long-context models

Below are the long-context models, **recommended for downstream tasks dominated by long text**; otherwise, the standard models above are preferred.

| Model name | Type | Size | Download links | GGUF |
| :--- | :---: | :---: | :---: | :---: |
| Chinese-LLaMA-2-7B-64K 🆕 | Base model | 12.9 GB | [[Baidu]](https://pan.baidu.com/s/1ShDQ2FG2QUJrvfnxCn4hwQ?pwd=xe5k) [[Google]](https://drive.google.com/drive/folders/17l9xJx55L2YNpqt7NiLVQzOZ6fV4rzJ-?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-7b-64k) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b-64k) | [[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-7b-64k-gguf) |
| Chinese-Alpaca-2-7B-64K 🆕 | Instruction model | 12.9 GB | [[Baidu]](https://pan.baidu.com/s/1KBAr9PCGvX2oQkYfCuLEjw?pwd=sgp6) [[Google]](https://drive.google.com/drive/folders/13G_d5xcDnhtaMOaulj1BFiZbVoVwJ-Cu?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-alpaca-2-7b-64k) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-64k) | [[🤗HF]](https://huggingface.co/hfl/chinese-alpaca-2-7b-64k-gguf) |
| Chinese-LLaMA-2-13B-16K | Base model | 24.7 GB | [[Baidu]](https://pan.baidu.com/s/1XWrh3Ru9x4UI4-XmocVT2w?pwd=f7ik) [[Google]](https://drive.google.com/drive/folders/1nii6lF0DgB1u81CnsE4cCK2jD5oq_OW-?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-13b-16k) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-13b-16k) | [[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-13b-16k-gguf) |
rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-13b-16k-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-7B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e基座模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e12.9 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1ZH7T7KU_up61ugarSIXw2g?pwd=pquq\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1Zc6jI5bl3myQbQsY79dWJJ8mP_fyf3iF?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-7b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-7b-16k-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-13B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e24.7 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1gIzRM1eg-Xx1xV-3nXW27A?pwd=qi7c\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1mOkYQCvEqtGoZ9DaIpYFweSkSia2Q0vl?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-13b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-13b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-13b-16k-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-7B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e12.9 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1Qk3U1LyvMb1RSr5AbiatPw?pwd=bfis\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1KBRSd2xAhiVQmamfA5wpm5ovYFRKuMdr?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b-16k-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003eRLHF版模型\u003c/h4\u003e\u003ca id=\"user-content-rlhf版模型\" class=\"anchor\" aria-label=\"Permalink: RLHF版模型\" href=\"#rlhf版模型\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" 
aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e以下是人类偏好对齐版模型,对涉及法律、道德的问题较标准版有更优的价值导向。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e模型名称\u003c/th\u003e\n\u003cth align=\"center\"\u003e类型\u003c/th\u003e\n\u003cth align=\"center\"\u003e大小\u003c/th\u003e\n\u003cth align=\"center\"\u003e下载地址\u003c/th\u003e\n\u003cth align=\"center\"\u003eGGUF\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-7B-RLHF 🆕\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e12.9 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/17GJ1y4rpPDuvWlvPaWgnqw?pwd=4feb\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1OHZVVtwM5McVEIZzyOYgGYLAxcZNVK4D?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b-rlhf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-rlhf\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-7b-rlhf-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-1.3B-RLHF 🆕\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e2.4 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1cLKJKieNitWbOggUXXaamw?pwd=cprp\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/drive/folders/1zcvPUPPkq69SgqRu6YBurAZ9ptcPSZNx?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-1.3b-rlhf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-1.3b-rlhf\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-1.3b-rlhf-gguf\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003eAWQ版模型\u003c/h4\u003e\u003ca id=\"user-content-awq版模型\" class=\"anchor\" aria-label=\"Permalink: AWQ版模型\" href=\"#awq版模型\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 
#### AWQ models

AWQ (Activation-aware Weight Quantization) is an efficient model quantization scheme, currently compatible with mainstream frameworks such as 🤗transformers and llama.cpp.

Pre-searched AWQ results for this project's models are available at: https://huggingface.co/hfl/chinese-llama-alpaca-2-awq

- Generating AWQ quantized models (official AWQ repo): https://github.com/mit-han-lab/llm-awq
- Using AWQ in llama.cpp: https://github.com/ggerganov/llama.cpp/tree/master/awq-py

### LoRA Model Download

Below are the LoRA models (including emb/lm-head), corresponding one-to-one to the full models above. Note that **LoRA models cannot be used directly**; they must be merged with the base model following the tutorial to reconstruct the full model. Recommended for users with limited bandwidth who already have the original Llama-2 and want a lightweight download.

| Model name | Type | Required base model | Size | LoRA download links |
| :--- | :---: | :---: | :---: | :---: |
| Chinese-LLaMA-2-LoRA-13B | Base model | [Llama-2-13B-hf](https://huggingface.co/meta-llama/Llama-2-13b-hf) | 1.5 GB | [[Baidu]](https://pan.baidu.com/s/1PFKTBn54GjAjzWeQISKruw?pwd=we6s) [[Google]](https://drive.google.com/file/d/10Z_k9A9N9D_6RHrMTmbHQRCuI6s1iMb1/view?usp=share_link)<br/>[[🤗HF]](https://huggingface.co/hfl/chinese-llama-2-lora-13b) [[🤖ModelScope]](https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-13b) |
rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-lora-13b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-13b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-LoRA-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e基座模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1bmgqdyRh9E3a2uqOGyNqiQ?pwd=7kvq\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1njJGSU_PRbzjYRNw5RSbC5-4fBOXTVY3/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-lora-7b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-7b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-LoRA-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-13b-hf\" rel=\"nofollow\"\u003eLlama-2-13B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.5 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1Y5giIXOUUzI4Na6JOcviVA?pwd=tc2j\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1z2FIInsYJBTXipgztc-Mv7kkeqscx442/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-lora-13b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-13b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-LoRA-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1g0olPxkB_rlZ9UUVfOnbcw?pwd=5e7w\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1MzJL-ZIzdJW7MIcAiYIDIDJ5dlMi8Kkk/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-lora-7b\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cp dir=\"auto\"\u003e以下是长上下文版模型,\u003cstrong\u003e推荐以长文本为主的下游任务使用\u003c/strong\u003e,否则建议使用上述标准版。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e模型名称\u003c/th\u003e\n\u003cth 
align=\"center\"\u003e类型\u003c/th\u003e\n\u003cth align=\"center\"\u003e合并所需基模型\u003c/th\u003e\n\u003cth align=\"center\"\u003e大小\u003c/th\u003e\n\u003cth align=\"center\"\u003eLoRA下载地址\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-LoRA-7B-64K 🆕\u003c/td\u003e\n\u003ctd align=\"center\"\u003e基座模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1QjqKNM9Xez5g6koUrbII_w?pwd=94pk\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1-NuGqfduUZARRquFjGLpTmI5J-HlXYSR/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-lora-7b-64k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-7b-64k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-LoRA-7B-64K 🆕\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1t6bPpMlJCrs9Ce7LXs09-w?pwd=37it\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1qESorx2PHtIsnj53JJ7XBsdOGHuLNjoI/view?usp=sharing\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-lora-7b-64k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b-64k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-LoRA-13B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e基座模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-13b-hf\" rel=\"nofollow\"\u003eLlama-2-13B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.5 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1VrfOJmhDnXxrXcdnfX00fA?pwd=4t2j\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1mSpigmHcN9YX1spa4QN3IPtx43Vfs55H/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-lora-13b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-13b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-LoRA-7B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e基座模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca 
href=\"https://pan.baidu.com/s/14Jnm7QmcDx3XsK_NHZz6Uw?pwd=5b7i\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1yUdyQuBMAmxmUEAvGiKbjKuxTYPPI-or/view?usp=sharing\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-llama-2-lora-7b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-7b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-LoRA-13B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-13b-hf\" rel=\"nofollow\"\u003eLlama-2-13B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.5 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1g42_X7Z0QWDyrrDqv2jifQ?pwd=bq7n\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1ppGNyMWnuLDcClXN7DBTbKxVehsn3Gd2/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-lora-13b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-13b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-LoRA-7B-16K\u003c/td\u003e\n\u003ctd align=\"center\"\u003e指令模型\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://huggingface.co/meta-llama/Llama-2-7b-hf\" rel=\"nofollow\"\u003eLlama-2-7B-hf\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1.1 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://pan.baidu.com/s/1E7GEZ6stp8EavhkhR06FwA?pwd=ewwy\" rel=\"nofollow\"\u003e[Baidu]\u003c/a\u003e \u003ca href=\"https://drive.google.com/file/d/1GTgDNfMdcQhHEAfMPaP-EOEk_fwDvNEK/view?usp=share_link\" rel=\"nofollow\"\u003e[Google]\u003c/a\u003e \u003cbr\u003e\u003ca href=\"https://huggingface.co/hfl/chinese-alpaca-2-lora-7b-16k\" rel=\"nofollow\"\u003e[🤗HF]\u003c/a\u003e \u003ca href=\"https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b-16k\" rel=\"nofollow\"\u003e[🤖ModelScope]\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-alert markdown-alert-important\" dir=\"auto\"\u003e\u003cp class=\"markdown-alert-title\" dir=\"auto\"\u003e\u003csvg class=\"octicon octicon-report mr-2\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v9.5A1.75 1.75 0 0 1 14.25 13H8.06l-2.573 2.573A1.458 1.458 0 0 1 3 14.543V13H1.75A1.75 1.75 0 0 1 0 11.25Zm1.75-.25a.25.25 0 0 0-.25.25v9.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h6.5a.25.25 0 0 0 .25-.25v-9.5a.25.25 0 0 0-.25-.25Zm7 2.25v2.5a.75.75 0 0 1-1.5 0v-2.5a.75.75 0 0 1 1.5 0ZM9 9a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z\"\u003e\u003c/path\u003e\u003c/svg\u003eImportant\u003c/p\u003e\u003cp dir=\"auto\"\u003eLoRA模型无法单独使用,必须与原版Llama-2进行合并才能转为完整模型。请通过以下方法对模型进行合并。\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca 
href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/online_conversion_zh\"\u003e\u003cstrong\u003e在线转换\u003c/strong\u003e\u003c/a\u003e:Colab用户可利用本项目提供的notebook进行在线转换并量化模型\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/manual_conversion_zh\"\u003e\u003cstrong\u003e手动转换\u003c/strong\u003e\u003c/a\u003e:离线方式转换,生成不同格式的模型,以便进行量化或进一步精调\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e推理与部署\u003c/h2\u003e\u003ca id=\"user-content-推理与部署\" class=\"anchor\" aria-label=\"Permalink: 推理与部署\" href=\"#推理与部署\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e本项目中的相关模型主要支持以下量化、推理和部署方式,具体内容请参考对应教程。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e工具\u003c/th\u003e\n\u003cth\u003e特点\u003c/th\u003e\n\u003cth align=\"center\"\u003eCPU\u003c/th\u003e\n\u003cth align=\"center\"\u003eGPU\u003c/th\u003e\n\u003cth align=\"center\"\u003e量化\u003c/th\u003e\n\u003cth align=\"center\"\u003eGUI\u003c/th\u003e\n\u003cth align=\"center\"\u003eAPI\u003c/th\u003e\n\u003cth align=\"center\"\u003evLLM\u003csup\u003e§\u003c/sup\u003e\u003c/th\u003e\n\u003cth align=\"center\"\u003e16K\u003csup\u003e‡\u003c/sup\u003e\u003c/th\u003e\n\u003cth align=\"center\"\u003e64K\u003csup\u003e‡\u003c/sup\u003e\u003c/th\u003e\n\u003cth align=\"center\"\u003e投机采样\u003c/th\u003e\n\u003cth align=\"center\"\u003e教程\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://github.com/ggerganov/llama.cpp\"\u003e\u003cstrong\u003ellama.cpp\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e丰富的量化选项和高效本地推理\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/llamacpp_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://github.com/huggingface/transformers\"\u003e\u003cstrong\u003e🤗Transformers\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e原生transformers推理接口\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/inference_with_transformers_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://colab.research.google.com/drive/1yu0eZ3a66by8Zqm883LLtRQrguBAb9MR?usp=sharing\" rel=\"nofollow\"\u003e\u003cstrong\u003eColab Demo\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e在Colab中启动交互界面\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://colab.research.google.com/drive/1yu0eZ3a66by8Zqm883LLtRQrguBAb9MR?usp=sharing\" rel=\"nofollow\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://platform.openai.com/docs/api-reference\" rel=\"nofollow\"\u003e\u003cstrong\u003e仿OpenAI API调用\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e仿OpenAI API接口的服务器Demo\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/api_calls_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://github.com/oobabooga/text-generation-webui\"\u003e\u003cstrong\u003etext-generation-webui\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e前端Web UI界面的部署方式\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003csup\u003e†\u003c/sup\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/text-generation-webui_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://github.com/hwchase17/langchain\"\u003e\u003cstrong\u003eLangChain\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e适合二次开发的大模型应用开源框架\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e✅\u003csup\u003e†\u003c/sup\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003csup\u003e†\u003c/sup\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/langchain_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e\u003ca href=\"https://github.com/imartinez/privateGPT\"\u003e\u003cstrong\u003eprivateGPT\u003c/strong\u003e\u003c/a\u003e\u003c/td\u003e\n\u003ctd\u003e基于LangChain的多文档本地问答框架\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e✅\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e❌\u003c/td\u003e\n\u003ctd align=\"center\"\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/privategpt_zh\"\u003elink\u003c/a\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-alert markdown-alert-note\" dir=\"auto\"\u003e\u003cp class=\"markdown-alert-title\" dir=\"auto\"\u003e\u003csvg class=\"octicon octicon-info mr-2\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"M0 8a8 8 0 1 1 16 0A8 8 0 0 1 0 8Zm8-6.5a6.5 6.5 0 1 0 0 13 6.5 6.5 0 0 0 0-13ZM6.5 7.75A.75.75 0 0 1 7.25 7h1a.75.75 0 0 1 .75.75v2.75h.25a.75.75 0 0 1 0 1.5h-2a.75.75 0 0 1 0-1.5h.25v-2h-.25a.75.75 0 0 1-.75-.75ZM8 6a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z\"\u003e\u003c/path\u003e\u003c/svg\u003eNote\u003c/p\u003e\u003cp dir=\"auto\"\u003e\u003csup\u003e†\u003c/sup\u003e 工具支持该特性,但教程中未实现,详细说明请参考对应官方文档\u003cbr\u003e\n\u003csup\u003e‡\u003c/sup\u003e 指是否支持长上下文版本模型(需要第三方库支持自定义RoPE)\u003cbr\u003e\n\u003csup\u003e§\u003c/sup\u003e vLLM后端不支持长上下文版本模型\u003cbr\u003e\u003c/p\u003e\n\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e系统效果\u003c/h2\u003e\u003ca id=\"user-content-系统效果\" class=\"anchor\" aria-label=\"Permalink: 系统效果\" href=\"#系统效果\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp 
dir=\"auto\"\u003e为了评测相关模型的效果,本项目分别进行了生成效果评测和客观效果评测(NLU类),从不同角度对大模型进行评估。需要注意的是,综合评估大模型能力仍然是亟待解决的重要课题,单个数据集的结果并不能综合评估模型性能。推荐用户在自己关注的任务上进行测试,选择适配相关任务的模型。\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e生成效果评测\u003c/h3\u003e\u003ca id=\"user-content-生成效果评测\" class=\"anchor\" aria-label=\"Permalink: 生成效果评测\" href=\"#生成效果评测\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e为了更加直观地了解模型的生成效果,本项目仿照\u003ca href=\"https://chat.lmsys.org/?arena\" rel=\"nofollow\"\u003eFastchat Chatbot Arena\u003c/a\u003e推出了模型在线对战平台,可浏览和评测模型回复质量。对战平台提供了胜率、Elo评分等评测指标,并且可以查看两两模型的对战胜率等结果。题库来自于\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca/tree/main/examples/f16-p7b-p13b-33b\"\u003e一期项目人工制作的200题\u003c/a\u003e,以及在此基础上额外增加的题目。生成回复具有随机性,受解码超参、随机种子等因素影响,因此相关评测并非绝对严谨,结果仅供晾晒参考,欢迎自行体验。部分生成样例请查看\u003ca href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/examples\"\u003eexamples目录\u003c/a\u003e。\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e\u003cstrong\u003e⚔️ 模型竞技场:\u003ca href=\"http://llm-arena.ymcui.com/\" rel=\"nofollow\"\u003ehttp://llm-arena.ymcui.com\u003c/a\u003e\u003c/strong\u003e\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003e系统\u003c/th\u003e\n\u003cth align=\"center\"\u003e对战胜率(无平局) ↓\u003c/th\u003e\n\u003cth align=\"center\"\u003eElo评分\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-13B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e86.84%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1580\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-13B\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e72.01%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1579\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Pro-33B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e64.87%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1548\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e64.11%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1572\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Pro-7B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e62.05%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1500\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e61.67%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1540\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Pro-13B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e61.26%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1567\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Plus-33B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e31.29%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1401\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Plus-13B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.43%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1329\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca\"\u003eChinese-Alpaca-Plus-7B\u003c/a\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e20.92%\u003c/td\u003e\n\u003ctd align=\"center\"\u003e1379\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-alert markdown-alert-note\" dir=\"auto\"\u003e\u003cp class=\"markdown-alert-title\" dir=\"auto\"\u003e\u003csvg class=\"octicon octicon-info mr-2\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"M0 8a8 8 0 1 1 16 0A8 8 0 0 1 0 8Zm8-6.5a6.5 6.5 0 1 0 0 13 6.5 6.5 0 0 0 0-13ZM6.5 7.75A.75.75 0 0 1 7.25 7h1a.75.75 0 0 1 .75.75v2.75h.25a.75.75 0 0 1 0 1.5h-2a.75.75 0 0 1 0-1.5h.25v-2h-.25a.75.75 0 0 1-.75-.75ZM8 6a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z\"\u003e\u003c/path\u003e\u003c/svg\u003eNote\u003c/p\u003e\u003cp dir=\"auto\"\u003e以上结果截至2023年9月1日。最新结果请进入\u003ca href=\"http://llm-arena.ymcui.com/\" rel=\"nofollow\"\u003e\u003cstrong\u003e⚔️竞技场\u003c/strong\u003e\u003c/a\u003e进行查看。\u003c/p\u003e\n\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e客观效果评测:C-Eval\u003c/h3\u003e\u003ca id=\"user-content-客观效果评测c-eval\" class=\"anchor\" aria-label=\"Permalink: 客观效果评测:C-Eval\" href=\"#客观效果评测c-eval\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e\u003ca href=\"https://cevalbenchmark.com\" rel=\"nofollow\"\u003eC-Eval\u003c/a\u003e是一个全面的中文基础模型评估套件,其中验证集和测试集分别包含1.3K和12.3K个选择题,涵盖52个学科。实验结果以“zero-shot / 5-shot”进行呈现。C-Eval推理代码请参考本项目:\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/ceval_zh\"\u003e📖GitHub 
### Objective Evaluation: C-Eval

[C-Eval](https://cevalbenchmark.com) is a comprehensive Chinese foundation-model evaluation suite; its validation and test sets contain 1.3K and 12.3K multiple-choice questions, respectively, covering 52 subjects. Results are reported as "zero-shot / 5-shot". For the C-Eval inference code, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/ceval_zh).

| LLaMA Models | Valid | Test | Alpaca Models | Valid | Test |
| :--- | :---: | :---: | :--- | :---: | :---: |
| **Chinese-LLaMA-2-13B** | 40.6 / 42.7 | 38.0 / 41.6 | **Chinese-Alpaca-2-13B** | 44.3 / 45.9 | 42.6 / 44.0 |
| **Chinese-LLaMA-2-7B** | 28.2 / 36.0 | 30.3 / 34.2 | **Chinese-Alpaca-2-7B** | 41.3 / 42.9 | 40.3 / 39.5 |
| Chinese-LLaMA-Plus-33B | 37.4 / 40.0 | 35.7 / 38.3 | Chinese-Alpaca-Plus-33B | 46.5 / 46.3 | 44.9 / 43.5 |
| Chinese-LLaMA-Plus-13B | 27.3 / 34.0 | 27.8 / 33.3 | Chinese-Alpaca-Plus-13B | 43.3 / 42.4 | 41.5 / 39.9 |
| Chinese-LLaMA-Plus-7B | 27.3 / 28.3 | 26.9 / 28.4 | Chinese-Alpaca-Plus-7B | 36.7 / 32.9 | 36.4 / 32.3 |
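The "zero-shot / 5-shot" distinction above only changes how the evaluation prompt is assembled. The sketch below illustrates one common way to build a k-shot multiple-choice prompt; the template wording and the item keys (`question`, `A`-`D`, `answer`) are illustrative assumptions, and the project's ceval wiki scripts define the exact format and scoring it uses.

```python
# Illustrative k-shot prompt assembly for a C-Eval style multiple-choice item.
# The template wording is an assumption; the project's ceval scripts are canonical.
def format_item(q: dict, show_answer: bool) -> str:
    text = (f"{q['question']}\n"
            f"A. {q['A']}\nB. {q['B']}\nC. {q['C']}\nD. {q['D']}\n答案:")
    return text + (q["answer"] + "\n\n" if show_answer else "")

def build_prompt(dev_items: list, test_item: dict, k: int = 5) -> str:
    # k in-context exemplars with answers, then the unanswered test item
    shots = "".join(format_item(q, show_answer=True) for q in dev_items[:k])
    return shots + format_item(test_item, show_answer=False)

# With k=0 this degenerates to the zero-shot prompt; the model's score over
# the candidate letters "A"/"B"/"C"/"D" then decides the prediction.
```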
### Objective Evaluation: CMMLU

[CMMLU](https://github.com/haonan-li/CMMLU) is another comprehensive Chinese evaluation dataset, designed specifically to assess a language model's knowledge and reasoning in Chinese contexts. It covers 67 topics ranging from basic subjects to advanced professional levels, with 11.5K multiple-choice questions in total. For the CMMLU inference code, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/cmmlu_zh).

| LLaMA Models | Test (0/few-shot) | Alpaca Models | Test (0/few-shot) |
| :--- | :---: | :--- | :---: |
| **Chinese-LLaMA-2-13B** | 38.9 / 42.5 | **Chinese-Alpaca-2-13B** | 43.2 / 45.5 |
| **Chinese-LLaMA-2-7B** | 27.9 / 34.1 | **Chinese-Alpaca-2-7B** | 40.0 / 41.8 |
| Chinese-LLaMA-Plus-33B | 35.2 / 38.8 | Chinese-Alpaca-Plus-33B | 46.6 / 45.3 |
| Chinese-LLaMA-Plus-13B | 29.6 / 34.0 | Chinese-Alpaca-Plus-13B | 40.6 / 39.9 |
| Chinese-LLaMA-Plus-7B | 25.4 / 26.3 | Chinese-Alpaca-Plus-7B | 36.8 / 32.6 |
href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/longbench_zh\"\u003e📖GitHub Wiki\u003c/a\u003e\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003eModels\u003c/th\u003e\n\u003cth align=\"center\"\u003e单文档QA\u003c/th\u003e\n\u003cth align=\"center\"\u003e多文档QA\u003c/th\u003e\n\u003cth align=\"center\"\u003e摘要\u003c/th\u003e\n\u003cth align=\"center\"\u003eFew-shot学习\u003c/th\u003e\n\u003cth align=\"center\"\u003e代码补全\u003c/th\u003e\n\u003cth align=\"center\"\u003e合成任务\u003c/th\u003e\n\u003cth align=\"center\"\u003eAvg\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B-64K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e44.7\u003c/td\u003e\n\u003ctd align=\"center\"\u003e28.1\u003c/td\u003e\n\u003ctd align=\"center\"\u003e14.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e39.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e44.6\u003c/td\u003e\n\u003ctd align=\"center\"\u003e5.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e29.3\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-LLaMA-2-7B-64K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e27.2\u003c/td\u003e\n\u003ctd align=\"center\"\u003e16.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e6.5\u003c/td\u003e\n\u003ctd align=\"center\"\u003e33.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e7.8\u003c/td\u003e\n\u003ctd align=\"center\"\u003e5.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e16.0\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-13B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e47.9\u003c/td\u003e\n\u003ctd align=\"center\"\u003e26.7\u003c/td\u003e\n\u003ctd align=\"center\"\u003e13.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e22.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e46.6\u003c/td\u003e\n\u003ctd align=\"center\"\u003e21.5\u003c/td\u003e\n\u003ctd align=\"center\"\u003e29.7\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-Alpaca-2-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e38.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e20.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e11.9\u003c/td\u003e\n\u003ctd align=\"center\"\u003e17.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e46.5\u003c/td\u003e\n\u003ctd align=\"center\"\u003e8.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.7\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e46.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e14.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e29.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e49.6\u003c/td\u003e\n\u003ctd align=\"center\"\u003e9.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e28.6\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-Alpaca-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e34.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e17.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e11.8\u003c/td\u003e\n\u003ctd align=\"center\"\u003e21.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e50.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e4.5\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e23.2\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-LLaMA-2-13B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e36.7\u003c/td\u003e\n\u003ctd align=\"center\"\u003e17.7\u003c/td\u003e\n\u003ctd align=\"center\"\u003e3.1\u003c/td\u003e\n\u003ctd align=\"center\"\u003e29.8\u003c/td\u003e\n\u003ctd align=\"center\"\u003e13.8\u003c/td\u003e\n\u003ctd align=\"center\"\u003e3.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e17.3\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-LLaMA-2-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e28.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e14.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e4.6\u003c/td\u003e\n\u003ctd align=\"center\"\u003e16.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e10.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e5.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e13.2\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-LLaMA-2-7B-16K\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e33.2\u003c/td\u003e\n\u003ctd align=\"center\"\u003e15.9\u003c/td\u003e\n\u003ctd align=\"center\"\u003e6.5\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.5\u003c/td\u003e\n\u003ctd align=\"center\"\u003e10.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e5.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e15.8\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-LLaMA-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e19.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e13.9\u003c/td\u003e\n\u003ctd align=\"center\"\u003e6.4\u003c/td\u003e\n\u003ctd align=\"center\"\u003e11.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e11.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e4.7\u003c/td\u003e\n\u003ctd align=\"center\"\u003e11.0\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e量化效果评测\u003c/h3\u003e\u003ca id=\"user-content-量化效果评测\" class=\"anchor\" aria-label=\"Permalink: 量化效果评测\" href=\"#量化效果评测\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e以Chinese-LLaMA-2-7B为例,对比不同精度下的模型大小、PPL(困惑度)、C-Eval效果,方便用户了解量化精度损失。PPL以4K上下文大小计算,C-Eval汇报的是valid集合上zero-shot和5-shot结果。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e精度\u003c/th\u003e\n\u003cth align=\"center\"\u003e模型大小\u003c/th\u003e\n\u003cth align=\"center\"\u003ePPL\u003c/th\u003e\n\u003cth align=\"center\"\u003eC-Eval\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd 
align=\"left\"\u003eFP16\u003c/td\u003e\n\u003ctd align=\"center\"\u003e12.9 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e9.373\u003c/td\u003e\n\u003ctd align=\"center\"\u003e28.2 / 36.0\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e8-bit量化\u003c/td\u003e\n\u003ctd align=\"center\"\u003e6.8 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e9.476\u003c/td\u003e\n\u003ctd align=\"center\"\u003e26.8 / 35.4\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003e4-bit量化\u003c/td\u003e\n\u003ctd align=\"center\"\u003e3.7 GB\u003c/td\u003e\n\u003ctd align=\"center\"\u003e10.132\u003c/td\u003e\n\u003ctd align=\"center\"\u003e25.5 / 32.8\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cp dir=\"auto\"\u003e特别地,以下是在llama.cpp下不同量化方法的评测数据,供用户参考,速度以ms/tok计,测试设备为M1 Max。具体细节见\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/llamacpp_zh#%E5%85%B3%E4%BA%8E%E9%87%8F%E5%8C%96%E6%96%B9%E6%B3%95%E9%80%89%E6%8B%A9%E5%8F%8A%E6%8E%A8%E7%90%86%E9%80%9F%E5%BA%A6\"\u003e📖GitHub Wiki\u003c/a\u003e\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003ellama.cpp\u003c/th\u003e\n\u003cth align=\"right\"\u003eF16\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ2_K\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ3_K\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ4_0\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ4_1\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ4_K\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ5_0\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ5_1\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ5_K\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ6_K\u003c/th\u003e\n\u003cth align=\"right\"\u003eQ8_0\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003ePPL\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.128\u003c/td\u003e\n\u003ctd align=\"right\"\u003e11.107\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.576\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.476\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.576\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.240\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.156\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.213\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.168\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.133\u003c/td\u003e\n\u003ctd align=\"right\"\u003e9.129\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eSize\u003c/td\u003e\n\u003ctd align=\"right\"\u003e12.91G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e2.41G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e3.18G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e3.69G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e4.08G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e3.92G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e4.47G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e4.86G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e4.59G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e5.30G\u003c/td\u003e\n\u003ctd align=\"right\"\u003e6.81G\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eCPU Speed\u003c/td\u003e\n\u003ctd align=\"right\"\u003e117\u003c/td\u003e\n\u003ctd align=\"right\"\u003e42\u003c/td\u003e\n\u003ctd align=\"right\"\u003e51\u003c/td\u003e\n\u003ctd align=\"right\"\u003e39\u003c/td\u003e\n\u003ctd align=\"right\"\u003e44\u003c/td\u003e\n\u003ctd align=\"right\"\u003e43\u003c/td\u003e\n\u003ctd 
align=\"right\"\u003e48\u003c/td\u003e\n\u003ctd align=\"right\"\u003e51\u003c/td\u003e\n\u003ctd align=\"right\"\u003e50\u003c/td\u003e\n\u003ctd align=\"right\"\u003e54\u003c/td\u003e\n\u003ctd align=\"right\"\u003e65\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eGPU Speed\u003c/td\u003e\n\u003ctd align=\"right\"\u003e53\u003c/td\u003e\n\u003ctd align=\"right\"\u003e19\u003c/td\u003e\n\u003ctd align=\"right\"\u003e21\u003c/td\u003e\n\u003ctd align=\"right\"\u003e17\u003c/td\u003e\n\u003ctd align=\"right\"\u003e18\u003c/td\u003e\n\u003ctd align=\"right\"\u003e20\u003c/td\u003e\n\u003ctd align=\"right\"\u003ex\u003c/td\u003e\n\u003ctd align=\"right\"\u003ex\u003c/td\u003e\n\u003ctd align=\"right\"\u003e25\u003c/td\u003e\n\u003ctd align=\"right\"\u003e26\u003c/td\u003e\n\u003ctd align=\"right\"\u003ex\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e投机采样加速效果评测\u003c/h3\u003e\u003ca id=\"user-content-投机采样加速效果评测\" class=\"anchor\" aria-label=\"Permalink: 投机采样加速效果评测\" href=\"#投机采样加速效果评测\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e通过投机采样方法并借助Chinese-LLaMA-2-1.3B和Chinese-Alpaca-2-1.3B,可以分别加速7B、13B的LLaMA和Alpaca模型的推理速度。以下是使用\u003ca href=\"/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/scripts/inference/speculative_sample.py\"\u003e投机采样脚本\u003c/a\u003e在1*A40-48G上解码\u003ca href=\"#%E7%94%9F%E6%88%90%E6%95%88%E6%9E%9C%E8%AF%84%E6%B5%8B\"\u003e生成效果评测\u003c/a\u003e中的问题测得的平均速度(速度以ms/token计,模型均为fp16精度),供用户参考。详细说明见\u003ca href=\"https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/inference_with_transformers_zh#%E6%8A%95%E6%9C%BA%E9%87%87%E6%A0%B7%E8%A7%A3%E7%A0%81\"\u003e📖GitHub Wiki\u003c/a\u003e。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth align=\"left\"\u003e草稿模型\u003c/th\u003e\n\u003cth align=\"center\"\u003e草稿模型速度\u003c/th\u003e\n\u003cth align=\"left\"\u003e目标模型\u003c/th\u003e\n\u003cth align=\"center\"\u003e目标模型速度\u003c/th\u003e\n\u003cth align=\"center\"\u003e投机采样速度(加速比)\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e7.6\u003c/td\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e49.3\u003c/td\u003e\n\u003ctd align=\"center\"\u003e36.0(1.37x)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e7.6\u003c/td\u003e\n\u003ctd align=\"left\"\u003eChinese-LLaMA-2-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e66.0\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e47.1(1.40x)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e8.1\u003c/td\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e50.2\u003c/td\u003e\n\u003ctd align=\"center\"\u003e34.9(1.44x)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e8.2\u003c/td\u003e\n\u003ctd align=\"left\"\u003eChinese-Alpaca-2-13B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e67.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e41.6(1.61x)\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e人类偏好对齐(RLHF)版本评测\u003c/h3\u003e\u003ca id=\"user-content-人类偏好对齐rlhf版本评测\" class=\"anchor\" aria-label=\"Permalink: 人类偏好对齐(RLHF)版本评测\" href=\"#人类偏好对齐rlhf版本评测\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e对齐水平\u003c/h4\u003e\u003ca id=\"user-content-对齐水平\" class=\"anchor\" aria-label=\"Permalink: 对齐水平\" href=\"#对齐水平\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e为评估中文模型与人类价值偏好对齐程度,我们自行构建了评测数据集,覆盖了道德、色情、毒品、暴力等人类价值偏好重点关注的多个方面。实验结果以价值体现正确率进行呈现(体现正确价值观题目数 / 总题数)。\u003c/p\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003eAlpaca Models\u003c/th\u003e\n\u003cth align=\"center\"\u003eAccuracy\u003c/th\u003e\n\u003cth\u003eAlpaca Models\u003c/th\u003e\n\u003cth align=\"center\"\u003eAccuracy\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-Alpaca-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e79.3%\u003c/td\u003e\n\u003ctd\u003eChinese-Alpaca-2-7B\u003c/td\u003e\n\u003ctd 
align=\"center\"\u003e88.3%\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-1.3B-RLHF\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e95.8%\u003c/td\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B-RLHF\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e97.5%\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e客观效果评测:C-Eval \u0026amp; CMMLU\u003c/h4\u003e\u003ca id=\"user-content-客观效果评测c-eval--cmmlu\" class=\"anchor\" aria-label=\"Permalink: 客观效果评测:C-Eval \u0026amp; CMMLU\" href=\"#客观效果评测c-eval--cmmlu\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cmarkdown-accessiblity-table\u003e\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003eAlpaca Models\u003c/th\u003e\n\u003cth align=\"center\"\u003eC-Eval (0/few-shot)\u003c/th\u003e\n\u003cth align=\"center\"\u003eCMMLU (0/few-shot)\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-Alpaca-2-1.3B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.8 / 26.8\u003c/td\u003e\n\u003ctd align=\"center\"\u003e24.8 / 25.1\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eChinese-Alpaca-2-7B\u003c/td\u003e\n\u003ctd align=\"center\"\u003e42.1 / 41.0\u003c/td\u003e\n\u003ctd align=\"center\"\u003e40.0 / 41.8\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-1.3B-RLHF\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e23.6 / 27.1\u003c/td\u003e\n\u003ctd align=\"center\"\u003e24.9 / 25.0\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003e\u003cstrong\u003eChinese-Alpaca-2-7B-RLHF\u003c/strong\u003e\u003c/td\u003e\n\u003ctd align=\"center\"\u003e40.6 / 41.2\u003c/td\u003e\n\u003ctd align=\"center\"\u003e39.5 / 41.0\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\u003c/markdown-accessiblity-table\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e训练与精调\u003c/h2\u003e\u003ca id=\"user-content-训练与精调\" class=\"anchor\" aria-label=\"Permalink: 训练与精调\" href=\"#训练与精调\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 
## Training and Fine-tuning

### Pre-training

- The Chinese-LLaMA-2 base models were obtained by incremental training on top of the original Llama-2 with large-scale unlabeled data.
- The training data is the same as that used for the Plus models in the first-generation project, about 120 GB of plain text in total.
- The training code is adapted from [run_clm.py](https://github.com/huggingface/transformers/blob/main/examples/pytorch/language-modeling/run_clm.py) in 🤗transformers; for usage, see the [📖 pre-training script wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/pt_scripts_zh).

### Instruction Fine-tuning

- The Chinese-Alpaca-2 models were obtained by further fine-tuning Chinese-LLaMA-2 with labeled instruction data.
- The training data is the instruction data used for the Pro models in the first-generation project, about 5 million instruction samples in total (slightly more than in the first generation).
- The training code borrows the dataset-handling parts of the [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) project; for usage, see the [📖 instruction fine-tuning script wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/sft_scripts_zh). A minimal prompt-assembly sketch follows this list.
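To give a flavor of Llama-2-chat style instruction formatting, here is a minimal sketch of how an instruction and response might be assembled into a training example. The system prompt and template strings are illustrative assumptions; the project's SFT and inference scripts define the canonical ones.

```python
# Illustrative Llama-2-chat style prompt assembly for instruction fine-tuning.
# SYSTEM_PROMPT and TEMPLATE are assumptions; the project's SFT scripts are canonical.
SYSTEM_PROMPT = "You are a helpful assistant. 你是一个乐于助人的助手。"
TEMPLATE = "[INST] <<SYS>>\n{system}\n<</SYS>>\n\n{instruction} [/INST]"

def build_training_example(instruction: str, response: str) -> str:
    prompt = TEMPLATE.format(system=SYSTEM_PROMPT, instruction=instruction)
    # during training, the loss is typically computed only on the response
    # tokens, with the prompt tokens masked out
    return prompt + " " + response

print(build_training_example("请列举三种常见的中文分词方法。", "……"))
```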
### RLHF Fine-tuning

- The Chinese-Alpaca-2-RLHF models were obtained by aligning the Chinese-Alpaca-2 models with human preferences, using preference data and the PPO algorithm.
- The training data was sampled from human preference data in several open-source projects and from this project's instruction fine-tuning data, with roughly 69.5K samples for the reward-model stage and 25.6K for the reinforcement-learning stage.
- The training code is built on [DeepSpeed-Chat](https://github.com/microsoft/DeepSpeedExamples/tree/master/applications/DeepSpeed-Chat); for the full pipeline, see the [📖 reward model wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/rm_zh) and the [📖 reinforcement learning wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/rl_zh). A sketch of the usual reward-model objective follows this list.
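The reward-model stage mentioned above typically trains on chosen/rejected response pairs with a pairwise ranking loss. The sketch below shows that standard objective, as popularized by InstructGPT-style pipelines, not this project's exact implementation.

```python
# Standard pairwise reward-model loss: push the reward of the chosen response
# above the rejected one. A sketch of the common objective, not project code.
import torch
import torch.nn.functional as F

def reward_ranking_loss(r_chosen: torch.Tensor, r_rejected: torch.Tensor) -> torch.Tensor:
    # r_* are scalar rewards per preference pair, shape (batch,)
    return -F.logsigmoid(r_chosen - r_rejected).mean()

loss = reward_ranking_loss(torch.tensor([1.2, 0.3]), torch.tensor([0.4, 0.9]))
print(loss)  # smaller when chosen rewards exceed rejected ones
```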
## FAQ

Before filing an issue, please check whether the FAQ already contains a solution. For the questions and answers, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/faq_zh).

```
Question 1: What is the difference between this project and the first-generation project?
Question 2: Can the models be used commercially?
Question 3: Do you accept third-party pull requests?
Question 4: Why LoRA instead of full-parameter pre-training?
Question 5: Do the second-generation models work with tools that support the first-generation LLaMA?
Question 6: Is Chinese-Alpaca-2 trained from Llama-2-Chat?
Question 7: Why does fine-tuning Chinese-Alpaca-2-7B run out of memory with 24G of VRAM?
Question 8: Can the 16K long-context models replace the standard models?
Question 9: How should results on third-party public leaderboards be interpreted?
Question 10: Will there be 34B or 70B models?
Question 11: Why are the long-context models 16K rather than 32K or 100K?
Question 12: Why does the Alpaca model say it is ChatGPT?
Question 13: Why is adapter_model.bin under pt_lora_model or sft_lora_model only a few hundred KB?
```

## Citation

If you use the resources of this project, please cite the project's technical report: [https://arxiv.org/abs/2304.08177](https://arxiv.org/abs/2304.08177)

```
@article{Chinese-LLaMA-Alpaca,
    title={Efficient and Effective Text Encoding for Chinese LLaMA and Alpaca},
    author={Cui, Yiming and Yang, Ziqing and Yao, Xin},
    journal={arXiv preprint arXiv:2304.08177},
    url={https://arxiv.org/abs/2304.08177},
    year={2023}
}
```

## Acknowledgments

This project is developed primarily on top of the following open-source projects; we thank the related projects and their research and development staff.
- [Llama-2 *by Meta*](https://github.com/facebookresearch/llama)
- [llama.cpp *by @ggerganov*](https://github.com/ggerganov/llama.cpp)
- [FlashAttention-2 *by Dao-AILab*](https://github.com/Dao-AILab/flash-attention)

We also thank the contributors to Chinese-LLaMA-Alpaca (the first-generation project) and the [related projects and people](https://github.com/ymcui/Chinese-LLaMA-Alpaca#%E8%87%B4%E8%B0%A2).

## Disclaimer

This project is developed on top of the Llama-2 model released by Meta; strictly follow Llama-2's open-source license agreement when using it. When third-party code is involved, comply with the relevant open-source licenses as well. The content generated by the models may be inaccurate due to computational methods, random factors, quantization precision loss, and other influences. This project therefore makes no guarantee about the accuracy of model outputs and accepts no liability for any loss arising from the use of the related resources and outputs. If the models of this project are used commercially, developers must follow local laws and regulations and ensure the compliance of model outputs; this project accepts no liability for any products or services derived from them.

<details>
<summary><b>Limitations</b></summary>

Although the models in this project have a certain ability to understand and generate Chinese, they also have limitations, including but not limited to:

- They may produce unpredictable harmful content or content that does not conform to human preferences and values.
- Due to compute and data constraints, the models are not trained sufficiently, and their Chinese understanding needs further improvement.
- There is no online interactive demo for now (note: users can still deploy and try the models locally).

</details>

## Feedback

If you have questions, please submit them in a GitHub Issue. Please ask politely and help build a constructive discussion community.

- Before submitting an issue, check whether the FAQ solves the problem, and consider searching past issues for an answer.
- Use the issue template provided by this project so that the problem can be located quickly.
- Duplicate issues and issues unrelated to this project will be handled by the [stale bot](https://github.com/marketplace/stale); thanks for your understanding.
CMMLU"},{"level":2,"text":"训练与精调","anchor":"训练与精调","htmlText":"训练与精调"},{"level":3,"text":"预训练","anchor":"预训练","htmlText":"预训练"},{"level":3,"text":"指令精调","anchor":"指令精调","htmlText":"指令精调"},{"level":3,"text":"RLHF精调","anchor":"rlhf精调","htmlText":"RLHF精调"},{"level":2,"text":"常见问题","anchor":"常见问题","htmlText":"常见问题"},{"level":2,"text":"引用","anchor":"引用","htmlText":"引用"},{"level":2,"text":"致谢","anchor":"致谢","htmlText":"致谢"},{"level":2,"text":"免责声明","anchor":"免责声明","htmlText":"免责声明"},{"level":2,"text":"问题反馈","anchor":"问题反馈","htmlText":"问题反馈"}],"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fymcui%2FChinese-LLaMA-Alpaca-2"}},{"displayName":"LICENSE","repoName":"Chinese-LLaMA-Alpaca-2","refName":"main","path":"LICENSE","preferredFileType":"license","tabName":"Apache-2.0","richText":null,"loaded":false,"timedOut":false,"errorMessage":null,"headerInfo":{"toc":null,"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fymcui%2FChinese-LLaMA-Alpaca-2"}}],"overviewFilesProcessingTime":0}},"appPayload":{"helpUrl":"https://docs.github.com","findFileWorkerPath":"/assets-cdn/worker/find-file-worker-7d7eb7c71814.js","findInFileWorkerPath":"/assets-cdn/worker/find-in-file-worker-96e76d5fdb2c.js","githubDevUrl":null,"enabled_features":{"copilot_workspace":null,"code_nav_ui_events":false,"overview_shared_code_dropdown_button":false,"react_blob_overlay":false,"accessible_code_button":true,"github_models_repo_integration":false}}}}</script> <div data-target="react-partial.reactRoot"><style data-styled="true" data-styled-version="5.3.11">.iVEunk{margin-top:16px;margin-bottom:16px;}/*!sc*/ .jzuOtQ{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;}/*!sc*/ .bGojzy{margin-bottom:0;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;row-gap:16px;}/*!sc*/ .iNSVHo{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding-bottom:16px;padding-top:8px;}/*!sc*/ .bVgnfw{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;gap:8px;}/*!sc*/ @media screen and (max-width:320px){.bVgnfw{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;}}/*!sc*/ .CEgMp{position:relative;}/*!sc*/ @media screen and (max-width:380px){.CEgMp .ref-selector-button-text-container{max-width:80px;}}/*!sc*/ @media screen and (max-width:320px){.CEgMp{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;}.CEgMp .overview-ref-selector{width:100%;}.CEgMp .overview-ref-selector > span{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start;}.CEgMp .overview-ref-selector > span > span[data-component="text"]{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;}}/*!sc*/ .gMOVLe[data-size="medium"]{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;min-width:0;}/*!sc*/ .gMOVLe[data-size="medium"] 
svg{color:var(--fgColor-muted,var(--color-fg-muted,#656d76));}/*!sc*/ .gMOVLe[data-size="medium"] > span{width:inherit;}/*!sc*/ .gUkoLg{-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;}/*!sc*/ .bZBlpz{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:100%;}/*!sc*/ .lhTYNA{margin-right:4px;color:var(--fgColor-muted,var(--color-fg-muted,#656d76));}/*!sc*/ .ffLUq{font-size:14px;min-width:0;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;}/*!sc*/ .bmcJak{min-width:0;}/*!sc*/ .fLXEGX{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;}/*!sc*/ @media screen and (max-width:1079px){.fLXEGX{display:none;}}/*!sc*/ .lmSMZJ[data-size="medium"]{color:var(--fgColor-muted,var(--color-fg-muted,#656d76));padding-left:4px;padding-right:4px;}/*!sc*/ .lmSMZJ[data-size="medium"] span[data-component="leadingVisual"]{margin-right:4px !important;}/*!sc*/ .dqfxud{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;}/*!sc*/ @media screen and (min-width:1080px){.dqfxud{display:none;}}/*!sc*/ @media screen and (max-width:543px){.dqfxud{display:none;}}/*!sc*/ .fGwBZA[data-size="medium"][data-no-visuals]{color:var(--fgColor-muted,var(--color-fg-muted,#656d76));}/*!sc*/ .jxTzTd{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;padding-left:8px;gap:8px;}/*!sc*/ .gqqBXN{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;gap:8px;}/*!sc*/ @media screen and (max-width:543px){.gqqBXN{display:none;}}/*!sc*/ .dzXgxt{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;}/*!sc*/ @media screen and (max-width:1011px){.dzXgxt{display:none;}}/*!sc*/ .iWFGlI{margin-left:8px;margin-right:8px;margin:0;}/*!sc*/ .vcvyP{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;min-width:160px;}/*!sc*/ .YUPas{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;}/*!sc*/ @media screen and (min-width:1012px){.YUPas{display:none;}}/*!sc*/ .izFOf{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;}/*!sc*/ @media screen and (min-width:544px){.izFOf{display:none;}}/*!sc*/ .vIPPs{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;gap:16px;}/*!sc*/ .fdROMU{width:100%;border-collapse:separate;border-spacing:0;border:1px solid;border-color:var(--borderColor-default,var(--color-border-default,#d0d7de));border-radius:6px;table-layout:fixed;overflow:unset;}/*!sc*/ .jGKpsv{height:0px;line-height:0px;}/*!sc*/ .jGKpsv tr{height:0px;font-size:0px;}/*!sc*/ .jdgHnn{padding:16px;color:var(--fgColor-muted,var(--color-fg-muted,#656d76));font-size:12px;text-align:left;height:40px;}/*!sc*/ .jdgHnn th{padding-left:16px;background-color:var(--bgColor-muted,var(--color-canvas-subtle,#f6f8fa));}/*!sc*/ .bQivRW{width:100%;border-top-left-radius:6px;}/*!sc*/ @media screen and (min-width:544px){.bQivRW{display:none;}}/*!sc*/ .ldkMIO{width:40%;border-top-left-radius:6px;}/*!sc*/ @media screen and (max-width:543px){.ldkMIO{display:none;}}/*!sc*/ .jMbWeI{text-align:right;padding-right:16px;width:136px;border-top-right-radius:6px;}/*!sc*/ .gpqjiB{color:var(--fgColor-muted,var(--color-fg-muted,#656d76));font-size:12px;height:40px;}/*!sc*/ 
.dzCJzi{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;-webkit-align-items:center;-webkit-box-align:center;-ms-flex-align:center;align-items:center;gap:8px;min-width:273px;padding:8px;}/*!sc*/ @media screen and (min-width:544px){.dzCJzi{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap;}}/*!sc*/ .eNCcrz{text-align:center;vertical-align:center;height:40px;border-top:1px solid;border-color:var(--borderColor-default,var(--color-border-default,#d0d7de));}/*!sc*/ .bHTcCe{border-top:1px solid var(--borderColor-default,var(--color-border-default));cursor:pointer;}/*!sc*/ .csrIcr{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;gap:16px;}/*!sc*/ .bUQNHB{border:1px solid;border-color:var(--borderColor-default,var(--color-border-default,#d0d7de));border-radius:6px;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;}/*!sc*/ @media screen and (max-width:543px){.bUQNHB{margin-left:-16px;margin-right:-16px;max-width:calc(100% + 32px);}}/*!sc*/ @media screen and (min-width:544px){.bUQNHB{max-width:100%;}}/*!sc*/ .jPdcfu{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;border-bottom:1px solid;border-bottom-color:var(--borderColor-default,var(--color-border-default,#d0d7de));-webkit-align-items:center;-webkit-box-align:center;-ms-flex-align:center;align-items:center;padding-right:8px;position:-webkit-sticky;position:sticky;top:0;background-color:var(--bgColor-default,var(--color-canvas-default,#ffffff));z-index:1;border-top-left-radius:6px;border-top-right-radius:6px;}/*!sc*/ .iphEWz{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;border-bottom:none;max-width:100%;padding-left:8px;padding-right:8px;}/*!sc*/ .hUCRAk{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-align-items:center;-webkit-box-align:center;-ms-flex-align:center;align-items:center;}/*!sc*/ .cwoBXV[data-size="medium"]{color:var(--fgColor-muted,var(--color-fg-subtle,#6e7781));padding-left:8px;padding-right:8px;}/*!sc*/ .QkQOb{padding:32px;overflow:auto;}/*!sc*/ data-styled.g1[id="Box-sc-g0xbh4-0"]{content:"iVEunk,jzuOtQ,bGojzy,iNSVHo,bVgnfw,CEgMp,gMOVLe,gUkoLg,bZBlpz,lhTYNA,ffLUq,bmcJak,fLXEGX,lmSMZJ,dqfxud,fGwBZA,jxTzTd,gqqBXN,dzXgxt,iWFGlI,vcvyP,YUPas,izFOf,vIPPs,fdROMU,jGKpsv,jdgHnn,bQivRW,ldkMIO,jMbWeI,gpqjiB,dzCJzi,eNCcrz,bHTcCe,csrIcr,bUQNHB,jPdcfu,iphEWz,hUCRAk,cwoBXV,QkQOb,"}/*!sc*/ .brGdpi{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;-webkit-clip:rect(0,0,0,0);clip:rect(0,0,0,0);white-space:nowrap;border-width:0;}/*!sc*/ data-styled.g6[id="_VisuallyHidden__VisuallyHidden-sc-11jhm7a-0"]{content:"brGdpi,"}/*!sc*/ .hWlpPn{position:relative;display:inline-block;}/*!sc*/ .hWlpPn::after{position:absolute;z-index:1000000;display:none;padding:0.5em 0.75em;font:normal normal 11px/1.5 -apple-system,BlinkMacSystemFont,"Segoe UI","Noto Sans",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI 
Emoji";-webkit-font-smoothing:subpixel-antialiased;color:var(--tooltip-fgColor,var(--fgColor-onEmphasis,var(--color-fg-on-emphasis,#ffffff)));text-align:center;-webkit-text-decoration:none;text-decoration:none;text-shadow:none;text-transform:none;-webkit-letter-spacing:normal;-moz-letter-spacing:normal;-ms-letter-spacing:normal;letter-spacing:normal;word-wrap:break-word;white-space:pre;pointer-events:none;content:attr(aria-label);background:var(--tooltip-bgColor,var(--bgColor-emphasis,var(--color-neutral-emphasis-plus,#24292f)));border-radius:6px;opacity:0;}/*!sc*/ @-webkit-keyframes tooltip-appear{from{opacity:0;}to{opacity:1;}}/*!sc*/ @keyframes tooltip-appear{from{opacity:0;}to{opacity:1;}}/*!sc*/ .hWlpPn:hover::after,.hWlpPn:active::after,.hWlpPn:focus::after,.hWlpPn:focus-within::after{display:inline-block;-webkit-text-decoration:none;text-decoration:none;-webkit-animation-name:tooltip-appear;animation-name:tooltip-appear;-webkit-animation-duration:0.1s;animation-duration:0.1s;-webkit-animation-fill-mode:forwards;animation-fill-mode:forwards;-webkit-animation-timing-function:ease-in;animation-timing-function:ease-in;-webkit-animation-delay:0s;animation-delay:0s;}/*!sc*/ .hWlpPn.tooltipped-no-delay:hover::after,.hWlpPn.tooltipped-no-delay:active::after,.hWlpPn.tooltipped-no-delay:focus::after,.hWlpPn.tooltipped-no-delay:focus-within::after{-webkit-animation-delay:0s;animation-delay:0s;}/*!sc*/ .hWlpPn.tooltipped-multiline:hover::after,.hWlpPn.tooltipped-multiline:active::after,.hWlpPn.tooltipped-multiline:focus::after,.hWlpPn.tooltipped-multiline:focus-within::after{display:table-cell;}/*!sc*/ .hWlpPn.tooltipped-s::after,.hWlpPn.tooltipped-se::after,.hWlpPn.tooltipped-sw::after{top:100%;right:50%;margin-top:6px;}/*!sc*/ .hWlpPn.tooltipped-se::after{right:auto;left:50%;margin-left:-16px;}/*!sc*/ .hWlpPn.tooltipped-sw::after{margin-right:-16px;}/*!sc*/ .hWlpPn.tooltipped-n::after,.hWlpPn.tooltipped-ne::after,.hWlpPn.tooltipped-nw::after{right:50%;bottom:100%;margin-bottom:6px;}/*!sc*/ .hWlpPn.tooltipped-ne::after{right:auto;left:50%;margin-left:-16px;}/*!sc*/ .hWlpPn.tooltipped-nw::after{margin-right:-16px;}/*!sc*/ .hWlpPn.tooltipped-s::after,.hWlpPn.tooltipped-n::after{-webkit-transform:translateX(50%);-ms-transform:translateX(50%);transform:translateX(50%);}/*!sc*/ .hWlpPn.tooltipped-w::after{right:100%;bottom:50%;margin-right:6px;-webkit-transform:translateY(50%);-ms-transform:translateY(50%);transform:translateY(50%);}/*!sc*/ .hWlpPn.tooltipped-e::after{bottom:50%;left:100%;margin-left:6px;-webkit-transform:translateY(50%);-ms-transform:translateY(50%);transform:translateY(50%);}/*!sc*/ .hWlpPn.tooltipped-multiline::after{width:-webkit-max-content;width:-moz-max-content;width:max-content;max-width:250px;word-wrap:break-word;white-space:pre-line;border-collapse:separate;}/*!sc*/ .hWlpPn.tooltipped-multiline.tooltipped-s::after,.hWlpPn.tooltipped-multiline.tooltipped-n::after{right:auto;left:50%;-webkit-transform:translateX(-50%);-ms-transform:translateX(-50%);transform:translateX(-50%);}/*!sc*/ .hWlpPn.tooltipped-multiline.tooltipped-w::after,.hWlpPn.tooltipped-multiline.tooltipped-e::after{right:100%;}/*!sc*/ .hWlpPn.tooltipped-align-right-2::after{right:0;margin-right:0;}/*!sc*/ .hWlpPn.tooltipped-align-left-2::after{left:0;margin-left:0;}/*!sc*/ data-styled.g17[id="Tooltip__TooltipBase-sc-17tf59c-0"]{content:"hWlpPn,"}/*!sc*/ .liVpTx{display:inline-block;overflow:hidden;text-overflow:ellipsis;vertical-align:top;white-space:nowrap;max-width:125px;}/*!sc*/ 
data-styled.g19[id="Truncate__StyledTruncate-sc-23o1d2-0"]{content:"liVpTx,"}/*!sc*/ </style> <!-- --> <!-- --> <div class="Box-sc-g0xbh4-0 iVEunk"><div class="Box-sc-g0xbh4-0 jzuOtQ"><div class="Box-sc-g0xbh4-0 bGojzy"></div></div><div class="Box-sc-g0xbh4-0 iNSVHo"><div class="Box-sc-g0xbh4-0 bVgnfw"><div class="Box-sc-g0xbh4-0 CEgMp"><button type="button" aria-haspopup="true" aria-expanded="false" tabindex="0" aria-label="main branch" data-testid="anchor-button" class="Box-sc-g0xbh4-0 gMOVLe prc-Button-ButtonBase-c50BI overview-ref-selector width-full" data-loading="false" data-size="medium" data-variant="default" aria-describedby="branch-picker-repos-header-ref-selector-loading-announcement" id="branch-picker-repos-header-ref-selector"><span data-component="buttonContent" class="Box-sc-g0xbh4-0 gUkoLg prc-Button-ButtonContent-HKbr-"><span data-component="text" class="prc-Button-Label-pTQ3x"><div class="Box-sc-g0xbh4-0 bZBlpz"><div class="Box-sc-g0xbh4-0 lhTYNA"><svg aria-hidden="true" focusable="false" class="octicon octicon-git-branch" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M9.5 3.25a2.25 2.25 0 1 1 3 2.122V6A2.5 2.5 0 0 1 10 8.5H6a1 1 0 0 0-1 1v1.128a2.251 2.251 0 1 1-1.5 0V5.372a2.25 2.25 0 1 1 1.5 0v1.836A2.493 2.493 0 0 1 6 7h4a1 1 0 0 0 1-1v-.628A2.25 2.25 0 0 1 9.5 3.25Zm-6 0a.75.75 0 1 0 1.5 0 .75.75 0 0 0-1.5 0Zm8.25-.75a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5ZM4.25 12a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Z"></path></svg></div><div class="Box-sc-g0xbh4-0 ffLUq ref-selector-button-text-container"><span class="Box-sc-g0xbh4-0 bmcJak prc-Text-Text-0ima0"> <!-- -->main</span></div></div></span><span data-component="trailingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-triangle-down" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="m4.427 7.427 3.396 3.396a.25.25 0 0 0 .354 0l3.396-3.396A.25.25 0 0 0 11.396 7H4.604a.25.25 0 0 0-.177.427Z"></path></svg></span></span></button><button hidden="" data-hotkey-scope="read-only-cursor-text-area"></button></div><div class="Box-sc-g0xbh4-0 fLXEGX"><a style="--button-color:fg.muted" type="button" href="/ymcui/Chinese-LLaMA-Alpaca-2/branches" class="Box-sc-g0xbh4-0 lmSMZJ prc-Button-ButtonBase-c50BI" data-loading="false" data-size="medium" data-variant="invisible" aria-describedby=":Rclab:-loading-announcement"><span data-component="buttonContent" class="Box-sc-g0xbh4-0 gUkoLg prc-Button-ButtonContent-HKbr-"><span data-component="leadingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-git-branch" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M9.5 3.25a2.25 2.25 0 1 1 3 2.122V6A2.5 2.5 0 0 1 10 8.5H6a1 1 0 0 0-1 1v1.128a2.251 2.251 0 1 1-1.5 0V5.372a2.25 2.25 0 1 1 1.5 0v1.836A2.493 2.493 0 0 1 6 7h4a1 1 0 0 0 1-1v-.628A2.25 2.25 0 0 1 9.5 3.25Zm-6 0a.75.75 0 1 0 1.5 0 .75.75 0 0 0-1.5 0Zm8.25-.75a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5ZM4.25 12a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Z"></path></svg></span><span data-component="text" class="prc-Button-Label-pTQ3x">Branches</span></span></a><a style="--button-color:fg.muted" type="button" href="/ymcui/Chinese-LLaMA-Alpaca-2/tags" 
class="Box-sc-g0xbh4-0 lmSMZJ prc-Button-ButtonBase-c50BI" data-loading="false" data-size="medium" data-variant="invisible" aria-describedby=":Rklab:-loading-announcement"><span data-component="buttonContent" class="Box-sc-g0xbh4-0 gUkoLg prc-Button-ButtonContent-HKbr-"><span data-component="leadingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-tag" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1 7.775V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 0 1 0 2.474l-5.026 5.026a1.75 1.75 0 0 1-2.474 0l-6.25-6.25A1.752 1.752 0 0 1 1 7.775Zm1.5 0c0 .066.026.13.073.177l6.25 6.25a.25.25 0 0 0 .354 0l5.025-5.025a.25.25 0 0 0 0-.354l-6.25-6.25a.25.25 0 0 0-.177-.073H2.75a.25.25 0 0 0-.25.25ZM6 5a1 1 0 1 1 0 2 1 1 0 0 1 0-2Z"></path></svg></span><span data-component="text" class="prc-Button-Label-pTQ3x">Tags</span></span></a></div><div class="Box-sc-g0xbh4-0 dqfxud"><a style="--button-color:fg.muted" type="button" aria-label="Go to Branches page" href="/ymcui/Chinese-LLaMA-Alpaca-2/branches" class="Box-sc-g0xbh4-0 fGwBZA prc-Button-ButtonBase-c50BI" data-loading="false" data-no-visuals="true" data-size="medium" data-variant="invisible" aria-describedby=":Relab:-loading-announcement"><svg aria-hidden="true" focusable="false" class="octicon octicon-git-branch" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M9.5 3.25a2.25 2.25 0 1 1 3 2.122V6A2.5 2.5 0 0 1 10 8.5H6a1 1 0 0 0-1 1v1.128a2.251 2.251 0 1 1-1.5 0V5.372a2.25 2.25 0 1 1 1.5 0v1.836A2.493 2.493 0 0 1 6 7h4a1 1 0 0 0 1-1v-.628A2.25 2.25 0 0 1 9.5 3.25Zm-6 0a.75.75 0 1 0 1.5 0 .75.75 0 0 0-1.5 0Zm8.25-.75a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5ZM4.25 12a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Z"></path></svg></a><a style="--button-color:fg.muted" type="button" aria-label="Go to Tags page" href="/ymcui/Chinese-LLaMA-Alpaca-2/tags" class="Box-sc-g0xbh4-0 fGwBZA prc-Button-ButtonBase-c50BI" data-loading="false" data-no-visuals="true" data-size="medium" data-variant="invisible" aria-describedby=":Rmlab:-loading-announcement"><svg aria-hidden="true" focusable="false" class="octicon octicon-tag" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1 7.775V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 0 1 0 2.474l-5.026 5.026a1.75 1.75 0 0 1-2.474 0l-6.25-6.25A1.752 1.752 0 0 1 1 7.775Zm1.5 0c0 .066.026.13.073.177l6.25 6.25a.25.25 0 0 0 .354 0l5.025-5.025a.25.25 0 0 0 0-.354l-6.25-6.25a.25.25 0 0 0-.177-.073H2.75a.25.25 0 0 0-.25.25ZM6 5a1 1 0 1 1 0 2 1 1 0 0 1 0-2Z"></path></svg></a></div></div><div class="Box-sc-g0xbh4-0 jxTzTd"><div class="Box-sc-g0xbh4-0 gqqBXN"><div class="Box-sc-g0xbh4-0 dzXgxt"><!--$--><div class="Box-sc-g0xbh4-0 iWFGlI"><span class="Box-sc-g0xbh4-0 vcvyP TextInput-wrapper prc-components-TextInputWrapper-i1ofR prc-components-TextInputBaseWrapper-ueK9q" data-leading-visual="true" data-trailing-visual="true" aria-busy="false"><span class="TextInput-icon" id=":R2j5ab:" aria-hidden="true"><svg aria-hidden="true" focusable="false" class="octicon octicon-search" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" 
style="vertical-align:text-bottom"><path d="M10.68 11.74a6 6 0 0 1-7.922-8.982 6 6 0 0 1 8.982 7.922l3.04 3.04a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215ZM11.5 7a4.499 4.499 0 1 0-8.997 0A4.499 4.499 0 0 0 11.5 7Z"></path></svg></span><input type="text" aria-label="Go to file" role="combobox" aria-controls="file-results-list" aria-expanded="false" aria-haspopup="dialog" autoCorrect="off" spellcheck="false" placeholder="Go to file" aria-describedby=":R2j5ab: :R2j5abH1:" data-component="input" class="prc-components-Input-Ic-y8" value=""/><span class="TextInput-icon" id=":R2j5abH1:" aria-hidden="true"></span></span></div><!--/$--></div><div class="Box-sc-g0xbh4-0 YUPas"><button type="button" class="prc-Button-ButtonBase-c50BI" data-loading="false" data-no-visuals="true" data-size="medium" data-variant="default" aria-describedby=":Rr5ab:-loading-announcement"><span data-component="buttonContent" data-align="center" class="prc-Button-ButtonContent-HKbr-"><span data-component="text" class="prc-Button-Label-pTQ3x">Go to file</span></span></button></div><div class="react-directory-add-file-icon"></div><div class="react-directory-remove-file-icon"></div></div><button type="button" aria-haspopup="true" aria-expanded="false" tabindex="0" class="prc-Button-ButtonBase-c50BI" data-loading="false" data-size="medium" data-variant="primary" aria-describedby=":R55ab:-loading-announcement" id=":R55ab:"><span data-component="buttonContent" data-align="center" class="prc-Button-ButtonContent-HKbr-"><span data-component="leadingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-code hide-sm" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="m11.28 3.22 4.25 4.25a.75.75 0 0 1 0 1.06l-4.25 4.25a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L13.94 8l-3.72-3.72a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215Zm-6.56 0a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042L2.06 8l3.72 3.72a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L.47 8.53a.75.75 0 0 1 0-1.06Z"></path></svg></span><span data-component="text" class="prc-Button-Label-pTQ3x">Code</span><span data-component="trailingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-triangle-down" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="m4.427 7.427 3.396 3.396a.25.25 0 0 0 .354 0l3.396-3.396A.25.25 0 0 0 11.396 7H4.604a.25.25 0 0 0-.177.427Z"></path></svg></span></span></button><div class="Box-sc-g0xbh4-0 izFOf"><button data-component="IconButton" type="button" aria-label="Open more actions menu" aria-haspopup="true" aria-expanded="false" tabindex="0" class="prc-Button-ButtonBase-c50BI prc-Button-IconButton-szpyj" data-loading="false" data-no-visuals="true" data-size="medium" data-variant="default" aria-describedby=":R75ab:-loading-announcement" id=":R75ab:"><svg aria-hidden="true" focusable="false" class="octicon octicon-kebab-horizontal" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M8 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3ZM1.5 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Zm13 0a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path></svg></button></div></div></div><div class="Box-sc-g0xbh4-0 vIPPs"><div 
data-hpc="true"><button hidden="" data-testid="focus-next-element-button" data-hotkey="j"></button><button hidden="" data-testid="focus-previous-element-button" data-hotkey="k"></button><h2 class="sr-only ScreenReaderHeading-module__userSelectNone--vW4Cq prc-Heading-Heading-6CmGO" data-testid="screen-reader-heading" id="folders-and-files">Folders and files</h2><table aria-labelledby="folders-and-files" class="Box-sc-g0xbh4-0 fdROMU"><thead class="Box-sc-g0xbh4-0 jGKpsv"><tr class="Box-sc-g0xbh4-0 jdgHnn"><th colSpan="2" class="Box-sc-g0xbh4-0 bQivRW"><span class="text-bold">Name</span></th><th colSpan="1" class="Box-sc-g0xbh4-0 ldkMIO"><span class="text-bold">Name</span></th><th class="hide-sm"><div title="Last commit message" class="Truncate__StyledTruncate-sc-23o1d2-0 liVpTx width-fit"><span class="text-bold">Last commit message</span></div></th><th colSpan="1" class="Box-sc-g0xbh4-0 jMbWeI"><div title="Last commit date" class="Truncate__StyledTruncate-sc-23o1d2-0 liVpTx width-fit"><span class="text-bold">Last commit date</span></div></th></tr></thead><tbody><tr class="Box-sc-g0xbh4-0 gpqjiB"><td colSpan="3" class="bgColor-muted p-1 rounded-top-2"><div class="Box-sc-g0xbh4-0 dzCJzi"><h2 class="sr-only ScreenReaderHeading-module__userSelectNone--vW4Cq prc-Heading-Heading-6CmGO" data-testid="screen-reader-heading">Latest commit</h2><div style="width:120px" class="Skeleton Skeleton--text" data-testid="loading"> </div><div class="d-flex flex-shrink-0 gap-2"><div data-testid="latest-commit-details" class="d-none d-sm-flex flex-items-center"></div><div class="d-flex gap-2"><h2 class="sr-only ScreenReaderHeading-module__userSelectNone--vW4Cq prc-Heading-Heading-6CmGO" data-testid="screen-reader-heading">History</h2><a href="/ymcui/Chinese-LLaMA-Alpaca-2/commits/main/" class="prc-Button-ButtonBase-c50BI d-none d-lg-flex LinkButton-module__code-view-link-button--xvCGA flex-items-center fgColor-default" data-loading="false" data-size="small" data-variant="invisible" aria-describedby=":Raqj8pab:-loading-announcement"><span data-component="buttonContent" data-align="center" class="prc-Button-ButtonContent-HKbr-"><span data-component="leadingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-history" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="m.427 1.927 1.215 1.215a8.002 8.002 0 1 1-1.6 5.685.75.75 0 1 1 1.493-.154 6.5 6.5 0 1 0 1.18-4.458l1.358 1.358A.25.25 0 0 1 3.896 6H.25A.25.25 0 0 1 0 5.75V2.104a.25.25 0 0 1 .427-.177ZM7.75 4a.75.75 0 0 1 .75.75v2.992l2.028.812a.75.75 0 0 1-.557 1.392l-2.5-1A.751.751 0 0 1 7 8.25v-3.5A.75.75 0 0 1 7.75 4Z"></path></svg></span><span data-component="text" class="prc-Button-Label-pTQ3x"><span class="fgColor-default">264 Commits</span></span></span></a><div class="d-sm-none"></div><div class="d-flex d-lg-none"><span role="tooltip" aria-label="264 Commits" id="history-icon-button-tooltip" class="Tooltip__TooltipBase-sc-17tf59c-0 hWlpPn tooltipped-n"><a href="/ymcui/Chinese-LLaMA-Alpaca-2/commits/main/" class="prc-Button-ButtonBase-c50BI LinkButton-module__code-view-link-button--xvCGA flex-items-center fgColor-default" data-loading="false" data-size="small" data-variant="invisible" aria-describedby=":R1iqj8pab:-loading-announcement history-icon-button-tooltip"><span data-component="buttonContent" data-align="center" class="prc-Button-ButtonContent-HKbr-"><span 
data-component="leadingVisual" class="prc-Button-Visual-2epfX prc-Button-VisualWrap-Db-eB"><svg aria-hidden="true" focusable="false" class="octicon octicon-history" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="m.427 1.927 1.215 1.215a8.002 8.002 0 1 1-1.6 5.685.75.75 0 1 1 1.493-.154 6.5 6.5 0 1 0 1.18-4.458l1.358 1.358A.25.25 0 0 1 3.896 6H.25A.25.25 0 0 1 0 5.75V2.104a.25.25 0 0 1 .427-.177ZM7.75 4a.75.75 0 0 1 .75.75v2.992l2.028.812a.75.75 0 0 1-.557 1.392l-2.5-1A.751.751 0 0 1 7 8.25v-3.5A.75.75 0 0 1 7.75 4Z"></path></svg></span></span></a></span></div></div></div></div></td></tr><tr class="react-directory-row undefined" id="folder-row-0"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".github" aria-label=".github, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/.github">.github</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".github" aria-label=".github, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/.github">.github</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-1"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="examples" aria-label="examples, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/examples">examples</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div 
class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="examples" aria-label="examples, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/examples">examples</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-2"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="notebooks" aria-label="notebooks, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/notebooks">notebooks</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="notebooks" aria-label="notebooks, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/notebooks">notebooks</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-3"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="pics" 
aria-label="pics, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/pics">pics</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="pics" aria-label="pics, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/pics">pics</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-4"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="prompts" aria-label="prompts, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/prompts">prompts</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="prompts" aria-label="prompts, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/prompts">prompts</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-5"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 
1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="scripts" aria-label="scripts, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/scripts">scripts</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file-directory-fill icon-directory" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M1.75 1A1.75 1.75 0 0 0 0 2.75v10.5C0 14.216.784 15 1.75 15h12.5A1.75 1.75 0 0 0 16 13.25v-8.5A1.75 1.75 0 0 0 14.25 3H7.5a.25.25 0 0 1-.2-.1l-.9-1.2C6.07 1.26 5.55 1 5 1H1.75Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="scripts" aria-label="scripts, (Directory)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/tree/main/scripts">scripts</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-6"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".gitattributes" aria-label=".gitattributes, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/.gitattributes">.gitattributes</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".gitattributes" aria-label=".gitattributes, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/.gitattributes">.gitattributes</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> 
</div></td></tr><tr class="react-directory-row undefined" id="folder-row-7"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".gitignore" aria-label=".gitignore, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/.gitignore">.gitignore</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title=".gitignore" aria-label=".gitignore, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/.gitignore">.gitignore</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-8"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="CITATION.cff" aria-label="CITATION.cff, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/CITATION.cff">CITATION.cff</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" 
display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="CITATION.cff" aria-label="CITATION.cff, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/CITATION.cff">CITATION.cff</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row undefined" id="folder-row-9"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="LICENSE" aria-label="LICENSE, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/LICENSE">LICENSE</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="LICENSE" aria-label="LICENSE, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/LICENSE">LICENSE</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row truncate-for-mobile" id="folder-row-10"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 
1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="README.md" aria-label="README.md, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README.md">README.md</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="README.md" aria-label="README.md, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README.md">README.md</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row truncate-for-mobile" id="folder-row-11"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="README_EN.md" aria-label="README_EN.md, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README_EN.md">README_EN.md</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div 
class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="README_EN.md" aria-label="README_EN.md, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README_EN.md">README_EN.md</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="react-directory-row truncate-for-mobile" id="folder-row-12"><td class="react-directory-row-name-cell-small-screen" colSpan="2"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="requirements.txt" aria-label="requirements.txt, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/requirements.txt">requirements.txt</a></div></div></div></div></td><td class="react-directory-row-name-cell-large-screen" colSpan="1"><div class="react-directory-filename-column"><svg aria-hidden="true" focusable="false" class="octicon octicon-file color-fg-muted" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M2 1.75C2 .784 2.784 0 3.75 0h6.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v9.586A1.75 1.75 0 0 1 13.25 16h-9.5A1.75 1.75 0 0 1 2 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h9.5a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 9 4.25V1.5Zm6.75.062V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path></svg><div class="overflow-hidden"><div class="react-directory-filename-cell"><div class="react-directory-truncate"><a title="requirements.txt" aria-label="requirements.txt, (File)" class="Link--primary" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/requirements.txt">requirements.txt</a></div></div></div></div></td><td class="react-directory-row-commit-cell"><div class="Skeleton Skeleton--text"> </div></td><td><div class="Skeleton Skeleton--text"> </div></td></tr><tr class="Box-sc-g0xbh4-0 eNCcrz show-for-mobile" data-testid="view-all-files-row"><td colSpan="3" class="Box-sc-g0xbh4-0 bHTcCe"><div><button class="prc-Link-Link-85e08">View all files</button></div></td></tr></tbody></table></div><div class="Box-sc-g0xbh4-0 csrIcr"><div class="Box-sc-g0xbh4-0 bUQNHB"><div itemscope="" itemType="https://schema.org/abstract" class="Box-sc-g0xbh4-0 jPdcfu"><h2 class="_VisuallyHidden__VisuallyHidden-sc-11jhm7a-0 brGdpi">Repository files navigation</h2><nav class="Box-sc-g0xbh4-0 iphEWz prc-components-UnderlineWrapper-oOh5J" aria-label="Repository files"><ul class="prc-components-UnderlineItemList-b23Hf" role="list"><li class="Box-sc-g0xbh4-0 hUCRAk"><a class="prc-components-UnderlineItem-lJsg-" href="#" aria-current="page"><span data-component="icon"><svg aria-hidden="true" focusable="false" 
class="octicon octicon-book" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M0 1.75A.75.75 0 0 1 .75 1h4.253c1.227 0 2.317.59 3 1.501A3.743 3.743 0 0 1 11.006 1h4.245a.75.75 0 0 1 .75.75v10.5a.75.75 0 0 1-.75.75h-4.507a2.25 2.25 0 0 0-1.591.659l-.622.621a.75.75 0 0 1-1.06 0l-.622-.621A2.25 2.25 0 0 0 5.258 13H.75a.75.75 0 0 1-.75-.75Zm7.251 10.324.004-5.073-.002-2.253A2.25 2.25 0 0 0 5.003 2.5H1.5v9h3.757a3.75 3.75 0 0 1 1.994.574ZM8.755 4.75l-.004 7.322a3.752 3.752 0 0 1 1.992-.572H14.5v-9h-3.495a2.25 2.25 0 0 0-2.25 2.25Z"></path></svg></span><span data-component="text" data-content="README">README</span></a></li><li class="Box-sc-g0xbh4-0 hUCRAk"><a class="prc-components-UnderlineItem-lJsg-" href="#"><span data-component="icon"><svg aria-hidden="true" focusable="false" class="octicon octicon-law" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M8.75.75V2h.985c.304 0 .603.08.867.231l1.29.736c.038.022.08.033.124.033h2.234a.75.75 0 0 1 0 1.5h-.427l2.111 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.006.005-.01.01-.045.04c-.21.176-.441.327-.686.45C14.556 10.78 13.88 11 13 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L12.178 4.5h-.162c-.305 0-.604-.079-.868-.231l-1.29-.736a.245.245 0 0 0-.124-.033H8.75V13h2.5a.75.75 0 0 1 0 1.5h-6.5a.75.75 0 0 1 0-1.5h2.5V3.5h-.984a.245.245 0 0 0-.124.033l-1.289.737c-.265.15-.564.23-.869.23h-.162l2.112 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.016.015-.045.04c-.21.176-.441.327-.686.45C4.556 10.78 3.88 11 3 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L2.178 4.5H1.75a.75.75 0 0 1 0-1.5h2.234a.249.249 0 0 0 .125-.033l1.288-.737c.265-.15.564-.23.869-.23h.984V.75a.75.75 0 0 1 1.5 0Zm2.945 8.477c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L13 6.327Zm-10 0c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L3 6.327Z"></path></svg></span><span data-component="text" data-content="Apache-2.0 license">Apache-2.0 license</span></a></li></ul></nav><button style="--button-color:fg.subtle" type="button" aria-label="Outline" aria-haspopup="true" aria-expanded="false" tabindex="0" class="Box-sc-g0xbh4-0 cwoBXV prc-Button-ButtonBase-c50BI" data-loading="false" data-size="medium" data-variant="invisible" aria-describedby=":Rr9ab:-loading-announcement" id=":Rr9ab:"><svg aria-hidden="true" focusable="false" class="octicon octicon-list-unordered" viewBox="0 0 16 16" width="16" height="16" fill="currentColor" display="inline-block" overflow="visible" style="vertical-align:text-bottom"><path d="M5.75 2.5h8.5a.75.75 0 0 1 0 1.5h-8.5a.75.75 0 0 1 0-1.5Zm0 5h8.5a.75.75 0 0 1 0 1.5h-8.5a.75.75 0 0 1 0-1.5Zm0 5h8.5a.75.75 0 0 1 0 1.5h-8.5a.75.75 0 0 1 0-1.5ZM2 14a1 1 0 1 1 0-2 1 1 0 0 1 0 2Zm1-6a1 1 0 1 1-2 0 1 1 0 0 1 2 0ZM2 4a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"></path></svg></button></div><div class="Box-sc-g0xbh4-0 QkQOb js-snippet-clipboard-copy-unpositioned undefined" data-hpc="true"><article class="markdown-body entry-content container-lg" itemprop="text"><div class="markdown-heading" dir="auto"><h1 tabindex="-1" class="heading-element" dir="auto"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-3">Chinese-LLaMA-Alpaca-3</a>项目启动!</h1><a 
id="user-content-chinese-llama-alpaca-3项目启动" class="anchor" aria-label="Permalink: Chinese-LLaMA-Alpaca-3项目启动!" href="#chinese-llama-alpaca-3项目启动"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto"><a href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README.md"><strong>🇨🇳中文</strong></a> | <a href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/README_EN.md"><strong>🌐English</strong></a> | <a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki"><strong>📖文档/Docs</strong></a> | <a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/issues"><strong>❓提问/Issues</strong></a> | <a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/discussions"><strong>💬讨论/Discussions</strong></a> | <a href="http://llm-arena.ymcui.com/" rel="nofollow"><strong>⚔️竞技场/Arena</strong></a></p> <p align="center" dir="auto"> <br> <a target="_blank" rel="noopener noreferrer" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/pics/banner.png"><img src="/ymcui/Chinese-LLaMA-Alpaca-2/raw/main/pics/banner.png" width="800" style="max-width: 100%;"></a> <br> </p> <p align="center" dir="auto"> <a target="_blank" rel="noopener noreferrer nofollow" href="https://camo.githubusercontent.com/31cd0ec9b466a08fc3b292763f2130bb9367df59508160e848f3a17bd058e595/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c6963656e73652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d322e7376673f636f6c6f723d626c7565267374796c653d666c61742d737175617265"><img alt="GitHub" src="https://camo.githubusercontent.com/31cd0ec9b466a08fc3b292763f2130bb9367df59508160e848f3a17bd058e595/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c6963656e73652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d322e7376673f636f6c6f723d626c7565267374796c653d666c61742d737175617265" data-canonical-src="https://img.shields.io/github/license/ymcui/Chinese-LLaMA-Alpaca-2.svg?color=blue&amp;style=flat-square" style="max-width: 100%;"></a> <a target="_blank" rel="noopener noreferrer nofollow" href="https://camo.githubusercontent.com/24dcc1a4b8254cbc773e6f7a3860e3481282ffc8f863f09ea0766cb580782d6f/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f762f72656c656173652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32"><img alt="GitHub release (latest by date)" src="https://camo.githubusercontent.com/24dcc1a4b8254cbc773e6f7a3860e3481282ffc8f863f09ea0766cb580782d6f/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f762f72656c656173652f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32" data-canonical-src="https://img.shields.io/github/v/release/ymcui/Chinese-LLaMA-Alpaca-2" style="max-width: 100%;"></a> <a target="_blank" rel="noopener noreferrer nofollow" href="https://camo.githubusercontent.com/ae50bb200144d74e4430372f7bd93305547d17775caaab21b3e8aa541b157d20/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c616e6775616765732f746f702f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32"><img 
alt="GitHub top language" src="https://camo.githubusercontent.com/ae50bb200144d74e4430372f7bd93305547d17775caaab21b3e8aa541b157d20/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c616e6775616765732f746f702f796d6375692f4368696e6573652d4c4c614d412d416c706163612d32" data-canonical-src="https://img.shields.io/github/languages/top/ymcui/Chinese-LLaMA-Alpaca-2" style="max-width: 100%;"></a> <a href="https://app.codacy.com/gh/ymcui/Chinese-LLaMA-Alpaca-2/dashboard?utm_source=gh&amp;utm_medium=referral&amp;utm_content=&amp;utm_campaign=Badge_grade" rel="nofollow"><img src="https://camo.githubusercontent.com/805d9d39942744f73da9db6bfc252b112e3f52673a42cdc0491db3bed02f524f/68747470733a2f2f6170702e636f646163792e636f6d2f70726f6a6563742f62616467652f47726164652f3137313066616163356536333461636161626663323662306137373863646465" data-canonical-src="https://app.codacy.com/project/badge/Grade/1710faac5e634acaabfc26b0a778cdde" style="max-width: 100%;"></a> </p> <p dir="auto">本项目基于Meta发布的可商用大模型<a href="https://github.com/facebookresearch/llama">Llama-2</a>开发,是<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca">中文LLaMA&amp;Alpaca大模型</a>的第二期项目,开源了<strong>中文LLaMA-2基座模型和Alpaca-2指令精调大模型</strong>。这些模型<strong>在原版Llama-2的基础上扩充并优化了中文词表</strong>,使用了大规模中文数据进行增量预训练,进一步提升了中文基础语义和指令理解能力,相比一代相关模型获得了显著性能提升。相关模型<strong>支持FlashAttention-2训练</strong>。标准版模型支持4K上下文长度,<strong>长上下文版模型支持16K、64k上下文长度</strong>。<strong>RLHF系列模型</strong>为标准版模型基础上进行人类偏好对齐精调,相比标准版模型在<strong>正确价值观体现</strong>方面获得了显著性能提升。</p> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">本项目主要内容</h4><a id="user-content-本项目主要内容" class="anchor" aria-label="Permalink: 本项目主要内容" href="#本项目主要内容"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <ul dir="auto"> <li>🚀 针对Llama-2模型扩充了<strong>新版中文词表</strong>,开源了中文LLaMA-2和Alpaca-2大模型</li> <li>🚀 开源了预训练脚本、指令精调脚本,用户可根据需要进一步训练模型</li> <li>🚀 使用个人电脑的CPU/GPU快速在本地进行大模型量化和部署体验</li> <li>🚀 支持<a href="https://github.com/huggingface/transformers">🤗transformers</a>, <a href="https://github.com/ggerganov/llama.cpp">llama.cpp</a>, <a href="https://github.com/oobabooga/text-generation-webui">text-generation-webui</a>, <a href="https://github.com/hwchase17/langchain">LangChain</a>, <a href="https://github.com/imartinez/privateGPT">privateGPT</a>, <a href="https://github.com/vllm-project/vllm">vLLM</a>等LLaMA生态</li> </ul> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">已开源的模型</h4><a id="user-content-已开源的模型" class="anchor" aria-label="Permalink: 已开源的模型" href="#已开源的模型"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 
- Base models (4K context): Chinese-LLaMA-2 (1.3B, 7B, 13B)
- Chat models (4K context): Chinese-Alpaca-2 (1.3B, 7B, 13B)
- Long-context models (16K/64K):
  - Chinese-LLaMA-2-16K (7B, 13B), Chinese-Alpaca-2-16K (7B, 13B)
  - Chinese-LLaMA-2-64K (7B), Chinese-Alpaca-2-64K (7B)
- Preference-aligned models: Chinese-Alpaca-2-RLHF (1.3B, 7B)

![](/ymcui/Chinese-LLaMA-Alpaca-2/raw/main/pics/screencast.gif)

---

[Chinese LLaMA & Alpaca LLMs](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | [Multimodal Chinese LLaMA & Alpaca LLMs](https://github.com/airaria/Visual-Chinese-LLaMA-Alpaca) | [Multimodal VLE](https://github.com/iflytek/VLE) | [Chinese MiniRBT](https://github.com/iflytek/MiniRBT) | [Chinese LERT](https://github.com/ymcui/LERT) | [Chinese-English PERT](https://github.com/ymcui/PERT) | [Chinese MacBERT](https://github.com/ymcui/MacBERT) | [Chinese ELECTRA](https://github.com/ymcui/Chinese-ELECTRA) | [Chinese XLNet](https://github.com/ymcui/Chinese-XLNet) | [Chinese BERT](https://github.com/ymcui/Chinese-BERT-wwm) | [Knowledge distillation toolkit TextBrewer](https://github.com/airaria/TextBrewer) | [Model pruning toolkit TextPruner](https://github.com/airaria/TextPruner) | [Distillation + pruning toolkit GRAIN](https://github.com/airaria/GRAIN)

## News

**[2024/04/30] Chinese-LLaMA-Alpaca-3 has been officially released, open-sourcing Llama-3-Chinese-8B and Llama-3-Chinese-8B-Instruct based on Llama-3. Users of the first- and second-generation projects are encouraged to upgrade to the third-generation models: https://github.com/ymcui/Chinese-LLaMA-Alpaca-3**

[2024/03/27] This project is now listed on the SOTA! model platform of 机器之心 (Synced): https://sota.jiqizhixin.com/project/chinese-llama-alpaca-2

[2024/01/23] Added new GGUF models (imatrix quantization) and AWQ-quantized models; added support for loading the YaRN long-context models in vLLM. See the [📚 v4.1 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v4.1)

[2023/12/29] Released the long-context models Chinese-LLaMA-2-7B-64K and Chinese-Alpaca-2-7B-64K, along with Chinese-Alpaca-2-RLHF (1.3B/7B) aligned with human preferences via RLHF. See the [📚 v4.0 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v4.0)
[2023/09/01] Released the long-context models Chinese-Alpaca-2-7B-16K and Chinese-Alpaca-2-13B-16K, which can be used directly in downstream tasks such as privateGPT. See the [📚 v3.1 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v3.1)

[2023/08/25] Released the long-context models Chinese-LLaMA-2-7B-16K and Chinese-LLaMA-2-13B-16K, supporting 16K context and extensible to 24K+ via the NTK method. See the [📚 v3.0 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v3.0)

[2023/08/14] Released Chinese-LLaMA-2-13B and Chinese-Alpaca-2-13B; added text-generation-webui/LangChain/privateGPT support and the CFG sampling decoding method, among others. See the [📚 v2.0 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v2.0)

[2023/08/02] Added FlashAttention-2 training support and vLLM-based inference acceleration; provided a system prompt template for long responses, among others. See the [📚 v1.1 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v1.1)

[2023/07/31] Officially released Chinese-LLaMA-2-7B (base model), incrementally trained on 120 GB of Chinese text (same as the first-generation Plus series), then fine-tuned on 5M instruction samples (slightly more than the first generation) to obtain Chinese-Alpaca-2-7B (instruction/chat model). See the [📚 v1.0 release notes](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v1.0)

[2023/07/19] 🚀 Launched the [Chinese LLaMA-2 & Alpaca-2 open-source LLM project](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2)

## Table of Contents

| Section | Description |
| :--- | :--- |
| [💁🏻‍♂️Model Introduction](#model-introduction) | Briefly introduces the technical features of the models in this project |
| [⏬Model Downloads](#model-downloads) | Download links for the Chinese LLaMA-2 and Alpaca-2 models |
| [💻Inference and Deployment](#inference-and-deployment) | How to quantize the models and deploy them on a personal computer |
| [💯System Performance](#system-performance) | Model performance on selected tasks |
| [📝Training and Fine-tuning](#training-and-fine-tuning) | How to train and fine-tune the Chinese LLaMA-2 and Alpaca-2 models |
| [❓FAQ](#faq) | Answers to some frequently asked questions |

## Model Introduction
This project releases the Chinese LLaMA-2 and Alpaca-2 model series based on Llama-2. Compared with the [first-generation project](https://github.com/ymcui/Chinese-LLaMA-Alpaca), the main features are as follows.

#### 📖 Optimized Chinese vocabulary

- In the [first-generation project](https://github.com/ymcui/Chinese-LLaMA-Alpaca), we extended the original LLaMA's 32K vocabulary with Chinese characters and words (LLaMA: 49,953; Alpaca: 49,954)
- In this project, we **redesigned the vocabulary** (size: 55,296) to further improve coverage of Chinese characters and words, and unified the LLaMA/Alpaca vocabularies to avoid problems caused by mixing them, aiming for more efficient encoding and decoding of Chinese text

#### ⚡ Efficient attention with FlashAttention-2

- [FlashAttention-2](https://github.com/Dao-AILab/flash-attention) is an implementation of efficient attention with **higher speed and better memory utilization** than its first version
- As the context length grows, efficient attention techniques like this become essential to avoid explosive growth in GPU memory usage
- All models in this project were trained with FlashAttention-2
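As a concrete illustration, the sketch below shows one common way to enable FlashAttention-2 when loading a model through 🤗 transformers; the `attn_implementation` argument is available in recent transformers versions and requires the `flash-attn` package, and the model name is just an example from the download tables below.

```python
# A minimal sketch (not the project's official training code): loading a model
# with FlashAttention-2 enabled via recent 🤗 transformers versions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

name = "hfl/chinese-alpaca-2-7b"  # example model from the download tables
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForCausalLM.from_pretrained(
    name,
    torch_dtype=torch.float16,                 # FA2 requires fp16/bf16 weights
    attn_implementation="flash_attention_2",   # needs the flash-attn package
    device_map="auto",
)
```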
dir="auto"> <li>在<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca">一期项目</a>中,我们实现了<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca/pull/743" data-hovercard-type="pull_request" data-hovercard-url="/ymcui/Chinese-LLaMA-Alpaca/pull/743/hovercard">基于NTK的上下文扩展技术</a>,可在不继续训练模型的情况下支持更长的上下文</li> <li>基于<a href="https://arxiv.org/abs/2306.15595" rel="nofollow">位置插值PI</a>和NTK等方法推出了16K长上下文版模型,支持16K上下文,并可通过NTK方法最高扩展至24K-32K</li> <li>基于<a href="https://arxiv.org/abs/2309.00071" rel="nofollow">YaRN</a>方法进一步推出了64K长上下文版模型,支持64K上下文</li> <li>进一步设计了<strong>方便的自适应经验公式</strong>,无需针对不同的上下文长度设置NTK超参,降低了使用难度</li> </ul> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">🤖 简化的中英双语系统提示语</h4><a id="user-content--简化的中英双语系统提示语" class="anchor" aria-label="Permalink: 🤖 简化的中英双语系统提示语" href="#-简化的中英双语系统提示语"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <ul dir="auto"> <li>在<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca">一期项目</a>中,中文Alpaca系列模型使用了<a href="https://github.com/tatsu-lab/stanford_alpaca">Stanford Alpaca</a>的指令模板和系统提示语</li> <li>初步实验发现,Llama-2-Chat系列模型的默认系统提示语未能带来统计显著的性能提升,且其内容过于冗长</li> <li>本项目中的Alpaca-2系列模型简化了系统提示语,同时遵循Llama-2-Chat指令模板,以便更好地适配相关生态</li> </ul> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">👮 人类偏好对齐</h4><a id="user-content--人类偏好对齐" class="anchor" aria-label="Permalink: 👮 人类偏好对齐" href="#-人类偏好对齐"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <ul dir="auto"> <li>在<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca">一期项目</a>中,中文Alpaca系列模型仅完成预训练和指令精调,获得了基本的对话能力</li> <li>通过基于人类反馈的强化学习(RLHF)实验,发现可显著提升模型传递正确价值观的能力</li> <li>本项目推出了Alpaca-2-RLHF系列模型,使用方式与SFT模型一致</li> </ul> <p dir="auto">下图展示了本项目以及<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca">一期项目</a>推出的所有大模型之间的关系。</p> <p dir="auto"><a target="_blank" rel="noopener noreferrer" href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/pics/models.png"><img src="/ymcui/Chinese-LLaMA-Alpaca-2/raw/main/pics/models.png" alt="" style="max-width: 100%;"></a></p> <div class="markdown-heading" dir="auto"><h2 tabindex="-1" class="heading-element" dir="auto">模型下载</h2><a id="user-content-模型下载" class="anchor" aria-label="Permalink: 模型下载" href="#模型下载"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" 
aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">模型选择指引</h3><a id="user-content-模型选择指引" class="anchor" aria-label="Permalink: 模型选择指引" href="#模型选择指引"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">以下是中文LLaMA-2和Alpaca-2模型的对比以及建议使用场景。<strong>如需聊天交互,请选择Alpaca而不是LLaMA。</strong></p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">对比项</th> <th align="center">中文LLaMA-2</th> <th align="center">中文Alpaca-2</th> </tr> </thead> <tbody> <tr> <td align="left">模型类型</td> <td align="center"><strong>基座模型</strong></td> <td align="center"><strong>指令/Chat模型(类ChatGPT)</strong></td> </tr> <tr> <td align="left">已开源大小</td> <td align="center">1.3B、7B、13B</td> <td align="center">1.3B、7B、13B</td> </tr> <tr> <td align="left">训练类型</td> <td align="center">Causal-LM (CLM)</td> <td align="center">指令精调</td> </tr> <tr> <td align="left">训练方式</td> <td align="center">7B、13B:LoRA + 全量emb/lm-head<br>1.3B:全量</td> <td align="center">7B、13B:LoRA + 全量emb/lm-head<br>1.3B:全量</td> </tr> <tr> <td align="left">基于什么模型训练</td> <td align="center"><a href="https://github.com/facebookresearch/llama">原版Llama-2</a>(非chat版)</td> <td align="center">中文LLaMA-2</td> </tr> <tr> <td align="left">训练语料</td> <td align="center">无标注通用语料(120G纯文本)</td> <td align="center">有标注指令数据(500万条)</td> </tr> <tr> <td align="left">词表大小<sup>[1]</sup></td> <td align="center">55,296</td> <td align="center">55,296</td> </tr> <tr> <td align="left">上下文长度<sup>[2]</sup></td> <td align="center">标准版:4K(12K-18K)<br>长上下文版(PI):16K(24K-32K)<br>长上下文版(YaRN):64K</td> <td align="center">标准版:4K(12K-18K)<br>长上下文版(PI):16K(24K-32K)<br>长上下文版(YaRN):64K</td> </tr> <tr> <td align="left">输入模板</td> <td align="center">不需要</td> <td align="center">需要套用特定模板<sup>[3]</sup>,类似Llama-2-Chat</td> </tr> <tr> <td align="left">适用场景</td> <td align="center">文本续写:给定上文,让模型生成下文</td> <td align="center">指令理解:问答、写作、聊天、交互等</td> </tr> <tr> <td align="left">不适用场景</td> <td align="center">指令理解 、多轮聊天等</td> <td align="center">文本无限制自由生成</td> </tr> <tr> <td align="left">偏好对齐</td> <td align="center">无</td> <td align="center">RLHF版本(1.3B、7B)</td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-alert markdown-alert-note" dir="auto"><p class="markdown-alert-title" dir="auto"><svg class="octicon octicon-info mr-2" viewBox="0 0 16 16" version="1.1" width="16" 
height="16" aria-hidden="true"><path d="M0 8a8 8 0 1 1 16 0A8 8 0 0 1 0 8Zm8-6.5a6.5 6.5 0 1 0 0 13 6.5 6.5 0 0 0 0-13ZM6.5 7.75A.75.75 0 0 1 7.25 7h1a.75.75 0 0 1 .75.75v2.75h.25a.75.75 0 0 1 0 1.5h-2a.75.75 0 0 1 0-1.5h.25v-2h-.25a.75.75 0 0 1-.75-.75ZM8 6a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"></path></svg>Note</p><p dir="auto">[1] <em>本项目一代模型和二代模型的词表不同,请勿混用。二代LLaMA和Alpaca的词表相同。</em><br> [2] <em>括号内表示基于NTK上下文扩展支持的最大长度。</em><br> [3] <em>Alpaca-2采用了Llama-2-chat系列模板(格式相同,提示语不同),而不是一代Alpaca的模板,请勿混用。</em><br> [4] <em>不建议单独使用1.3B模型,而是通过投机采样搭配更大的模型(7B、13B)使用。</em><br></p> </div> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">完整模型下载</h3><a id="user-content-完整模型下载" class="anchor" aria-label="Permalink: 完整模型下载" href="#完整模型下载"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">以下是完整版模型,直接下载即可使用,无需其他合并步骤。推荐网络带宽充足的用户。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">模型名称</th> <th align="center">类型</th> <th align="center">大小</th> <th align="center">下载地址</th> <th align="center">GGUF</th> </tr> </thead> <tbody> <tr> <td align="left">Chinese-LLaMA-2-13B</td> <td align="center">基座模型</td> <td align="center">24.7 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1T3RqEUSmyg6ZuBwMhwSmoQ?pwd=e9qy" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1YNa5qJ0x59OEOI7tNODxea-1YvMPoH05?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-13b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-13b" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-13b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-7B</td> <td align="center">基座模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1E5NI3nlQpx1j8z3eIzbIlg?pwd=n8k3" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/18pp4I-mvQxRA7b8vF9gP-2cH_ocnXVKh?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-7b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-7b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-1.3B</td> <td align="center">基座模型</td> <td align="center">2.4 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1hEuOCllnJJ5NMEZJf8OkRw?pwd=nwjg" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1Sd3PA_gs6JctXtBg5HwmHXh9GX93riMP?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-1.3b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-1.3b" 
rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-1.3b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-13B</td> <td align="center">指令模型</td> <td align="center">24.7 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1MT_Zlap1OtdYMgoBNTS3dg?pwd=9xja" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1MTsKlzR61xmbTR4hBWzQas_MOpUZsogN?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-13b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-13b" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-13b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-7B</td> <td align="center">指令模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1wxx-CdgbMupXVRBcaN4Slw?pwd=kpn9" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1JsJDVs7tE2y31PBNleBlDPsB7S0ZrY8d?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-1.3B</td> <td align="center">指令模型</td> <td align="center">2.4 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1PD7Ng-ltOIdUGHNorveptA?pwd=ar1p" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1h6qOy-Unvqs1_CJ8uPp0eKC61Gbbn8n7?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-1.3b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-1.3b" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-1.3b-gguf" rel="nofollow">[🤗HF]</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">长上下文版模型</h4><a id="user-content-长上下文版模型" class="anchor" aria-label="Permalink: 长上下文版模型" href="#长上下文版模型"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">以下是长上下文版模型,<strong>推荐以长文本为主的下游任务使用</strong>,否则建议使用上述标准版。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">模型名称</th> <th align="center">类型</th> <th align="center">大小</th> <th align="center">下载地址</th> <th align="center">GGUF</th> </tr> </thead> <tbody> <tr> <td align="left">Chinese-LLaMA-2-7B-64K 🆕</td> <td align="center">基座模型</td> <td align="center">12.9 GB</td> <td align="center"><a 
href="https://pan.baidu.com/s/1ShDQ2FG2QUJrvfnxCn4hwQ?pwd=xe5k" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/17l9xJx55L2YNpqt7NiLVQzOZ6fV4rzJ-?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-7b-64k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b-64k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-7b-64k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-7B-64K 🆕</td> <td align="center">指令模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1KBAr9PCGvX2oQkYfCuLEjw?pwd=sgp6" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/13G_d5xcDnhtaMOaulj1BFiZbVoVwJ-Cu?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-64k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-64k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-64k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-13B-16K</td> <td align="center">基座模型</td> <td align="center">24.7 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1XWrh3Ru9x4UI4-XmocVT2w?pwd=f7ik" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1nii6lF0DgB1u81CnsE4cCK2jD5oq_OW-?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-13b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-13b-16k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-13b-16k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-7B-16K</td> <td align="center">基座模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1ZH7T7KU_up61ugarSIXw2g?pwd=pquq" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1Zc6jI5bl3myQbQsY79dWJJ8mP_fyf3iF?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-7b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-7b-16k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-llama-2-7b-16k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-13B-16K</td> <td align="center">指令模型</td> <td align="center">24.7 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1gIzRM1eg-Xx1xV-3nXW27A?pwd=qi7c" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1mOkYQCvEqtGoZ9DaIpYFweSkSia2Q0vl?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-13b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-13b-16k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-13b-16k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-7B-16K</td> <td align="center">指令模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1Qk3U1LyvMb1RSr5AbiatPw?pwd=bfis" rel="nofollow">[Baidu]</a> <a 
href="https://drive.google.com/drive/folders/1KBRSd2xAhiVQmamfA5wpm5ovYFRKuMdr?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-16k" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-16k-gguf" rel="nofollow">[🤗HF]</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">RLHF版模型</h4><a id="user-content-rlhf版模型" class="anchor" aria-label="Permalink: RLHF版模型" href="#rlhf版模型"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">以下是人类偏好对齐版模型,对涉及法律、道德的问题较标准版有更优的价值导向。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">模型名称</th> <th align="center">类型</th> <th align="center">大小</th> <th align="center">下载地址</th> <th align="center">GGUF</th> </tr> </thead> <tbody> <tr> <td align="left">Chinese-Alpaca-2-7B-RLHF 🆕</td> <td align="center">指令模型</td> <td align="center">12.9 GB</td> <td align="center"><a href="https://pan.baidu.com/s/17GJ1y4rpPDuvWlvPaWgnqw?pwd=4feb" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1OHZVVtwM5McVEIZzyOYgGYLAxcZNVK4D?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-rlhf" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-7b-rlhf" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-7b-rlhf-gguf" rel="nofollow">[🤗HF]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-1.3B-RLHF 🆕</td> <td align="center">指令模型</td> <td align="center">2.4 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1cLKJKieNitWbOggUXXaamw?pwd=cprp" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/drive/folders/1zcvPUPPkq69SgqRu6YBurAZ9ptcPSZNx?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-1.3b-rlhf" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-1.3b-rlhf" rel="nofollow">[🤖ModelScope]</a></td> <td align="center"><a href="https://huggingface.co/hfl/chinese-alpaca-2-1.3b-rlhf-gguf" rel="nofollow">[🤗HF]</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">AWQ版模型</h4><a id="user-content-awq版模型" class="anchor" aria-label="Permalink: AWQ版模型" href="#awq版模型"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 
#### AWQ models

AWQ (Activation-aware Weight Quantization) is an efficient model quantization scheme, currently compatible with mainstream frameworks such as 🤗transformers and llama.cpp.

Pre-computed AWQ search results for this project's models are available at: https://huggingface.co/hfl/chinese-llama-alpaca-2-awq

- Generating AWQ-quantized models (official AWQ repo): https://github.com/mit-han-lab/llm-awq
- Using AWQ in llama.cpp: https://github.com/ggerganov/llama.cpp/tree/master/awq-py
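As an illustration of consuming a pre-quantized AWQ checkpoint, the sketch below uses the third-party AutoAWQ package (not this project's official tooling); the local path is a placeholder.

```python
# Hypothetical sketch with the third-party AutoAWQ package; quant_path is a
# placeholder for a locally prepared AWQ checkpoint of a project model.
from awq import AutoAWQForCausalLM
from transformers import AutoTokenizer

quant_path = "path/to/chinese-alpaca-2-7b-awq"   # placeholder path
model = AutoAWQForCausalLM.from_quantized(quant_path, fuse_layers=True)
tokenizer = AutoTokenizer.from_pretrained(quant_path)
```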
align="left">Chinese-Alpaca-2-LoRA-13B</td> <td align="center">指令模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-13b-hf" rel="nofollow">Llama-2-13B-hf</a></td> <td align="center">1.5 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1Y5giIXOUUzI4Na6JOcviVA?pwd=tc2j" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1z2FIInsYJBTXipgztc-Mv7kkeqscx442/view?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-lora-13b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-13b" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-LoRA-7B</td> <td align="center">指令模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-7b-hf" rel="nofollow">Llama-2-7B-hf</a></td> <td align="center">1.1 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1g0olPxkB_rlZ9UUVfOnbcw?pwd=5e7w" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1MzJL-ZIzdJW7MIcAiYIDIDJ5dlMi8Kkk/view?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-lora-7b" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b" rel="nofollow">[🤖ModelScope]</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <p dir="auto">以下是长上下文版模型,<strong>推荐以长文本为主的下游任务使用</strong>,否则建议使用上述标准版。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">模型名称</th> <th align="center">类型</th> <th align="center">合并所需基模型</th> <th align="center">大小</th> <th align="center">LoRA下载地址</th> </tr> </thead> <tbody> <tr> <td align="left">Chinese-LLaMA-2-LoRA-7B-64K 🆕</td> <td align="center">基座模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-7b-hf" rel="nofollow">Llama-2-7B-hf</a></td> <td align="center">1.1 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1QjqKNM9Xez5g6koUrbII_w?pwd=94pk" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1-NuGqfduUZARRquFjGLpTmI5J-HlXYSR/view?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-lora-7b-64k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-7b-64k" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-LoRA-7B-64K 🆕</td> <td align="center">指令模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-7b-hf" rel="nofollow">Llama-2-7B-hf</a></td> <td align="center">1.1 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1t6bPpMlJCrs9Ce7LXs09-w?pwd=37it" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1qESorx2PHtIsnj53JJ7XBsdOGHuLNjoI/view?usp=sharing" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-lora-7b-64k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b-64k" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-LoRA-13B-16K</td> <td align="center">基座模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-13b-hf" rel="nofollow">Llama-2-13B-hf</a></td> <td align="center">1.5 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1VrfOJmhDnXxrXcdnfX00fA?pwd=4t2j" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1mSpigmHcN9YX1spa4QN3IPtx43Vfs55H/view?usp=share_link" 
rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-lora-13b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-13b-16k" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-LLaMA-2-LoRA-7B-16K</td> <td align="center">基座模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-7b-hf" rel="nofollow">Llama-2-7B-hf</a></td> <td align="center">1.1 GB</td> <td align="center"><a href="https://pan.baidu.com/s/14Jnm7QmcDx3XsK_NHZz6Uw?pwd=5b7i" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1yUdyQuBMAmxmUEAvGiKbjKuxTYPPI-or/view?usp=sharing" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-llama-2-lora-7b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-llama-2-lora-7b-16k" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-LoRA-13B-16K</td> <td align="center">指令模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-13b-hf" rel="nofollow">Llama-2-13B-hf</a></td> <td align="center">1.5 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1g42_X7Z0QWDyrrDqv2jifQ?pwd=bq7n" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1ppGNyMWnuLDcClXN7DBTbKxVehsn3Gd2/view?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-lora-13b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-13b-16k" rel="nofollow">[🤖ModelScope]</a></td> </tr> <tr> <td align="left">Chinese-Alpaca-2-LoRA-7B-16K</td> <td align="center">指令模型</td> <td align="center"><a href="https://huggingface.co/meta-llama/Llama-2-7b-hf" rel="nofollow">Llama-2-7B-hf</a></td> <td align="center">1.1 GB</td> <td align="center"><a href="https://pan.baidu.com/s/1E7GEZ6stp8EavhkhR06FwA?pwd=ewwy" rel="nofollow">[Baidu]</a> <a href="https://drive.google.com/file/d/1GTgDNfMdcQhHEAfMPaP-EOEk_fwDvNEK/view?usp=share_link" rel="nofollow">[Google]</a> <br><a href="https://huggingface.co/hfl/chinese-alpaca-2-lora-7b-16k" rel="nofollow">[🤗HF]</a> <a href="https://modelscope.cn/models/ChineseAlpacaGroup/chinese-alpaca-2-lora-7b-16k" rel="nofollow">[🤖ModelScope]</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-alert markdown-alert-important" dir="auto"><p class="markdown-alert-title" dir="auto"><svg class="octicon octicon-report mr-2" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v9.5A1.75 1.75 0 0 1 14.25 13H8.06l-2.573 2.573A1.458 1.458 0 0 1 3 14.543V13H1.75A1.75 1.75 0 0 1 0 11.25Zm1.75-.25a.25.25 0 0 0-.25.25v9.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h6.5a.25.25 0 0 0 .25-.25v-9.5a.25.25 0 0 0-.25-.25Zm7 2.25v2.5a.75.75 0 0 1-1.5 0v-2.5a.75.75 0 0 1 1.5 0ZM9 9a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path></svg>Important</p><p dir="auto">LoRA模型无法单独使用,必须与原版Llama-2进行合并才能转为完整模型。请通过以下方法对模型进行合并。</p> <ul dir="auto"> <li><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/online_conversion_zh"><strong>在线转换</strong></a>:Colab用户可利用本项目提供的notebook进行在线转换并量化模型</li> <li><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/manual_conversion_zh"><strong>手动转换</strong></a>:离线方式转换,生成不同格式的模型,以便进行量化或进一步精调</li> </ul> </div> <div class="markdown-heading" dir="auto"><h2 tabindex="-1" class="heading-element" 
dir="auto">推理与部署</h2><a id="user-content-推理与部署" class="anchor" aria-label="Permalink: 推理与部署" href="#推理与部署"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">本项目中的相关模型主要支持以下量化、推理和部署方式,具体内容请参考对应教程。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">工具</th> <th>特点</th> <th align="center">CPU</th> <th align="center">GPU</th> <th align="center">量化</th> <th align="center">GUI</th> <th align="center">API</th> <th align="center">vLLM<sup>§</sup></th> <th align="center">16K<sup>‡</sup></th> <th align="center">64K<sup>‡</sup></th> <th align="center">投机采样</th> <th align="center">教程</th> </tr> </thead> <tbody> <tr> <td align="left"><a href="https://github.com/ggerganov/llama.cpp"><strong>llama.cpp</strong></a></td> <td>丰富的量化选项和高效本地推理</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/llamacpp_zh">link</a></td> </tr> <tr> <td align="left"><a href="https://github.com/huggingface/transformers"><strong>🤗Transformers</strong></a></td> <td>原生transformers推理接口</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/inference_with_transformers_zh">link</a></td> </tr> <tr> <td align="left"><a href="https://colab.research.google.com/drive/1yu0eZ3a66by8Zqm883LLtRQrguBAb9MR?usp=sharing" rel="nofollow"><strong>Colab Demo</strong></a></td> <td>在Colab中启动交互界面</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center"><a href="https://colab.research.google.com/drive/1yu0eZ3a66by8Zqm883LLtRQrguBAb9MR?usp=sharing" rel="nofollow">link</a></td> </tr> <tr> <td align="left"><a href="https://platform.openai.com/docs/api-reference" rel="nofollow"><strong>仿OpenAI API调用</strong></a></td> <td>仿OpenAI API接口的服务器Demo</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/api_calls_zh">link</a></td> </tr> <tr> <td align="left"><a href="https://github.com/oobabooga/text-generation-webui"><strong>text-generation-webui</strong></a></td> <td>前端Web UI界面的部署方式</td> <td align="center">✅</td> <td align="center">✅</td> <td 
align="center">✅</td> <td align="center">✅</td> <td align="center">✅<sup>†</sup></td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">❌</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/text-generation-webui_zh">link</a></td> </tr> <tr> <td align="left"><a href="https://github.com/hwchase17/langchain"><strong>LangChain</strong></a></td> <td>适合二次开发的大模型应用开源框架</td> <td align="center">✅<sup>†</sup></td> <td align="center">✅</td> <td align="center">✅<sup>†</sup></td> <td align="center">❌</td> <td align="center">❌</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/langchain_zh">link</a></td> </tr> <tr> <td align="left"><a href="https://github.com/imartinez/privateGPT"><strong>privateGPT</strong></a></td> <td>基于LangChain的多文档本地问答框架</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">❌</td> <td align="center">❌</td> <td align="center">✅</td> <td align="center">❌</td> <td align="center">❌</td> <td align="center"><a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/privategpt_zh">link</a></td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-alert markdown-alert-note" dir="auto"><p class="markdown-alert-title" dir="auto"><svg class="octicon octicon-info mr-2" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="M0 8a8 8 0 1 1 16 0A8 8 0 0 1 0 8Zm8-6.5a6.5 6.5 0 1 0 0 13 6.5 6.5 0 0 0 0-13ZM6.5 7.75A.75.75 0 0 1 7.25 7h1a.75.75 0 0 1 .75.75v2.75h.25a.75.75 0 0 1 0 1.5h-2a.75.75 0 0 1 0-1.5h.25v-2h-.25a.75.75 0 0 1-.75-.75ZM8 6a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"></path></svg>Note</p><p dir="auto"><sup>†</sup> 工具支持该特性,但教程中未实现,详细说明请参考对应官方文档<br> <sup>‡</sup> 指是否支持长上下文版本模型(需要第三方库支持自定义RoPE)<br> <sup>§</sup> vLLM后端不支持长上下文版本模型<br></p> </div> <div class="markdown-heading" dir="auto"><h2 tabindex="-1" class="heading-element" dir="auto">系统效果</h2><a id="user-content-系统效果" class="anchor" aria-label="Permalink: 系统效果" href="#系统效果"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">为了评测相关模型的效果,本项目分别进行了生成效果评测和客观效果评测(NLU类),从不同角度对大模型进行评估。需要注意的是,综合评估大模型能力仍然是亟待解决的重要课题,单个数据集的结果并不能综合评估模型性能。推荐用户在自己关注的任务上进行测试,选择适配相关任务的模型。</p> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">生成效果评测</h3><a id="user-content-生成效果评测" class="anchor" aria-label="Permalink: 生成效果评测" href="#生成效果评测"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 
## System Performance

To evaluate the models, this project ran both generation-quality evaluation and objective (NLU-style) evaluation, assessing the LLMs from different angles. Note that comprehensively evaluating LLM capability is still an open problem, and results on a single dataset cannot fully characterize model performance. Users are encouraged to test on the tasks they care about and choose the model that fits those tasks.

### Generation quality evaluation

To give a more direct sense of generation quality, this project runs an online model arena modeled on the [Fastchat Chatbot Arena](https://chat.lmsys.org/?arena), where you can browse and rate model responses. The arena provides metrics such as win rate and Elo score, along with pairwise win rates between models. The question bank comes from [the 200 questions manually curated in the first-generation project](https://github.com/ymcui/Chinese-LLaMA-Alpaca/tree/main/examples/f16-p7b-p13b-33b), plus additional questions added on top. Generated responses are stochastic and affected by decoding hyperparameters, random seeds, and other factors, so the evaluation is not strictly rigorous; treat the results as a reference only, and feel free to try the models yourself. For sample outputs, see the [examples directory](/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/examples).

**⚔️ Model arena: http://llm-arena.ymcui.com**

| System | Win rate (no ties) ↓ | Elo score |
| :--- | :---: | :---: |
| **Chinese-Alpaca-2-13B-16K** | 86.84% | 1580 |
| **Chinese-Alpaca-2-13B** | 72.01% | 1579 |
| [Chinese-Alpaca-Pro-33B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 64.87% | 1548 |
| **Chinese-Alpaca-2-7B** | 64.11% | 1572 |
| [Chinese-Alpaca-Pro-7B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 62.05% | 1500 |
| **Chinese-Alpaca-2-7B-16K** | 61.67% | 1540 |
| [Chinese-Alpaca-Pro-13B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 61.26% | 1567 |
| [Chinese-Alpaca-Plus-33B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 31.29% | 1401 |
| [Chinese-Alpaca-Plus-13B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 23.43% | 1329 |
| [Chinese-Alpaca-Plus-7B](https://github.com/ymcui/Chinese-LLaMA-Alpaca) | 20.92% | 1379 |

> [!NOTE]
> Results as of September 1, 2023. For the latest results, visit the [**⚔️Arena**](http://llm-arena.ymcui.com/).

### Objective evaluation: C-Eval
[C-Eval](https://cevalbenchmark.com) is a comprehensive Chinese foundation-model evaluation suite; its validation and test sets contain 1.3K and 12.3K multiple-choice questions respectively, covering 52 subjects. Results are reported as "zero-shot / 5-shot". For C-Eval inference code, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/ceval_zh).

| LLaMA Models | Valid | Test | Alpaca Models | Valid | Test |
| :--- | :---: | :---: | :--- | :---: | :---: |
| **Chinese-LLaMA-2-13B** | 40.6 / 42.7 | 38.0 / 41.6 | **Chinese-Alpaca-2-13B** | 44.3 / 45.9 | 42.6 / 44.0 |
| **Chinese-LLaMA-2-7B** | 28.2 / 36.0 | 30.3 / 34.2 | **Chinese-Alpaca-2-7B** | 41.3 / 42.9 | 40.3 / 39.5 |
| Chinese-LLaMA-Plus-33B | 37.4 / 40.0 | 35.7 / 38.3 | Chinese-Alpaca-Plus-33B | 46.5 / 46.3 | 44.9 / 43.5 |
| Chinese-LLaMA-Plus-13B | 27.3 / 34.0 | 27.8 / 33.3 | Chinese-Alpaca-Plus-13B | 43.3 / 42.4 | 41.5 / 39.9 |
| Chinese-LLaMA-Plus-7B | 27.3 / 28.3 | 26.9 / 28.4 | Chinese-Alpaca-Plus-7B | 36.7 / 32.9 | 36.4 / 32.3 |

### Objective evaluation: CMMLU

[CMMLU](https://github.com/haonan-li/CMMLU) is another comprehensive Chinese evaluation dataset, designed to assess language models' knowledge and reasoning in Chinese contexts; it covers 67 topics from basic subjects to advanced professional levels, with 11.5K multiple-choice questions in total. For CMMLU inference code, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/cmmlu_zh).
| LLaMA Models | Test (0/few-shot) | Alpaca Models | Test (0/few-shot) |
| :--- | :---: | :--- | :---: |
| **Chinese-LLaMA-2-13B** | 38.9 / 42.5 | **Chinese-Alpaca-2-13B** | 43.2 / 45.5 |
| **Chinese-LLaMA-2-7B** | 27.9 / 34.1 | **Chinese-Alpaca-2-7B** | 40.0 / 41.8 |
| Chinese-LLaMA-Plus-33B | 35.2 / 38.8 | Chinese-Alpaca-Plus-33B | 46.6 / 45.3 |
| Chinese-LLaMA-Plus-13B | 29.6 / 34.0 | Chinese-Alpaca-Plus-13B | 40.6 / 39.9 |
| Chinese-LLaMA-Plus-7B | 25.4 / 26.3 | Chinese-Alpaca-Plus-7B | 36.8 / 32.6 |

### Long-context model evaluation

[LongBench](https://github.com/THUDM/LongBench) is a benchmark for long-text understanding in LLMs, consisting of 20 tasks in 6 categories; most tasks average 5K-15K in length, with about 4.75K test samples in total. Below are the results of this project's long-context models on the Chinese tasks (including code tasks). For LongBench inference code, see this project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/longbench_zh).

| Models | Single-doc QA | Multi-doc QA | Summarization | Few-shot learning | Code completion | Synthetic tasks | Avg |
| :--- | :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| **Chinese-Alpaca-2-7B-64K** | 44.7 | 28.1 | 14.4 | 39.0 | 44.6 | 5.0 | 29.3 |
| **Chinese-LLaMA-2-7B-64K** | 27.2 | 16.4 | 6.5 | 33.0 | 7.8 | 5.0 | 16.0 |
| **Chinese-Alpaca-2-13B-16K** | 47.9 | 26.7 | 13.0 | 22.3 | 46.6 | 21.5 | 29.7 |
| Chinese-Alpaca-2-13B | 38.4 | 20.0 | 11.9 | 17.3 | 46.5 | 8.0 | 23.7 |
<td align="center">9.0</td> <td align="center">28.6</td> </tr> <tr> <td>Chinese-Alpaca-2-7B</td> <td align="center">34.0</td> <td align="center">17.4</td> <td align="center">11.8</td> <td align="center">21.3</td> <td align="center">50.3</td> <td align="center">4.5</td> <td align="center">23.2</td> </tr> <tr> <td><strong>Chinese-LLaMA-2-13B-16K</strong></td> <td align="center">36.7</td> <td align="center">17.7</td> <td align="center">3.1</td> <td align="center">29.8</td> <td align="center">13.8</td> <td align="center">3.0</td> <td align="center">17.3</td> </tr> <tr> <td>Chinese-LLaMA-2-13B</td> <td align="center">28.3</td> <td align="center">14.4</td> <td align="center">4.6</td> <td align="center">16.3</td> <td align="center">10.4</td> <td align="center">5.4</td> <td align="center">13.2</td> </tr> <tr> <td><strong>Chinese-LLaMA-2-7B-16K</strong></td> <td align="center">33.2</td> <td align="center">15.9</td> <td align="center">6.5</td> <td align="center">23.5</td> <td align="center">10.3</td> <td align="center">5.3</td> <td align="center">15.8</td> </tr> <tr> <td>Chinese-LLaMA-2-7B</td> <td align="center">19.0</td> <td align="center">13.9</td> <td align="center">6.4</td> <td align="center">11.0</td> <td align="center">11.0</td> <td align="center">4.7</td> <td align="center">11.0</td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">量化效果评测</h3><a id="user-content-量化效果评测" class="anchor" aria-label="Permalink: 量化效果评测" href="#量化效果评测"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">以Chinese-LLaMA-2-7B为例,对比不同精度下的模型大小、PPL(困惑度)、C-Eval效果,方便用户了解量化精度损失。PPL以4K上下文大小计算,C-Eval汇报的是valid集合上zero-shot和5-shot结果。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">精度</th> <th align="center">模型大小</th> <th align="center">PPL</th> <th align="center">C-Eval</th> </tr> </thead> <tbody> <tr> <td align="left">FP16</td> <td align="center">12.9 GB</td> <td align="center">9.373</td> <td align="center">28.2 / 36.0</td> </tr> <tr> <td align="left">8-bit量化</td> <td align="center">6.8 GB</td> <td align="center">9.476</td> <td align="center">26.8 / 35.4</td> </tr> <tr> <td align="left">4-bit量化</td> <td align="center">3.7 GB</td> <td align="center">10.132</td> <td align="center">25.5 / 32.8</td> </tr> </tbody> </table></markdown-accessiblity-table> <p dir="auto">特别地,以下是在llama.cpp下不同量化方法的评测数据,供用户参考,速度以ms/tok计,测试设备为M1 Max。具体细节见<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/llamacpp_zh#%E5%85%B3%E4%BA%8E%E9%87%8F%E5%8C%96%E6%96%B9%E6%B3%95%E9%80%89%E6%8B%A9%E5%8F%8A%E6%8E%A8%E7%90%86%E9%80%9F%E5%BA%A6">📖GitHub Wiki</a></p> <markdown-accessiblity-table><table> <thead> <tr> <th>llama.cpp</th> <th align="right">F16</th> <th align="right">Q2_K</th> <th align="right">Q3_K</th> <th align="right">Q4_0</th> <th align="right">Q4_1</th> <th align="right">Q4_K</th> <th 
align="right">Q5_0</th> <th align="right">Q5_1</th> <th align="right">Q5_K</th> <th align="right">Q6_K</th> <th align="right">Q8_0</th> </tr> </thead> <tbody> <tr> <td>PPL</td> <td align="right">9.128</td> <td align="right">11.107</td> <td align="right">9.576</td> <td align="right">9.476</td> <td align="right">9.576</td> <td align="right">9.240</td> <td align="right">9.156</td> <td align="right">9.213</td> <td align="right">9.168</td> <td align="right">9.133</td> <td align="right">9.129</td> </tr> <tr> <td>Size</td> <td align="right">12.91G</td> <td align="right">2.41G</td> <td align="right">3.18G</td> <td align="right">3.69G</td> <td align="right">4.08G</td> <td align="right">3.92G</td> <td align="right">4.47G</td> <td align="right">4.86G</td> <td align="right">4.59G</td> <td align="right">5.30G</td> <td align="right">6.81G</td> </tr> <tr> <td>CPU Speed</td> <td align="right">117</td> <td align="right">42</td> <td align="right">51</td> <td align="right">39</td> <td align="right">44</td> <td align="right">43</td> <td align="right">48</td> <td align="right">51</td> <td align="right">50</td> <td align="right">54</td> <td align="right">65</td> </tr> <tr> <td>GPU Speed</td> <td align="right">53</td> <td align="right">19</td> <td align="right">21</td> <td align="right">17</td> <td align="right">18</td> <td align="right">20</td> <td align="right">x</td> <td align="right">x</td> <td align="right">25</td> <td align="right">26</td> <td align="right">x</td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">投机采样加速效果评测</h3><a id="user-content-投机采样加速效果评测" class="anchor" aria-label="Permalink: 投机采样加速效果评测" href="#投机采样加速效果评测"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">通过投机采样方法并借助Chinese-LLaMA-2-1.3B和Chinese-Alpaca-2-1.3B,可以分别加速7B、13B的LLaMA和Alpaca模型的推理速度。以下是使用<a href="/ymcui/Chinese-LLaMA-Alpaca-2/blob/main/scripts/inference/speculative_sample.py">投机采样脚本</a>在1*A40-48G上解码<a href="#%E7%94%9F%E6%88%90%E6%95%88%E6%9E%9C%E8%AF%84%E6%B5%8B">生成效果评测</a>中的问题测得的平均速度(速度以ms/token计,模型均为fp16精度),供用户参考。详细说明见<a href="https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/inference_with_transformers_zh#%E6%8A%95%E6%9C%BA%E9%87%87%E6%A0%B7%E8%A7%A3%E7%A0%81">📖GitHub Wiki</a>。</p> <markdown-accessiblity-table><table> <thead> <tr> <th align="left">草稿模型</th> <th align="center">草稿模型速度</th> <th align="left">目标模型</th> <th align="center">目标模型速度</th> <th align="center">投机采样速度(加速比)</th> </tr> </thead> <tbody> <tr> <td align="left">Chinese-LLaMA-2-1.3B</td> <td align="center">7.6</td> <td align="left">Chinese-LLaMA-2-7B</td> <td align="center">49.3</td> <td align="center">36.0(1.37x)</td> </tr> <tr> <td align="left">Chinese-LLaMA-2-1.3B</td> <td align="center">7.6</td> <td align="left">Chinese-LLaMA-2-13B</td> <td align="center">66.0</td> <td align="center">47.1(1.40x)</td> </tr> <tr> <td 
align="left">Chinese-Alpaca-2-1.3B</td> <td align="center">8.1</td> <td align="left">Chinese-Alpaca-2-7B</td> <td align="center">50.2</td> <td align="center">34.9(1.44x)</td> </tr> <tr> <td align="left">Chinese-Alpaca-2-1.3B</td> <td align="center">8.2</td> <td align="left">Chinese-Alpaca-2-13B</td> <td align="center">67.0</td> <td align="center">41.6(1.61x)</td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h3 tabindex="-1" class="heading-element" dir="auto">人类偏好对齐(RLHF)版本评测</h3><a id="user-content-人类偏好对齐rlhf版本评测" class="anchor" aria-label="Permalink: 人类偏好对齐(RLHF)版本评测" href="#人类偏好对齐rlhf版本评测"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">对齐水平</h4><a id="user-content-对齐水平" class="anchor" aria-label="Permalink: 对齐水平" href="#对齐水平"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">为评估中文模型与人类价值偏好对齐程度,我们自行构建了评测数据集,覆盖了道德、色情、毒品、暴力等人类价值偏好重点关注的多个方面。实验结果以价值体现正确率进行呈现(体现正确价值观题目数 / 总题数)。</p> <markdown-accessiblity-table><table> <thead> <tr> <th>Alpaca Models</th> <th align="center">Accuracy</th> <th>Alpaca Models</th> <th align="center">Accuracy</th> </tr> </thead> <tbody> <tr> <td>Chinese-Alpaca-2-1.3B</td> <td align="center">79.3%</td> <td>Chinese-Alpaca-2-7B</td> <td align="center">88.3%</td> </tr> <tr> <td><strong>Chinese-Alpaca-2-1.3B-RLHF</strong></td> <td align="center">95.8%</td> <td><strong>Chinese-Alpaca-2-7B-RLHF</strong></td> <td align="center">97.5%</td> </tr> </tbody> </table></markdown-accessiblity-table> <div class="markdown-heading" dir="auto"><h4 tabindex="-1" class="heading-element" dir="auto">客观效果评测:C-Eval &amp; CMMLU</h4><a id="user-content-客观效果评测c-eval--cmmlu" class="anchor" aria-label="Permalink: 客观效果评测:C-Eval &amp; CMMLU" href="#客观效果评测c-eval--cmmlu"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 
#### Objective Evaluation: C-Eval & CMMLU

| Alpaca Models | C-Eval (0/few-shot) | CMMLU (0/few-shot) |
| --- | :---: | :---: |
| Chinese-Alpaca-2-1.3B | 23.8 / 26.8 | 24.8 / 25.1 |
| Chinese-Alpaca-2-7B | 42.1 / 41.0 | 40.0 / 41.8 |
| **Chinese-Alpaca-2-1.3B-RLHF** | 23.6 / 27.1 | 24.9 / 25.0 |
| **Chinese-Alpaca-2-7B-RLHF** | 40.6 / 41.2 | 39.5 / 41.0 |

## Training and Fine-tuning

### Pre-training

- Starting from the original Llama-2, the Chinese-LLaMA-2 base models were obtained through incremental training on large-scale unlabeled data.
- The training data is the same as that used for the Plus models in the first-generation project, about 120 GB of plain text in total.
- The training code is adapted from [run_clm.py](https://github.com/huggingface/transformers/blob/main/examples/pytorch/language-modeling/run_clm.py) in 🤗transformers; see the [📖pre-training script wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/pt_scripts_zh) for usage, and the sketch after this list for the overall flow.
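Below is a minimal sketch of the run_clm.py-style flow referenced above, assuming a plain-text corpus under `corpus/` and placeholder model/tokenizer paths. The project's actual script additionally trains LoRA adapters (see FAQ Question 4) and handles the extended Chinese tokenizer, which this sketch only gestures at with `resize_token_embeddings`.

```python
# Sketch of run_clm.py-style incremental pre-training (placeholder paths;
# the project's real script trains LoRA adapters on top of this flow).
from datasets import load_dataset
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          DataCollatorForLanguageModeling, Trainer,
                          TrainingArguments)

tokenizer = AutoTokenizer.from_pretrained("path/to/chinese-llama-2-tokenizer")
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
model.resize_token_embeddings(len(tokenizer))   # account for added Chinese tokens

raw = load_dataset("text", data_files={"train": "corpus/*.txt"})["train"]

block_size = 1024
def tokenize_and_group(batch):
    # Concatenate everything, then cut into fixed-size causal-LM blocks.
    flat = [t for seq in tokenizer(batch["text"])["input_ids"] for t in seq]
    total = (len(flat) // block_size) * block_size
    return {"input_ids": [flat[i:i + block_size] for i in range(0, total, block_size)]}

train_ds = raw.map(tokenize_and_group, batched=True, remove_columns=["text"])

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="out-pt", per_device_train_batch_size=1,
                           gradient_accumulation_steps=8, num_train_epochs=1,
                           fp16=True, logging_steps=50),
    train_dataset=train_ds,
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),  # labels = input_ids
)
trainer.train()
```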
### Instruction Fine-tuning

- Starting from Chinese-LLaMA-2, the Chinese-Alpaca-2 models were obtained through further fine-tuning on labeled instruction data.
- The training data is the instruction data used by the Pro models in the first-generation project, about 5 million instructions in total (slightly more than in the first generation).
- The training code adapts the dataset-processing parts of the [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) project; see the [📖instruction fine-tuning script wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/sft_scripts_zh) for usage. A prompt-format sketch follows this list.
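The Alpaca-2 models follow the Llama-2 chat prompt format. Here is a small sketch of building a single-turn prompt; the bilingual system prompt string is taken from the project's inference scripts and should be verified against the wiki.

```python
# Sketch: building a Llama-2-chat-style prompt for Chinese-Alpaca-2.
# The system prompt string follows the project's scripts; verify against the wiki.
SYSTEM_PROMPT = "You are a helpful assistant. 你是一个乐于助人的助手。"
TEMPLATE = "[INST] <<SYS>>\n{system}\n<</SYS>>\n\n{instruction} [/INST]"

def build_prompt(instruction: str, system: str = SYSTEM_PROMPT) -> str:
    """Format a single-turn instruction in the Llama-2 chat template."""
    return TEMPLATE.format(system=system, instruction=instruction)

print(build_prompt("为什么天空是蓝色的?"))
```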
### RLHF Fine-tuning

- Starting from the Chinese-Alpaca-2 models, the Chinese-Alpaca-2-RLHF models were obtained through human-preference alignment using preference data and the PPO algorithm.
- The training data was sampled from human preference data in several open-source projects together with this project's instruction fine-tuning data: about 69.5K samples for the reward-model stage and about 25.6K for the reinforcement-learning stage.
- The training code is built on [DeepSpeed-Chat](https://github.com/microsoft/DeepSpeedExamples/tree/master/applications/DeepSpeed-Chat); see the [📖reward model wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/rm_zh) and the [📖reinforcement learning wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/rl_zh) for the full workflow.

## FAQ

Please check the FAQ for an existing solution before filing an issue. For the specific questions and answers, see the project's [📖GitHub Wiki](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2/wiki/faq_zh).

```
Question 1: What are the differences between this project and the first-generation project?
Question 2: Can the models be used commercially?
Question 3: Do you accept third-party pull requests?
Question 4: Why LoRA instead of full-parameter pre-training?
Question 5: Do the second-generation models support the tools that support first-generation LLaMA?
Question 6: Is Chinese-Alpaca-2 trained from Llama-2-Chat?
Question 7: Why does fine-tuning Chinese-Alpaca-2-7B run out of memory with 24 GB of VRAM?
Question 8: Can the 16K long-context models replace the standard models?
Question 9: How should results on third-party public leaderboards be interpreted?
Question 10: Will there be 34B or 70B models?
Question 11: Why is the long-context model 16K rather than 32K or 100K?
Question 12: Why does the Alpaca model say it is ChatGPT?
Question 13: Why is adapter_model.bin under pt_lora_model or sft_lora_model only a few hundred KB?
```

## Citation

If you use the resources from this project, please cite the project's technical report: https://arxiv.org/abs/2304.08177

```
@article{Chinese-LLaMA-Alpaca,
  title={Efficient and Effective Text Encoding for Chinese LLaMA and Alpaca},
  author={Cui, Yiming and Yang, Ziqing and Yao, Xin},
  journal={arXiv preprint arXiv:2304.08177},
  url={https://arxiv.org/abs/2304.08177},
  year={2023}
}
```

## Acknowledgments

This project is a secondary development based mainly on the following open-source projects; we thank the related projects and their research and development staff.

- [Llama-2 *by Meta*](https://github.com/facebookresearch/llama)
- [llama.cpp *by @ggerganov*](https://github.com/ggerganov/llama.cpp)
- [FlashAttention-2 *by Dao-AILab*](https://github.com/Dao-AILab/flash-attention)

We also thank the contributors of Chinese-LLaMA-Alpaca (the first-generation project) and the [related projects and people](https://github.com/ymcui/Chinese-LLaMA-Alpaca#%E8%87%B4%E8%B0%A2).
class="markdown-heading" dir="auto"><h2 tabindex="-1" class="heading-element" dir="auto">免责声明</h2><a id="user-content-免责声明" class="anchor" aria-label="Permalink: 免责声明" href="#免责声明"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">本项目基于由Meta发布的Llama-2模型进行开发,使用过程中请严格遵守Llama-2的开源许可协议。如果涉及使用第三方代码,请务必遵从相关的开源许可协议。模型生成的内容可能会因为计算方法、随机因素以及量化精度损失等影响其准确性,因此,本项目不对模型输出的准确性提供任何保证,也不会对任何因使用相关资源和输出结果产生的损失承担责任。如果将本项目的相关模型用于商业用途,开发者应遵守当地的法律法规,确保模型输出内容的合规性,本项目不对任何由此衍生的产品或服务承担责任。</p> <details> <summary><b>局限性声明</b></summary> <p dir="auto">虽然本项目中的模型具备一定的中文理解和生成能力,但也存在局限性,包括但不限于:</p> <ul dir="auto"> <li>可能会产生不可预测的有害内容以及不符合人类偏好和价值观的内容</li> <li>由于算力和数据问题,相关模型的训练并不充分,中文理解能力有待进一步提升</li> <li>暂时没有在线可互动的demo(注:用户仍然可以自行在本地部署和体验)</li> </ul> </details> <div class="markdown-heading" dir="auto"><h2 tabindex="-1" class="heading-element" dir="auto">问题反馈</h2><a id="user-content-问题反馈" class="anchor" aria-label="Permalink: 问题反馈" href="#问题反馈"><svg class="octicon octicon-link" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path></svg></a></div> <p dir="auto">如有疑问,请在GitHub Issue中提交。礼貌地提出问题,构建和谐的讨论社区。</p> <ul dir="auto"> <li>在提交问题之前,请先查看FAQ能否解决问题,同时建议查阅以往的issue是否能解决你的问题。</li> <li>提交问题请使用本项目设置的Issue模板,以帮助快速定位具体问题。</li> <li>重复以及与本项目无关的issue会被<a href="https://github.com/marketplace/stale">stable-bot</a>处理,敬请谅解。</li> </ul> </article></div></div></div></div></div> <!-- --> <!-- --> <script type="application/json" id="__PRIMER_DATA_:R0:__">{"resolvedServerColorMode":"day"}</script></div> </react-partial> <input type="hidden" data-csrf="true" value="HqDqhZPDPZVLYLmqKPlIRyMjnHRfeJt1VqGIs6D1guUWyz8mic706HzANa7gldk1Ghj80qz0cnKVtIL6vShpeA==" /> </div> <div data-view-component="true" class="Layout-sidebar"> <div class="BorderGrid about-margin" data-pjax> <div class="BorderGrid-row"> <div class="BorderGrid-cell"> <div class="hide-sm hide-md"> <h2 class="mb-3 h4">About</h2> <p class="f4 my-3"> 中文LLaMA-2 &amp; Alpaca-2大模型二期项目 + 64K超长上下文模型 (Chinese LLaMA-2 &amp; Alpaca-2 LLMs with 64K long context models) </p> <h3 class="sr-only">Topics</h3> <div class="my-3"> <div class="f6"> <a href="/topics/nlp" title="Topic: nlp" data-view-component="true" class="topic-tag topic-tag-link"> nlp </a> <a href="/topics/yarn" title="Topic: yarn" data-view-component="true" class="topic-tag topic-tag-link"> yarn </a> <a href="/topics/llama" title="Topic: llama" data-view-component="true" 
class="topic-tag topic-tag-link"> llama </a> <a href="/topics/alpaca" title="Topic: alpaca" data-view-component="true" class="topic-tag topic-tag-link"> alpaca </a> <a href="/topics/64k" title="Topic: 64k" data-view-component="true" class="topic-tag topic-tag-link"> 64k </a> <a href="/topics/large-language-models" title="Topic: large-language-models" data-view-component="true" class="topic-tag topic-tag-link"> large-language-models </a> <a href="/topics/llm" title="Topic: llm" data-view-component="true" class="topic-tag topic-tag-link"> llm </a> <a href="/topics/rlhf" title="Topic: rlhf" data-view-component="true" class="topic-tag topic-tag-link"> rlhf </a> <a href="/topics/flash-attention" title="Topic: flash-attention" data-view-component="true" class="topic-tag topic-tag-link"> flash-attention </a> <a href="/topics/llama2" title="Topic: llama2" data-view-component="true" class="topic-tag topic-tag-link"> llama2 </a> <a href="/topics/llama-2" title="Topic: llama-2" data-view-component="true" class="topic-tag topic-tag-link"> llama-2 </a> <a href="/topics/alpaca-2" title="Topic: alpaca-2" data-view-component="true" class="topic-tag topic-tag-link"> alpaca-2 </a> <a href="/topics/alpaca2" title="Topic: alpaca2" data-view-component="true" class="topic-tag topic-tag-link"> alpaca2 </a> </div> </div> <h3 class="sr-only">Resources</h3> <div class="mt-2"> <a class="Link--muted" data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:readme&quot;}" href="#readme-ov-file"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book mr-2"> <path d="M0 1.75A.75.75 0 0 1 .75 1h4.253c1.227 0 2.317.59 3 1.501A3.743 3.743 0 0 1 11.006 1h4.245a.75.75 0 0 1 .75.75v10.5a.75.75 0 0 1-.75.75h-4.507a2.25 2.25 0 0 0-1.591.659l-.622.621a.75.75 0 0 1-1.06 0l-.622-.621A2.25 2.25 0 0 0 5.258 13H.75a.75.75 0 0 1-.75-.75Zm7.251 10.324.004-5.073-.002-2.253A2.25 2.25 0 0 0 5.003 2.5H1.5v9h3.757a3.75 3.75 0 0 1 1.994.574ZM8.755 4.75l-.004 7.322a3.752 3.752 0 0 1 1.992-.572H14.5v-9h-3.495a2.25 2.25 0 0 0-2.25 2.25Z"></path> </svg> Readme </a> </div> <h3 class="sr-only">License</h3> <div class="mt-2"> <a href="#Apache-2.0-1-ov-file" class="Link--muted" data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:license&quot;}" > <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-law mr-2"> <path d="M8.75.75V2h.985c.304 0 .603.08.867.231l1.29.736c.038.022.08.033.124.033h2.234a.75.75 0 0 1 0 1.5h-.427l2.111 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.006.005-.01.01-.045.04c-.21.176-.441.327-.686.45C14.556 10.78 13.88 11 13 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L12.178 4.5h-.162c-.305 0-.604-.079-.868-.231l-1.29-.736a.245.245 0 0 0-.124-.033H8.75V13h2.5a.75.75 0 0 1 0 1.5h-6.5a.75.75 0 0 1 0-1.5h2.5V3.5h-.984a.245.245 0 0 0-.124.033l-1.289.737c-.265.15-.564.23-.869.23h-.162l2.112 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.016.015-.045.04c-.21.176-.441.327-.686.45C4.556 10.78 3.88 11 3 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L2.178 
4.5H1.75a.75.75 0 0 1 0-1.5h2.234a.249.249 0 0 0 .125-.033l1.288-.737c.265-.15.564-.23.869-.23h.984V.75a.75.75 0 0 1 1.5 0Zm2.945 8.477c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L13 6.327Zm-10 0c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L3 6.327Z"></path> </svg> Apache-2.0 license </a> </div> <include-fragment src="/ymcui/Chinese-LLaMA-Alpaca-2/hovercards/citation/sidebar_partial?tree_name=main"> </include-fragment> <div class="mt-2"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/activity" data-view-component="true" class="Link Link--muted"><svg text="gray" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pulse mr-2"> <path d="M6 2c.306 0 .582.187.696.471L10 10.731l1.304-3.26A.751.751 0 0 1 12 7h3.25a.75.75 0 0 1 0 1.5h-2.742l-1.812 4.528a.751.751 0 0 1-1.392 0L6 4.77 4.696 8.03A.75.75 0 0 1 4 8.5H.75a.75.75 0 0 1 0-1.5h2.742l1.812-4.529A.751.751 0 0 1 6 2Z"></path> </svg> <span class="color-fg-muted">Activity</span></a> </div> <h3 class="sr-only">Stars</h3> <div class="mt-2"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/stargazers" data-view-component="true" class="Link Link--muted"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star mr-2"> <path d="M8 .25a.75.75 0 0 1 .673.418l1.882 3.815 4.21.612a.75.75 0 0 1 .416 1.279l-3.046 2.97.719 4.192a.751.751 0 0 1-1.088.791L8 12.347l-3.766 1.98a.75.75 0 0 1-1.088-.79l.72-4.194L.818 6.374a.75.75 0 0 1 .416-1.28l4.21-.611L7.327.668A.75.75 0 0 1 8 .25Zm0 2.445L6.615 5.5a.75.75 0 0 1-.564.41l-3.097.45 2.24 2.184a.75.75 0 0 1 .216.664l-.528 3.084 2.769-1.456a.75.75 0 0 1 .698 0l2.77 1.456-.53-3.084a.75.75 0 0 1 .216-.664l2.24-2.183-3.096-.45a.75.75 0 0 1-.564-.41L8 2.694Z"></path> </svg> <strong>7.2k</strong> stars</a> </div> <h3 class="sr-only">Watchers</h3> <div class="mt-2"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/watchers" data-view-component="true" class="Link Link--muted"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye mr-2"> <path d="M8 2c1.981 0 3.671.992 4.933 2.078 1.27 1.091 2.187 2.345 2.637 3.023a1.62 1.62 0 0 1 0 1.798c-.45.678-1.367 1.932-2.637 3.023C11.67 13.008 9.981 14 8 14c-1.981 0-3.671-.992-4.933-2.078C1.797 10.83.88 9.576.43 8.898a1.62 1.62 0 0 1 0-1.798c.45-.677 1.367-1.931 2.637-3.022C4.33 2.992 6.019 2 8 2ZM1.679 7.932a.12.12 0 0 0 0 .136c.411.622 1.241 1.75 2.366 2.717C5.176 11.758 6.527 12.5 8 12.5c1.473 0 2.825-.742 3.955-1.715 1.124-.967 1.954-2.096 2.366-2.717a.12.12 0 0 0 0-.136c-.412-.621-1.242-1.75-2.366-2.717C10.824 4.242 9.473 3.5 8 3.5c-1.473 0-2.825.742-3.955 1.715-1.124.967-1.954 2.096-2.366 2.717ZM8 10a2 2 0 1 1-.001-3.999A2 2 0 0 1 8 10Z"></path> </svg> <strong>78</strong> watching</a> </div> <h3 class="sr-only">Forks</h3> <div class="mt-2"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/forks" data-view-component="true" class="Link Link--muted"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo-forked mr-2"> <path d="M5 5.372v.878c0 .414.336.75.75.75h4.5a.75.75 0 0 0 .75-.75v-.878a2.25 2.25 0 1 1 1.5 0v.878a2.25 2.25 0 0 1-2.25 2.25h-1.5v2.128a2.251 2.251 0 1 1-1.5 0V8.5h-1.5A2.25 2.25 0 0 1 3.5 6.25v-.878a2.25 2.25 0 1 1 1.5 0ZM5 3.25a.75.75 0 1 0-1.5 0 .75.75 0 0 0 1.5 0Zm6.75.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5Zm-3 8.75a.75.75 0 1 0-1.5 0 .75.75 0 0 0 1.5 0Z"></path> </svg> 
<strong>574</strong> forks</a> </div> <div class="mt-2"> <a class="Link--muted" href="/contact/report-content?content_url=https%3A%2F%2Fgithub.com%2Fymcui%2FChinese-LLaMA-Alpaca-2&amp;report=ymcui+%28user%29"> Report repository </a> </div> </div> </div> </div> <div class="BorderGrid-row"> <div class="BorderGrid-cell"> <h2 class="h4 mb-3" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/releases" data-view-component="true" class="Link--primary no-underline Link">Releases <span title="8" data-view-component="true" class="Counter">8</span></a></h2> <a class="Link--primary d-flex no-underline" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" href="/ymcui/Chinese-LLaMA-Alpaca-2/releases/tag/v4.1"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-tag flex-shrink-0 mt-1 color-fg-success"> <path d="M1 7.775V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 0 1 0 2.474l-5.026 5.026a1.75 1.75 0 0 1-2.474 0l-6.25-6.25A1.752 1.752 0 0 1 1 7.775Zm1.5 0c0 .066.026.13.073.177l6.25 6.25a.25.25 0 0 0 .354 0l5.025-5.025a.25.25 0 0 0 0-.354l-6.25-6.25a.25.25 0 0 0-.177-.073H2.75a.25.25 0 0 0-.25.25ZM6 5a1 1 0 1 1 0 2 1 1 0 0 1 0-2Z"></path> </svg> <div class="ml-2 min-width-0"> <div class="d-flex"> <span class="css-truncate css-truncate-target text-bold mr-2" style="max-width: none;">中文羊驼大模型二期 v4.1</span> <span title="Label: Latest" data-view-component="true" class="Label Label--success flex-shrink-0"> Latest </span> </div> <div class="text-small color-fg-muted"><relative-time datetime="2024-01-23T05:51:47Z" class="no-wrap">Jan 23, 2024</relative-time></div> </div> </a> <div data-view-component="true" class="mt-3"> <a text="small" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" href="/ymcui/Chinese-LLaMA-Alpaca-2/releases" data-view-component="true" class="Link">+ 7 releases</a></div> </div> </div> <div class="BorderGrid-row" hidden> <div class="BorderGrid-cell"> <include-fragment src="/ymcui/Chinese-LLaMA-Alpaca-2/used_by_list" accept="text/fragment+html"> </include-fragment> </div> </div> <div class="BorderGrid-row"> <div class="BorderGrid-cell"> <h2 class="h4 mb-3"> <a href="/ymcui/Chinese-LLaMA-Alpaca-2/graphs/contributors" data-view-component="true" class="Link--primary no-underline Link d-flex flex-items-center">Contributors <span title="8" data-view-component="true" class="Counter ml-1">8</span></a></h2> <include-fragment src="/ymcui/Chinese-LLaMA-Alpaca-2/contributors_list?count=8&amp;current_repository=Chinese-LLaMA-Alpaca-2&amp;items_to_show=8" aria-busy="true" aria-label="Loading contributors"> <ul class="list-style-none d-flex flex-wrap mb-n2"> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar 
avatar-user mr-2" style="width:32px;height:32px;"></div> </li> <li class="mb-2 "> <div class="Skeleton avatar avatar-user mr-2" style="width:32px;height:32px;"></div> </li> </ul> </include-fragment> </div> </div> <div class="BorderGrid-row"> <div class="BorderGrid-cell"> <h2 class="h4 mb-3">Languages</h2> <div class="mb-2"> <span data-view-component="true" class="Progress"> <span style="background-color:#3572A5 !important;;width: 98.6%;" itemprop="keywords" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span> <span style="background-color:#89e051 !important;;width: 1.4%;" itemprop="keywords" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span> </span></div> <ul class="list-style-none"> <li class="d-inline"> <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/ymcui/Chinese-LLaMA-Alpaca-2/search?l=python" data-ga-click="Repository, language stats search click, location:repo overview"> <svg style="color:#3572A5;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2"> <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path> </svg> <span class="color-fg-default text-bold mr-1">Python</span> <span>98.6%</span> </a> </li> <li class="d-inline"> <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/ymcui/Chinese-LLaMA-Alpaca-2/search?l=shell" data-ga-click="Repository, language stats search click, location:repo overview"> <svg style="color:#89e051;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2"> <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path> </svg> <span class="color-fg-default text-bold mr-1">Shell</span> <span>1.4%</span> </a> </li> </ul> </div> </div> </div> </div> </div></div> </div> </div> </turbo-frame> </main> </div> </div> <footer class="footer pt-8 pb-6 f6 color-fg-muted p-responsive" role="contentinfo" > <h2 class='sr-only'>Footer</h2> <div class="d-flex flex-justify-center flex-items-center flex-column-reverse flex-lg-row flex-wrap flex-lg-nowrap"> <div class="d-flex flex-items-center flex-shrink-0 mx-2"> <a aria-label="Homepage" title="GitHub" class="footer-octicon mr-2" href="https://github.com"> <svg aria-hidden="true" height="24" viewBox="0 0 24 24" version="1.1" width="24" data-view-component="true" class="octicon octicon-mark-github"> <path d="M12 1C5.9225 1 1 5.9225 1 12C1 16.8675 4.14875 20.9787 8.52125 22.4362C9.07125 22.5325 9.2775 22.2025 9.2775 21.9137C9.2775 21.6525 9.26375 20.7862 9.26375 19.865C6.5 20.3737 5.785 19.1912 5.565 18.5725C5.44125 18.2562 4.905 17.28 4.4375 17.0187C4.0525 16.8125 3.5025 16.3037 4.42375 16.29C5.29 16.2762 5.90875 17.0875 6.115 17.4175C7.105 19.0812 8.68625 18.6137 9.31875 18.325C9.415 17.61 9.70375 17.1287 10.02 16.8537C7.5725 16.5787 5.015 15.63 5.015 11.4225C5.015 10.2262 5.44125 9.23625 6.1425 8.46625C6.0325 8.19125 5.6475 7.06375 6.2525 5.55125C6.2525 5.55125 7.17375 5.2625 9.2775 6.67875C10.1575 6.43125 11.0925 6.3075 12.0275 6.3075C12.9625 6.3075 13.8975 6.43125 14.7775 6.67875C16.8813 5.24875 17.8025 5.55125 17.8025 5.55125C18.4075 7.06375 18.0225 8.19125 17.9125 8.46625C18.6138 9.23625 19.04 10.2125 19.04 11.4225C19.04 15.6437 16.4688 16.5787 14.0213 16.8537C14.42 17.1975 14.7638 17.8575 14.7638 18.8887C14.7638 20.36 14.75 21.5425 14.75 21.9137C14.75 22.2025 14.9563 22.5462 15.5063 
22.4362C19.8513 20.9787 23 16.8537 23 12C23 5.9225 18.0775 1 12 1Z"></path> </svg> </a> <span> &copy; 2025 GitHub,&nbsp;Inc. </span> </div> <nav aria-label="Footer"> <h3 class="sr-only" id="sr-footer-heading">Footer navigation</h3> <ul class="list-style-none d-flex flex-justify-center flex-wrap mb-2 mb-lg-0" aria-labelledby="sr-footer-heading"> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to Terms&quot;,&quot;label&quot;:&quot;text:terms&quot;}" href="https://docs.github.com/site-policy/github-terms/github-terms-of-service" data-view-component="true" class="Link--secondary Link">Terms</a> </li> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to privacy&quot;,&quot;label&quot;:&quot;text:privacy&quot;}" href="https://docs.github.com/site-policy/privacy-policies/github-privacy-statement" data-view-component="true" class="Link--secondary Link">Privacy</a> </li> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to security&quot;,&quot;label&quot;:&quot;text:security&quot;}" href="https://github.com/security" data-view-component="true" class="Link--secondary Link">Security</a> </li> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to status&quot;,&quot;label&quot;:&quot;text:status&quot;}" href="https://www.githubstatus.com/" data-view-component="true" class="Link--secondary Link">Status</a> </li> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to docs&quot;,&quot;label&quot;:&quot;text:docs&quot;}" href="https://docs.github.com/" data-view-component="true" class="Link--secondary Link">Docs</a> </li> <li class="mx-2"> <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to contact&quot;,&quot;label&quot;:&quot;text:contact&quot;}" href="https://support.github.com?tags=dotcom-footer" data-view-component="true" class="Link--secondary Link">Contact</a> </li> <li class="mx-2" > <cookie-consent-link> <button type="button" class="Link--secondary underline-on-hover border-0 p-0 color-bg-transparent" data-action="click:cookie-consent-link#showConsentManagement" data-analytics-event="{&quot;location&quot;:&quot;footer&quot;,&quot;action&quot;:&quot;cookies&quot;,&quot;context&quot;:&quot;subfooter&quot;,&quot;tag&quot;:&quot;link&quot;,&quot;label&quot;:&quot;cookies_link_subfooter_footer&quot;}" > Manage cookies </button> </cookie-consent-link> </li> <li class="mx-2"> <cookie-consent-link> <button type="button" class="Link--secondary underline-on-hover border-0 p-0 color-bg-transparent" data-action="click:cookie-consent-link#showConsentManagement" data-analytics-event="{&quot;location&quot;:&quot;footer&quot;,&quot;action&quot;:&quot;dont_share_info&quot;,&quot;context&quot;:&quot;subfooter&quot;,&quot;tag&quot;:&quot;link&quot;,&quot;label&quot;:&quot;dont_share_info_link_subfooter_footer&quot;}" > Do not share my personal information </button> </cookie-consent-link> </li> </ul> </nav> </div> </footer> <ghcc-consent id="ghcc" class="position-fixed bottom-0 left-0" style="z-index: 999999" data-initial-cookie-consent-allowed="" data-cookie-consent-required="false"></ghcc-consent> <div id="ajax-error-message" class="ajax-error-message flash flash-error" hidden> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" 
data-view-component="true" class="octicon octicon-alert"> <path d="M6.457 1.047c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0 1 14.082 15H1.918a1.75 1.75 0 0 1-1.543-2.575Zm1.763.707a.25.25 0 0 0-.44 0L1.698 13.132a.25.25 0 0 0 .22.368h12.164a.25.25 0 0 0 .22-.368Zm.53 3.996v2.5a.75.75 0 0 1-1.5 0v-2.5a.75.75 0 0 1 1.5 0ZM9 11a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path> </svg> <button type="button" class="flash-close js-ajax-error-dismiss" aria-label="Dismiss error"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path> </svg> </button> You can’t perform that action at this time. </div> <template id="site-details-dialog"> <details class="details-reset details-overlay details-overlay-dark lh-default color-fg-default hx_rsm" open> <summary role="button" aria-label="Close dialog"></summary> <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast hx_rsm-dialog hx_rsm-modal"> <button class="Box-btn-octicon m-0 btn-octicon position-absolute right-0 top-0" type="button" aria-label="Close dialog" data-close-dialog> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x"> <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path> </svg> </button> <div class="octocat-spinner my-6 js-details-dialog-spinner"></div> </details-dialog> </details> </template> <div class="Popover js-hovercard-content position-absolute" style="display: none; outline: none;"> <div class="Popover-message Popover-message--bottom-left Popover-message--large Box color-shadow-large" style="width:360px;"> </div> </div> <template id="snippet-clipboard-copy-button"> <div class="zeroclipboard-container position-absolute right-0 top-0"> <clipboard-copy aria-label="Copy" class="ClipboardButton btn js-clipboard-copy m-2 p-0" data-copy-feedback="Copied!" 
data-tooltip-direction="w"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon m-2"> <path d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 0 1 0 1.5h-1.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-1.5a.75.75 0 0 1 1.5 0v1.5A1.75 1.75 0 0 1 9.25 16h-7.5A1.75 1.75 0 0 1 0 14.25Z"></path><path d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0 1 14.25 11h-7.5A1.75 1.75 0 0 1 5 9.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path> </svg> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-fg-success d-none m-2"> <path d="M13.78 4.22a.75.75 0 0 1 0 1.06l-7.25 7.25a.75.75 0 0 1-1.06 0L2.22 9.28a.751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018L6 10.94l6.72-6.72a.75.75 0 0 1 1.06 0Z"></path> </svg> </clipboard-copy> </div> </template> <template id="snippet-clipboard-copy-button-unpositioned"> <div class="zeroclipboard-container"> <clipboard-copy aria-label="Copy" class="ClipboardButton btn btn-invisible js-clipboard-copy m-2 p-0 d-flex flex-justify-center flex-items-center" data-copy-feedback="Copied!" data-tooltip-direction="w"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon"> <path d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 0 1 0 1.5h-1.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-1.5a.75.75 0 0 1 1.5 0v1.5A1.75 1.75 0 0 1 9.25 16h-7.5A1.75 1.75 0 0 1 0 14.25Z"></path><path d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0 1 14.25 11h-7.5A1.75 1.75 0 0 1 5 9.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path> </svg> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-fg-success d-none"> <path d="M13.78 4.22a.75.75 0 0 1 0 1.06l-7.25 7.25a.75.75 0 0 1-1.06 0L2.22 9.28a.751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018L6 10.94l6.72-6.72a.75.75 0 0 1 1.06 0Z"></path> </svg> </clipboard-copy> </div> </template> </div> <div id="js-global-screen-reader-notice" class="sr-only mt-n1" aria-live="polite" aria-atomic="true" ></div> <div id="js-global-screen-reader-notice-assertive" class="sr-only mt-n1" aria-live="assertive" aria-atomic="true"></div> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10