CINXE.COM
AIGC 先锋科技 - 个人中心 - 腾讯云开发者社区-腾讯云
<!DOCTYPE html><html munual-autotracker-init="" qct-pv-id="fAaLbd2Qcu4VB1lJn4LOo" qct-ip="8.222.208.146"><head><meta charSet="UTF-8"/><meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/><title>AIGC 先锋科技 - 个人中心 - 腾讯云开发者社区-腾讯云</title><meta name="keywords" content="开发者社区,云计算社区,云+社区,开发者技术分享社区,腾讯云社区"/><meta name="subjectTime" content="2024-07-07 01:20:23"/><meta name="description" content=""/><meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1, viewport-fit=cover"/><meta name="format-detection" content="telephone=no"/><link rel="canonical" href="https://cloud.tencent.com/developer/user/11196058"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/open_proj/proj_qcloud_v2/gateway/portal/css/global-20209142343.css"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/ui/cloud-community/build/base/base-202410111735.css"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/open_proj/proj_qcloud_v2/community-pc/build/AskDialog/AskDialog-202204021635.css?max_age=31536000"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/open_proj/proj_qcloud_v2/community-pc/build/AskDialog/AskDialog-202204021635.css?max_age=31536000"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/ui/community-pc/build/base/base-202410211524.css"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/ui/cloud-community/build/base/base-202410111735.css"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/open_proj/proj_qcloud_v2/community/portal/css/markdown-201810241044.css?max_age=31536000"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/ui/community-pc/build/UserCenter2/UserCenter2-202408291654.css"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/developer/styles/release/cropperjs/1.4.1/cropper.min.css?max_age=31536000"/><link rel="stylesheet" href="//cloudcache.tencent-cloud.cn/qcloud/draft-master/dist/draft-master-v2.0.142.d4s2ddo9sb.css?max_age=31536000"/><style media="screen">@supports (padding:max(0px)){.set-safe-area .com-main{bottom:calc(max(12px,constant(safe-area-inset-bottom)) + 50px);bottom:calc(max(12px,env(safe-area-inset-bottom)) + 50px)}.set-safe-area .com-main-simple-sec,.set-safe-area .com-main.without-tab-ft,.set-safe-area .com-main.without-ft{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}.set-safe-area .com-main-sec{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}.set-safe-area .com-m-footer,.set-safe-area .sa-fixed-btns{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}.set-safe-area .com-mobile-body{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}}@supports (padding:max(0px)){.set-safe-area .support-wrap,.set-safe-area div.body{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}.set-safe-area .com-responsive-no-ft div.body{bottom:max(12px,constant(safe-area-inset-bottom));bottom:max(12px,env(safe-area-inset-bottom))}}.doc-con .J-docShareModal{display: none;} .doc-con .J-docShareCopyTipModalMB{display: none} .with-focus+.com-main-simple-sec, .with-focus+.com-main,.with-focus+.com-body,.with-focus+.qa-body{top:100px} .qa-detail-ask-panel:after{display:none!important;} .sa-fixed-btns .c-btn-weak{background-color: #fff;} .qa-r-editor.draft-editor-host.rno-markdown{height: 290px;overflow-y:auto;} .uc-achievement{line-height:24px;margin-bottom:5px;white-space: initial;overflow:visible;text-overflow:initial} .uc-achievement .uc-achievement-icon{top:0;margin-top:0;}</style></head><body style="position:initial"><div id="react-root" class=""><div class=""><div class="cdc-header"><div class="cdc-header__placeholder"></div><div class="cdc-header__inner"><div class="cdc-header__top"><div class="cdc-header__top-left"><a href="/?from=20060&from_column=20060" target="_blank" class="cdc-header__top-logo"><i>腾讯云</i></a><div class="cdc-header__top-line"></div><a href="/developer" class="cdc-header__top-logo community"><i>开发者社区</i></a><div class="cdc-header__activity"><div id="cloud-header-product-container"></div></div></div><div class="cdc-header__top-operates"><a href="/document/product?from=20702&from_column=20702" target="_blank" class="cdc-header__link">文档</a><a href="/voc/?from=20703&from_column=20703" target="_blank" class="cdc-header__link">建议反馈</a><a href="https://console.cloud.tencent.com?from=20063&from_column=20063" target="_blank" class="cdc-header__link" track-click="{"areaId":102001,"subAreaId":1}">控制台</a><div class="cdc-header__account"><div class="cdc-header__account-inner"><button class="cdc-btn cdc-header__account-btn cdc-btn--primary">登录/注册</button></div></div></div></div><div class="cdc-header__bottom"><div class="cdc-header__bottom-nav"><a href="/developer" class="cdc-header__bottom-home">首页</a><div class="cdc-header__nav-list"><div class="cdc-header__nav-item">学习</div><div class="cdc-header__nav-item">活动</div><div class="cdc-header__nav-item">专区</div><div class="cdc-header__nav-item">工具</div></div><a href="/tvp?from=20154&from_column=20154" class="cdc-header__tvp" target="_blank">TVP</a><div class="cdc-header__activity"><a class="cdc-header__activity-tit" href="/act?from=20061&from_column=20061" target="_blank">最新优惠活动<div class="cdc-badge"><div class="cdc-badge-inner"><div class="cdc-badge-text"></div></div></div></a></div><div id="community-header-product-container"></div></div><div class="cdc-header__bottom-operates"><div class="cdc-header__search"><div class="cdc-search__wrap"><div class="cdc-search"><span class="cdc-search__text">文章/答案/技术大牛</span><button class="cdc-search__btn">搜索<i class="cdc-search__i search"></i></button></div><div class="cdc-search__dropdown"><div class="cdc-search__bar"><input type="text" class="cdc-search__bar-input" placeholder="文章/答案/技术大牛" value=""/><div class="cdc-search__bar-btns"><button class="cdc-search__btn">搜索<i class="cdc-search__i search"></i></button><button class="cdc-search__btn">关闭<i class="cdc-search__i clear"></i></button></div></div></div></div></div><div class="cdc-header__create"><span class="cdc-header__create-btn not-logged"><span class="cdc-svg-icon-con"><span class="cdc-svg-icon" style="line-height:1;color:#0052D9;width:16px;height:16px"><svg width="16" height="16" viewBox="0 0 16 16" fill="currentcolor" xmlns="http://www.w3.org/2000/svg"><path d="M14.2466 12.0145C14.1698 13.6258 12.8381 14.9131 11.2129 14.9131H11.1579H4.0927H4.03772C2.4125 14.9131 1.08014 13.6258 1.00334 12.0145H1V11.8668V4.07213V4.04627V3.89922H1.00334C1.08014 2.28732 2.4125 1 4.03772 1H9.6473V1.00069H10.0786L8.7688 2.10773H8.43888H7.7916H6.37904H4.03772C2.97234 2.10773 2.10445 2.9777 2.10445 4.04629V4.41869V4.4472V6.39498V11.4269V11.4309V11.8668C2.10445 12.9354 2.97234 13.8053 4.03772 13.8053H6.37904H8.87153H11.2129C12.2782 13.8053 13.1461 12.9355 13.1461 11.8668V11.466V11.454V9.5181V6.39364L14.2506 5.3051V11.8668V12.0145H14.2466ZM10.4324 7.15226L9.63146 7.99761C9.36577 8.2693 8.69326 8.95104 8.48066 9.17631C8.26726 9.40288 8.09039 9.58901 7.95061 9.73544C7.81079 9.88188 7.72667 9.96597 7.70083 9.98656C7.63321 10.0488 7.55703 10.1144 7.47022 10.1846C7.38412 10.2542 7.29404 10.3099 7.20063 10.3516C7.10722 10.4007 6.97072 10.459 6.79049 10.5305C6.61028 10.6001 6.42213 10.6676 6.22468 10.7339C6.02792 10.8002 5.84109 10.8571 5.66484 10.9061C5.48795 10.9538 5.3561 10.9863 5.2693 11.0009C5.08977 11.0214 4.96988 10.993 4.90956 10.9168C4.84931 10.8405 4.83276 10.7107 4.85924 10.5312C4.87315 10.4331 4.9043 10.292 4.95468 10.1078C5.00431 9.92297 5.05802 9.7315 5.11431 9.53341C5.1713 9.33526 5.22629 9.15179 5.27926 8.98484C5.33297 8.8179 5.37599 8.7026 5.40978 8.64032C5.44953 8.54357 5.49463 8.45413 5.54495 8.37399C5.59465 8.29379 5.66616 8.20503 5.75965 8.10766C5.79934 8.06588 5.89281 7.96649 6.03988 7.81018C6.18624 7.65311 6.80114 7.02774 7.02104 6.79783L7.75117 6.03524L8.56212 5.1899L10.6345 3.02466L12.5214 4.93874L10.4324 7.15226ZM13.816 3.58581C13.7166 3.68987 13.6272 3.78064 13.5483 3.85883C13.4694 3.93703 13.4006 4.0066 13.3423 4.06686C13.276 4.13643 13.2144 4.19738 13.1561 4.24903L11.2785 2.33569C11.3785 2.24025 11.4965 2.12565 11.6336 1.99115C11.7707 1.85668 11.8854 1.75061 11.9761 1.67242C12.0934 1.57708 12.2133 1.51013 12.3385 1.47109C12.4525 1.43529 12.5644 1.41805 12.6751 1.41876H12.7056C12.7665 1.42139 12.8268 1.42729 12.8851 1.43724C12.8838 1.4366 12.8811 1.43724 12.8798 1.4366C12.8811 1.4366 12.8838 1.4366 12.8851 1.43724C13.1376 1.48428 13.4019 1.62009 13.6265 1.83743C13.7511 1.95871 13.8524 2.09382 13.9259 2.23296C14.0346 2.43834 14.0863 2.65304 14.0763 2.8491C14.0763 2.87294 14.0783 2.89748 14.0783 2.92201C14.0783 3.03529 14.0571 3.14789 14.0154 3.26055C13.9737 3.37314 13.9067 3.48185 13.816 3.58581Z" fill="#0052D9"></path></svg></span></span>发布<span class="cdc-svg-icon-con cdc-header__create-btn-arrow"><span class="cdc-svg-icon" style="line-height:1;color:inherit;width:16px;height:16px"><svg width="16" height="16" viewBox="0 0 16 16" fill="currentcolor" xmlns="http://www.w3.org/2000/svg"><path d="M8.16377 4L9.57798 5.41421L14.5277 10.364L13.1135 11.7782L8.1638 6.829L3.21402 11.7782L1.7998 10.364L8.16377 4Z"></path></svg></span></span></span></div></div></div></div></div><div class="cdc-m-header with-focus"><div class="cdc-m-header__placeholder"></div><div class="cdc-m-header__inner"><div class="cdc-m-guider-banner is-sticky"><div class="cdc-m-guider-banner__guide-mvp" track-exposure="{"areaId":118000}" track-click="{"areaId":118000}"><div class="cdc-m-guider-banner__guide-mvp-text">精选内容/技术社群/优惠产品,<em>尽在小程序</em></div><div class="cdc-m-guider-banner__guide-mvp-btn">立即前往</div></div></div><div class="cdc-m-header__main"><div class="cdc-m-header__trigger"></div><div class="cdc-m-header__logo"><i class="cdc-m-header__logo-icon"></i></div><div class="cdc-m-header__search"><i class="cdc-m-header__search-icon"></i></div><div class="cdc-m-header__operate"><span class="cdc-m-header__operate-icon"></span></div></div></div></div><div class="com-sticky-header uc-sticky-header J-stickyBar hidden"><div class="com-inner"><div class="com-sticky-header-main"><span class="com-2-avatar large header-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg)"></span></span><div class="com-tab large"><div class="com-tab-hd large"><ul class="com-tab-ctrl"><li class="com-tab-item actived"><a href="javascript:;" data-state="activities" data-route="/activities">动态</a></li><li class="com-tab-item"><a href="/developer/user/11196058/column" data-state="column" data-route="/column">专栏</a></li><li class="com-tab-item"><a href="/developer/user/11196058/articles" data-state="articles" data-route="/articles">文章</a></li><li class="com-tab-item"><a href="/developer/user/11196058/questions" data-state="questions" data-route="/questions">问答</a></li><li class="com-tab-item"><a href="/developer/user/11196058/video" data-state="video" data-route="/video">视频</a></li><li class="com-tab-item"><a href="/developer/user/11196058/learning" data-state="learning" data-route="/learning">学习中心</a></li><li class="com-tab-item"><a href="/developer/user/11196058/salons" data-state="salons" data-route="/salons">沙龙</a></li><li class="com-tab-item"><a href="/developer/user/11196058/competition" data-state="competition" data-route="/competition">竞赛</a></li><li class="com-tab-item"><a href="/developer/user/11196058/inventories" data-state="inventories" data-route="/inventories">精选</a></li><li class="com-tab-item"><a href="/developer/user/11196058/favor/articles" data-state="favor/articles" data-route="/favor/articles">收藏/订阅/关注</a></li></ul></div></div></div></div></div><div class="J-body com-body uc-follow-user uc-follow-tag"><div class="uc-hero"><div class="uc-hero-inner"><div class="uc-hero-main"><span class="com-2-avatar xxlarge uc-hero-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/240/h/7000)"></span></span><h3 class="uc-hero-name">AIGC 先锋科技</h3><span class="com-2-level skin-2 uc-hero-level"><span class="text">LV0</span></span></div><a href="javascript:;" class="com-share-bubble-trigger com-opt-link uc-hero-share" hotrep="community.usercenter.home.hero.share_bubble"><span>分享</span></a><a href="javascript:;" class="com-opt-link link-warn uc-hero-report is-text">举报</a></div></div><div class="com-body-main J-bodyMain"><div class="com-2-layout"><div class="layout-main"><div class="com-tab large"><div class="com-tab-hd large"><ul class="com-tab-ctrl"><li class="com-tab-item actived"><a href="javascript:;" data-state="activities" data-route="/activities" hotrep="community.usercenter.home.tab.0">动态</a></li><li class="com-tab-item"><a href="javascript:;" data-state="column" data-route="/column" hotrep="community.usercenter.home.tab.1">专栏</a></li><li class="com-tab-item"><a href="javascript:;" data-state="articles" data-route="/articles" hotrep="community.usercenter.home.tab.2">文章</a></li><li class="com-tab-item"><a href="javascript:;" data-state="questions" data-route="/questions" hotrep="community.usercenter.home.tab.3">问答</a></li><li class="com-tab-item"><a href="javascript:;" data-state="video" data-route="/video" hotrep="community.usercenter.home.tab.4">视频</a></li><li class="com-tab-item"><a href="javascript:;" data-state="learning" data-route="/learning" hotrep="community.usercenter.home.tab.5">学习中心</a></li><li class="com-tab-item"><a href="javascript:;" data-state="salons" data-route="/salons" hotrep="community.usercenter.home.tab.6">沙龙</a></li><li class="com-tab-item"><a href="javascript:;" data-state="competition" data-route="/competition" hotrep="community.usercenter.home.tab.7">竞赛</a></li><li class="com-tab-item"><a href="javascript:;" data-state="inventories" data-route="/inventories" hotrep="community.usercenter.home.tab.8">精选</a></li><li class="com-tab-item"><a href="javascript:;" data-state="favor/articles" data-route="/favor/articles" hotrep="community.usercenter.home.tab.9">收藏/订阅/关注</a></li></ul><div class="c-nav-pro-mask" style="display:none"></div><div class="com-tab-hd-extra" track-click="{"areaId":116018}" track-exposure="{"areaId":116018}"><div class="com-search-insert"><button track-click="{"elementId":1}" class="com-search-insert-trigger">搜索</button><div class="com-3-nav-search-wrap"><div><div class="com-search-bar show focus"><label class="com-search"><input type="text" placeholder="找文章、答案、视频" class="search-input" value=""/><button class="search-btn">搜索</button><button class="close-btn">关闭</button></label><section class="com-search-dropdown only-one-group"><div class="com-search-dropdown-bd"><dl class="com-search-dropdown-group com-search-dropdown-history"><dt>搜索历史</dt><dd><div class="com-none" style="font-size:12px"><p class="com-none-cnt"><i class="com-i-warn-circle"></i>暂无搜索历史</p></div></dd></dl></div></section></div></div></div></div></div></div><div class="com-tab-bd"><div class="com-tab-panel"><div class="com-action-panels uc-action-panels" track-click="{"areaId":116001}" track-exposure="{"areaId":116001}"><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-12-09 14:32:55" title="2024-12-09 14:32:55" class="com-action-panel-time"> <span>3</span>天前<span class="com-v-box">2024-12-09 14:32:55</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2474935}" track-exposure="{"objectType":"article","objectId":2474935}"><a href="/developer/article/2474935" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">多角度视频描述:FIOVA基准推动LVLMs向人类水平迈进 !</h3><p class="com-3-article-panel-desc">大型语言模型(LLM)在自然语言处理(NLP)领域取得了重大进展,在文本生成 和问答等任务上表现出色。在这些进展的基础上,大型视觉语言模型(LVLMs),包括G...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-12-09 14:32:56" title="2024-12-09 14:32:56"> <span>3</span>天前<span class="com-v-box">2024-12-09 14:32:56</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17429" target="_blank" track-click="{"objectType":"tag","objectId":17429}">事件</a><span class="split">、</span><a href="/developer/tag/17431" target="_blank" track-click="{"objectType":"tag","objectId":17431}">视频</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/5787d1d311912c3447ae365cf32132bc.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-12-03 21:32:11" title="2024-12-03 21:32:11" class="com-action-panel-time"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:32:11</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2473485}" track-exposure="{"objectType":"article","objectId":2473485}"><a href="/developer/article/2473485" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">北大和港科技提出 DiT4Edit, 用于图像编辑的扩散变换器 (Diffusion Transformer) !</h3><p class="com-3-article-panel-desc">近年来,扩散模型的最新进展在文本驱动的视觉生成领域取得了显著的进步。例如,Stable Diffusion(SD) 、DALL-E 3 和PixArt[2]等文...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-12-03 21:32:11" title="2024-12-03 21:32:11"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:32:11</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17314" target="_blank" track-click="{"objectType":"tag","objectId":17314}">架构</a><span class="split">、</span><a href="/developer/tag/17347" target="_blank" track-click="{"objectType":"tag","objectId":17347}">科技</a><span class="split">、</span><a href="/developer/tag/17353" target="_blank" track-click="{"objectType":"tag","objectId":17353}">框架</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/f75447667cf15f3f5bbe22f1caa50fa4.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-12-03 21:31:17" title="2024-12-03 21:31:17" class="com-action-panel-time"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:31:17</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2473484}" track-exposure="{"objectType":"article","objectId":2473484}"><a href="/developer/article/2473484" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">MMFuser 用于精细视觉-语言理解的多模态多层特征融合器 !</h3><p class="com-3-article-panel-desc">近年来,多模态大型语言模型(MLLMs)在人工智能领域(AGI)的研究热点中崭露头角。这些模型通过跨模态互动和学习在理解和表达复杂人类意图方面取得了重要进展。在...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-12-03 21:31:18" title="2024-12-03 21:31:18"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:31:18</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17205" target="_blank" track-click="{"objectType":"tag","objectId":17205}">测试</a><span class="split">、</span><a href="/developer/tag/17314" target="_blank" track-click="{"objectType":"tag","objectId":17314}">架构</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/d55c7fcaff3237be7d26b6d5d46a6f29.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-12-03 21:30:37" title="2024-12-03 21:30:37" class="com-action-panel-time"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:30:37</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2473482}" track-exposure="{"objectType":"article","objectId":2473482}"><a href="/developer/article/2473482" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">GroundiT:利用 Diffusion Transformers实现精确无训练空间定位,实现 SOTA 性能 !</h3><p class="com-3-article-panel-desc">Transformer架构[45]在各种应用中推动了突破,其中扩散模型成为近期的重要受益者。尽管U-Net[42]作为去噪 Backbone [22; 43; ...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-12-03 21:30:37" title="2024-12-03 21:30:37"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:30:37</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17353" target="_blank" track-click="{"objectType":"tag","objectId":17353}">框架</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17189" target="_blank" track-click="{"objectType":"tag","objectId":17189}">编码</a><span class="split">、</span><a href="/developer/tag/17314" target="_blank" track-click="{"objectType":"tag","objectId":17314}">架构</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/74f8cae755fcd1d7cc6d2bc8e4dec048.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-12-03 21:29:43" title="2024-12-03 21:29:43" class="com-action-panel-time"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:29:43</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2473481}" track-exposure="{"objectType":"article","objectId":2473481}"><a href="/developer/article/2473481" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">MIT 提出 Bend-VLM ,超越传统去偏,在检索、分类和图像描述中的性能突出 !</h3><p class="com-3-article-panel-desc">预训练的基础视觉语言模型(VLMs),如CLIP [33],BLIP [22],以及LLaVA [25],已在诸如图像检索 [21],零和少样本分类 [33; ...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-12-03 21:29:43" title="2024-12-03 21:29:43"> <span>8</span>天前<span class="com-v-box">2024-12-03 21:29:43</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17189" target="_blank" track-click="{"objectType":"tag","objectId":17189}">编码</a><span class="split">、</span><a href="/developer/tag/17205" target="_blank" track-click="{"objectType":"tag","objectId":17205}">测试</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/2971a71056de3bbad941e8ddedd369e9.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-29 16:30:35" title="2024-11-29 16:30:35" class="com-action-panel-time"> <span>13</span>天前<span class="com-v-box">2024-11-29 16:30:35</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2472278}" track-exposure="{"objectType":"article","objectId":2472278}"><a href="/developer/article/2472278" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">从文本到图像:AutoBench-V利用LVLMs实现高效视觉问答评估 !</h3><p class="com-3-article-panel-desc">大型语言模型的繁荣为各种下游应用带来了显著的进步。随着LLM能力的增长,研究行人开始探索将视觉信息理解能力整合到LLM中,催生了大型视觉语言模型的。这些模型在广...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-29 16:30:35" title="2024-11-29 16:30:35"> <span>13</span>天前<span class="com-v-box">2024-11-29 16:30:35</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17205" target="_blank" track-click="{"objectType":"tag","objectId":17205}">测试</a><span class="split">、</span><a href="/developer/tag/17353" target="_blank" track-click="{"objectType":"tag","objectId":17353}">框架</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/10669" target="_blank" track-click="{"objectType":"tag","objectId":10669}">自动化</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/8204263fd40a4768fe850b6e2509b428.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-27 15:19:59" title="2024-11-27 15:19:59" class="com-action-panel-time"> <span>15</span>天前<span class="com-v-box">2024-11-27 15:19:59</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2471726}" track-exposure="{"objectType":"article","objectId":2471726}"><a href="/developer/article/2471726" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">牛津大学提出 Ensemble SuperICL ,超越 BaselineEn在 自然语言理解任务中表现卓越 !</h3><p class="com-3-article-panel-desc">语义学习(ICL)是一种有效的方法,可以将大型语言模型(LLM)调整到执行特定任务,而无需通过微调更新模型参数(Brown等人,2020年)。它涉及用少量的训练...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-27 15:19:59" title="2024-11-27 15:19:59"> <span>15</span>天前<span class="com-v-box">2024-11-27 15:19:59</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17205" target="_blank" track-click="{"objectType":"tag","objectId":17205}">测试</a><span class="split">、</span><a href="/developer/tag/17284" target="_blank" track-click="{"objectType":"tag","objectId":17284}">工作</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/187a7b1046e8febc1f9ca44ccaccbca8.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-26 15:56:03" title="2024-11-26 15:56:03" class="com-action-panel-time"> <span>16</span>天前<span class="com-v-box">2024-11-26 15:56:03</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2471307}" track-exposure="{"objectType":"article","objectId":2471307}"><a href="/developer/article/2471307" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">超越低秩自适应,从LoRA到Neat,利用轻量级神经网络优化预训练模型 !</h3><p class="com-3-article-panel-desc">预训练模型,在广泛和多样的一般领域语料库上进行训练,具有卓越的泛化能力,受益于一系列基本任务,如自然语言理解[Devlin,2018,Liu,2019],自然语...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-26 15:56:04" title="2024-11-26 15:56:04"> <span>16</span>天前<span class="com-v-box">2024-11-26 15:56:04</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/10332" target="_blank" track-click="{"objectType":"tag","objectId":10332}">神经网络</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17554" target="_blank" track-click="{"objectType":"tag","objectId":17554}">优化</a><span class="split">、</span><a href="/developer/tag/17907" target="_blank" track-click="{"objectType":"tag","objectId":17907}">LoRa</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/6196b378647b3f5c074f608b59eb1cdf.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-25 17:37:31" title="2024-11-25 17:37:31" class="com-action-panel-time"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:37:31</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2470830}" track-exposure="{"objectType":"article","objectId":2470830}"><a href="/developer/article/2470830" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">Tri-HE基准:量化视觉语言模型中的目标和关系幻觉 !</h3><p class="com-3-article-panel-desc">大型视觉语言模型(LVLMs)引起了广泛关注。尽管性能优越,但现有研究主要关注提高LVLMs的有用性,而没有仔细考虑LVLMs生成的响应的可靠性。然而,最近的研...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-25 17:37:31" title="2024-11-25 17:37:31"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:37:31</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17353" target="_blank" track-click="{"objectType":"tag","objectId":17353}">框架</a><span class="split">、</span><a href="/developer/tag/17360" target="_blank" track-click="{"objectType":"tag","objectId":17360}">量化</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/2d932d2016fe148d5e091a648f74d89f.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-25 17:36:25" title="2024-11-25 17:36:25" class="com-action-panel-time"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:36:25</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2470829}" track-exposure="{"objectType":"article","objectId":2470829}"><a href="/developer/article/2470829" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">推理加速策略对 LLMs 偏差的影响 !</h3><p class="com-3-article-panel-desc">现代的LLM(如LLaMA和GPT-4)展示了惊人的语言生成能力,导致它们的受欢迎程度和采用率激增。然而,由于它们的巨大大小,部署这些模型可能会具有挑战性,甚至...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-25 17:36:25" title="2024-11-25 17:36:25"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:36:25</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/10670" target="_blank" track-click="{"objectType":"tag","objectId":10670}">缓存</a><span class="split">、</span><a href="/developer/tag/17360" target="_blank" track-click="{"objectType":"tag","objectId":17360}">量化</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/a2114433dc5cdc9305d73c0cf88191a5.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-25 17:35:31" title="2024-11-25 17:35:31" class="com-action-panel-time"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:35:31</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2470828}" track-exposure="{"objectType":"article","objectId":2470828}"><a href="/developer/article/2470828" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">MEDGO:一个中文医疗大语言模型,已在在上海东方医院落地 !</h3><p class="com-3-article-panel-desc">医疗服务对每个人的福祉至关重要,在保障人类生命和健康方面发挥关键作用,并在改善人们整体健康状况方面具有决定性价值。然而,医疗保健领域面临几个关键挑战。一个重要问...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-25 17:35:32" title="2024-11-25 17:35:32"> <span>17</span>天前<span class="com-v-box">2024-11-25 17:35:32</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17554" target="_blank" track-click="{"objectType":"tag","objectId":17554}">优化</a><span class="split">、</span><a href="/developer/tag/10570" target="_blank" track-click="{"objectType":"tag","objectId":10570}">医疗</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/37884a5de7e967c97dcd81725ef2a76a.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-21 19:18:43" title="2024-11-21 19:18:43" class="com-action-panel-time"> <span>21</span>天前<span class="com-v-box">2024-11-21 19:18:43</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2469026}" track-exposure="{"objectType":"article","objectId":2469026}"><a href="/developer/article/2469026" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">你永远不知道 : 量化导致视觉-语言基础模型的不一致偏差 !</h3><p class="com-3-article-panel-desc">量化(Gholami等人,2022年)是压缩深度学习模型的领先实践:它将模型的参数表示从32位浮点数转换为较低的字节宽度(例如,8位或4位整数),从而显著减少内...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-21 19:18:44" title="2024-11-21 19:18:44"> <span>21</span>天前<span class="com-v-box">2024-11-21 19:18:44</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17535" target="_blank" track-click="{"objectType":"tag","objectId":17535}">压缩</a><span class="split">、</span><a href="/developer/tag/17302" target="_blank" track-click="{"objectType":"tag","objectId":17302}">基础</a><span class="split">、</span><a href="/developer/tag/17360" target="_blank" track-click="{"objectType":"tag","objectId":17360}">量化</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/ed8090d57432bc811d1ad4472e55e2d2.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-21 19:18:20" title="2024-11-21 19:18:20" class="com-action-panel-time"> <span>21</span>天前<span class="com-v-box">2024-11-21 19:18:20</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2469025}" track-exposure="{"objectType":"article","objectId":2469025}"><a href="/developer/article/2469025" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">增强无界 3D 高斯扫描与视图一致的 2D 扩散优先级,3DGS-Enhancer在NVS增强中的先进效果 !</h3><p class="com-3-article-panel-desc">新视角合成(NVS)在计算机视觉和图形学领域有着数十年的历史,旨在从多个输入图像或视频中生成场景的视角。最近,三维高斯分裂(3DGS)[18]在生成具有高效渲染...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-21 19:18:20" title="2024-11-21 19:18:20"> <span>21</span>天前<span class="com-v-box">2024-11-21 19:18:20</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17431" target="_blank" track-click="{"objectType":"tag","objectId":17431}">视频</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/10576" target="_blank" track-click="{"objectType":"tag","objectId":10576}">渲染</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/9b2f9b1b5bb2421f8aaee846872e6cf9.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-19 12:41:09" title="2024-11-19 12:41:09" class="com-action-panel-time"> <span>23</span>天前<span class="com-v-box">2024-11-19 12:41:09</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2466687}" track-exposure="{"objectType":"article","objectId":2466687}"><a href="/developer/article/2466687" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">DocEdit-v2:通过多模态 LLM 基础进行文档结构编辑 !</h3><p class="com-3-article-panel-desc">数字文档广泛用于交流、信息传播和提高工作效率。语言引导文档编辑涉及根据用户与文档空间对齐、组件放置、区域分组、替换、调整大小、拆分、合并以及应用特殊效果的开放式...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-19 12:41:09" title="2024-11-19 12:41:09"> <span>23</span>天前<span class="com-v-box">2024-11-19 12:41:09</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17202" target="_blank" track-click="{"objectType":"tag","objectId":17202}">布局</a><span class="split">、</span><a href="/developer/tag/17302" target="_blank" track-click="{"objectType":"tag","objectId":17302}">基础</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17917" target="_blank" track-click="{"objectType":"tag","objectId":17917}">LLM</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/9bfdd26674bef72f37ab7215650e5349.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-19 12:38:51" title="2024-11-19 12:38:51" class="com-action-panel-time"> <span>23</span>天前<span class="com-v-box">2024-11-19 12:38:51</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2466686}" track-exposure="{"objectType":"article","objectId":2466686}"><a href="/developer/article/2466686" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">西湖大学 & 苏大提出 PiTe | 大型视频语言模型的空间与时间维度下的精细对齐研究 !</h3><p class="com-3-article-panel-desc">大型语言模型(LLMs)在AI领域迅速获得了 popularity ,展示了惊人的在各种自然语言任务上的能力。LLMs 强大的语言理解能力促使研究行人探索其在解...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-19 12:38:52" title="2024-11-19 12:38:52"> <span>23</span>天前<span class="com-v-box">2024-11-19 12:38:52</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17429" target="_blank" track-click="{"objectType":"tag","objectId":17429}">事件</a><span class="split">、</span><a href="/developer/tag/17431" target="_blank" track-click="{"objectType":"tag","objectId":17431}">视频</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/286674d7df95221a2d5904def3acaacd.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-15 13:52:37" title="2024-11-15 13:52:37" class="com-action-panel-time"> <span>27</span>天前<span class="com-v-box">2024-11-15 13:52:37</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2465931}" track-exposure="{"objectType":"article","objectId":2465931}"><a href="/developer/article/2465931" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">对话生成与总结任务的效率优化,LLMR在NLP任务中的性能优势分析 !</h3><p class="com-3-article-panel-desc">大型语言模型(LLMs)已在各种文本生成任务上取得了显著的性能,如摘要(Ahmed和Devanbu,2022)、对话系统。此外,这种方法可以实现零样本学习,即用...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-15 13:52:37" title="2024-11-15 13:52:37"> <span>27</span>天前<span class="com-v-box">2024-11-15 13:52:37</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17516" target="_blank" track-click="{"objectType":"tag","objectId":17516}">效率</a><span class="split">、</span><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17554" target="_blank" track-click="{"objectType":"tag","objectId":17554}">优化</a><span class="split">、</span><a href="/developer/tag/14918" target="_blank" track-click="{"objectType":"tag","objectId":14918}">nlp</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/f631f725a7ade4e5f49b96e01139bafe.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-14 17:57:35" title="2024-11-14 17:57:35" class="com-action-panel-time"> <span>28</span>天前<span class="com-v-box">2024-11-14 17:57:35</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2465701}" track-exposure="{"objectType":"article","objectId":2465701}"><a href="/developer/article/2465701" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">TransformerRanker 高效地为下游分类任务找到最适合的语言模型的工具 !</h3><p class="com-3-article-panel-desc">自然语言处理(NLP)中的分类任务通常是通过从模型仓库中选择预训练语言模型(PLM)并对其进行微调来解决的。然而,现有的大量的PLM使得从模型仓库中选择最佳性能...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-14 17:57:36" title="2024-11-14 17:57:36"> <span>28</span>天前<span class="com-v-box">2024-11-14 17:57:36</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17525" target="_blank" track-click="{"objectType":"tag","objectId":17525}">性能</a><span class="split">、</span><a href="/developer/tag/17276" target="_blank" track-click="{"objectType":"tag","objectId":17276}">工具</a><span class="split">、</span><a href="/developer/tag/17284" target="_blank" track-click="{"objectType":"tag","objectId":17284}">工作</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/73ce622793a8aa9167c35fa82c056580.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-13 18:46:39" title="2024-11-13 18:46:39" class="com-action-panel-time"> <span>29</span>天前<span class="com-v-box">2024-11-13 18:46:39</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2465484}" track-exposure="{"objectType":"article","objectId":2465484}"><a href="/developer/article/2465484" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">BERT 与 CoCoLoFa 联手:提高逻辑谬误检测的准确性与效率 !</h3><p class="com-3-article-panel-desc">逻辑谬误是削弱论点有效性的推理错误。类似于滑坡谬误或虚假二选一的逻辑谬误会降低在线讨论的质量并使论点显得更有疑点,促进错误信息的传播(Jin等人,2022年)。...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-13 18:46:39" title="2024-11-13 18:46:39"> <span>29</span>天前<span class="com-v-box">2024-11-13 18:46:39</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17284" target="_blank" track-click="{"objectType":"tag","objectId":17284}">工作</a><span class="split">、</span><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17516" target="_blank" track-click="{"objectType":"tag","objectId":17516}">效率</a><span class="split">、</span><a href="/developer/tag/17205" target="_blank" track-click="{"objectType":"tag","objectId":17205}">测试</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/d390e5e374cff0a2e136a9d9fc5e50f6.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-12 17:01:56" title="2024-11-12 17:01:56" class="com-action-panel-time"> <span>30</span>天前<span class="com-v-box">2024-11-12 17:01:56</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2465196}" track-exposure="{"objectType":"article","objectId":2465196}"><a href="/developer/article/2465196" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">ProLIP模型:首个使用概率目标进行亿级图像-文本数据预训练的VLMs !</h3><p class="com-3-article-panel-desc">视觉语言模型(VLMs)旨在实现视觉和语言的联合嵌入空间,已成为近年来机器学习领域的重要基石。在训练过程中,VLMs通过对比学习将一对对齐的图像-文本(例如,一...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-12 17:01:56" title="2024-11-12 17:01:56"> <span>30</span>天前<span class="com-v-box">2024-11-12 17:01:56</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17187" target="_blank" track-click="{"objectType":"tag","objectId":17187}">遍历</a><span class="split">、</span><a href="/developer/tag/17210" target="_blank" track-click="{"objectType":"tag","objectId":17210}">产品</a><span class="split">、</span><a href="/developer/tag/17290" target="_blank" track-click="{"objectType":"tag","objectId":17290}">函数</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/e1e00bf9d15756a1d3613ac36d445112.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div><div class="com-action-panel"><div class="com-action-panel-hd"><span class="com-action-panel-action"><i class="com-action-panel-action-icon publish"></i>发表了文章</span><time dateTime="2024-11-11 19:48:28" title="2024-11-11 19:48:28" class="com-action-panel-time"> 2024-11-11<span class="com-v-box">2024-11-11 19:48:28</span></time></div><div class="com-action-panel-bd"><div class="com-3-article-panel theme2" track-click="{"objectType":"article","objectId":2464908}" track-exposure="{"objectType":"article","objectId":2464908}"><a href="/developer/article/2464908" class="com-3-article-panel-media" trackClick="{"elementId":1}" target="_blank" hotrep="community.usercenter.home.article_panels.NaN.article_panel.link"><div class="com-3-article-panel-body"><h3 class="com-3-article-panel-title">LLM 视觉语言模型在动作识别中是否有效?</h3><p class="com-3-article-panel-desc">近年来,得益于大规模预训练,许多视觉语言基础模型在许多下游视觉任务上取得了显著成果,展现出了强大的泛化能力。其中,具有视觉语言预训练的模型,如 CLIP 及其在...</p><div class="com-3-article-panel-infos"><div class="com-3-article-panel-source"><a href="/developer/user/11196058" track-click="{"objectType":"user","objectId":11196058}" target="_blank" class="item author"><span class="com-2-avatar"><span class="com-2-avatar-inner" style="background-image:url(https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg?imageView2/2/w/64/h/7000)"></span></span>AIGC 先锋科技</a><span class="item time"><time dateTime="2024-11-11 19:48:28" title="2024-11-11 19:48:28"> 2024-11-11<span class="com-v-box">2024-11-11 19:48:28</span></time></span></div><div class="com-tag-links com-3-article-panel-tags"><i class="com-i-tag tag-icon"></i><a href="/developer/tag/17381" target="_blank" track-click="{"objectType":"tag","objectId":17381}">模型</a><span class="split">、</span><a href="/developer/tag/17431" target="_blank" track-click="{"objectType":"tag","objectId":17431}">视频</a><span class="split">、</span><a href="/developer/tag/17440" target="_blank" track-click="{"objectType":"tag","objectId":17440}">数据</a><span class="split">、</span><a href="/developer/tag/17917" target="_blank" track-click="{"objectType":"tag","objectId":17917}">LLM</a><span class="split">、</span><a href="/developer/tag/17302" target="_blank" track-click="{"objectType":"tag","objectId":17302}">基础</a></div></div></div><div class="com-3-article-panel-object"><span class="com-thumbnail com-3-article-panel-thumbnail" style="background-image:url(https://developer.qcloudimg.com/http-save/yehe-11196058/7ed2d6597d081b73c7784cc09ec76408.jpg?imageView2/2/w/300/h/7000)"></span></div></a></div></div></div></div><div class="c-pages-wrap"><div class="c-pages"><span class="c-pages-item c-pages-item-actived">1</span><a href="javascript:;" class="c-pages-item">2</a><a href="javascript:;" class="c-pages-item">3</a><a href="javascript:;" class="c-pages-item">4</a><a href="javascript:;" class="c-pages-item">5</a><a href="javascript:;" class="c-pages-item">6</a><a href="javascript:;" class="c-pages-item">7</a><a href="javascript:;" class="c-pages-item">8</a><a href="javascript:;" class="c-pages-item">9</a><a href="javascript:;" class="c-pages-item">10</a><a href="javascript:;" class="c-pages-item c-pages-item-next">下一页</a></div></div></div></div></div></div><div class="layout-side" track-click="{"areaId":116019}" track-exposure="{"areaId":116019}"><button type="button" class="l full-width uc-sidebar-btn com-ico-btn c-btn" track-click="{"elementId":4}"><span class="text" track-click="{"elementId":3}"><i class="icon follow"></i>关注</span></button><section class="com-2-section"><header class="com-2-section-hd"><h2 class="com-2-section-title without-icon"><span><em>个人</em>简介</span></h2></header><div class="com-2-section-bd"><ul class="uc-user-infos"><li class="uc-user-info"><i class="uc-user-info-icon job job"></i><p style="color:#999">暂未填写公司和职称</p></li><li class="uc-user-info"><i class="uc-user-info-icon job intro"></i><p style="color:#999">暂未填写个人简介</p></li><li class="uc-user-info"><i class="uc-user-info-icon job skill"></i><p style="color:#999">暂未填写技能专长</p></li><li class="uc-user-info"><i class="uc-user-info-icon job school"></i><p style="color:#999">暂未填写学校和专业</p></li><li class="uc-user-info"><i class="uc-user-info-icon job blog"></i><p style="color:#999">暂未填写个人网址</p></li><li class="uc-user-info"><i class="uc-user-info-icon job address"></i><p style="color:#999">暂未填写所在城市</p></li><li class="uc-user-info"><i class="uc-user-info-icon job date"></i>加入社区时间:2024-07-07</li></ul></div></section><section class="com-2-section"><header class="com-2-section-hd"><h2 class="com-2-section-title without-icon"><span><em>个人</em>成就</span></h2></header><div class="com-2-section-bd"><ul class="uc-achievements"><li class="uc-achievement"><i class="uc-achievement-icon like"></i><span class="uc-achievement-text">获得 188 次赞同</span></li><li class="uc-achievement"><i class="uc-achievement-icon read"></i><span class="uc-achievement-text">文章被阅读 41.2K 次</span></li></ul></div></section><div class="uc-side-infos"><a href="/developer/user/11196058/following/users" trackClick="{"elementId":7}" class="uc-side-info" hotrep="community.usercenter.home.side_infos.0">关注了:<em>1</em></a><span class="uc-side-info-split"></span><a href="/developer/user/11196058/followers" trackClick="{"elementId":8}" class="uc-side-info" hotrep="community.usercenter.home.side_infos.1">关注者:<em>14</em></a></div></div></div></div></div><div class="cdc-footer J-footer com-2-footer"><div class="cdc-footer__inner"><div class="cdc-footer__main"><div class="cdc-footer__website"><ul class="cdc-footer__website-group"><li class="cdc-footer__website-column"><div class="cdc-footer__website-box"><h3 class="cdc-footer__website-title">社区</h3><ul class="cdc-footer__website-list"><li class="cdc-footer__website-item"><a href="/developer/column">专栏文章</a></li><li class="cdc-footer__website-item"><a href="/developer/inventory">阅读清单</a></li><li class="cdc-footer__website-item"><a href="/developer/ask">互动问答</a></li><li class="cdc-footer__website-item"><a href="/developer/salon">技术沙龙</a></li><li class="cdc-footer__website-item"><a href="/developer/video">技术视频</a></li><li class="cdc-footer__website-item"><a href="/developer/teams">团队主页</a></li><li class="cdc-footer__website-item"><a href="/developer/timl">腾讯云TI平台</a></li></ul></div></li><li class="cdc-footer__website-column"><div class="cdc-footer__website-box"><h3 class="cdc-footer__website-title">活动</h3><ul class="cdc-footer__website-list"><li class="cdc-footer__website-item"><a href="/developer/support-plan">自媒体同步曝光计划</a></li><li class="cdc-footer__website-item"><a href="/developer/support-plan-invitation">邀请作者入驻</a></li><li class="cdc-footer__website-item"><a href="/developer/article/1535830">自荐上首页</a></li><li class="cdc-footer__website-item"><a href="/developer/competition">技术竞赛</a></li></ul></div></li><li class="cdc-footer__website-column"><div class="cdc-footer__website-box"><h3 class="cdc-footer__website-title">资源</h3><ul class="cdc-footer__website-list"><li class="cdc-footer__website-item"><a href="/developer/specials">技术周刊</a></li><li class="cdc-footer__website-item"><a href="/developer/tags">社区标签</a></li><li class="cdc-footer__website-item"><a href="/developer/devdocs">开发者手册</a></li><li class="cdc-footer__website-item"><a href="/lab?from=20064&from_column=20064">开发者实验室</a></li></ul></div></li><li class="cdc-footer__website-column"><div class="cdc-footer__website-box"><h3 class="cdc-footer__website-title">关于</h3><ul class="cdc-footer__website-list"><li class="cdc-footer__website-item"><a rel="nofollow" href="/developer/article/1006434">社区规范</a></li><li class="cdc-footer__website-item"><a rel="nofollow" href="/developer/article/1006435">免责声明</a></li><li class="cdc-footer__website-item"><a rel="nofollow" href="mailto:cloudcommunity@tencent.com">联系我们</a></li><li class="cdc-footer__website-item"><a rel="nofollow" href="/developer/friendlink">友情链接</a></li></ul></div></li></ul></div><div class="cdc-footer__qr"><h3 class="cdc-footer__qr-title">腾讯云开发者</h3><div class="cdc-footer__qr-object"><img class="cdc-footer__qr-image" src="https://qcloudimg.tencent-cloud.cn/raw/a8907230cd5be483497c7e90b061b861.png" alt="扫码关注腾讯云开发者"/></div><div class="cdc-footer__qr-infos"><p class="cdc-footer__qr-info"><span class="cdc-footer__qr-text">扫码关注腾讯云开发者</span></p><p class="cdc-footer__qr-info"><span class="cdc-footer__qr-text">领取腾讯云代金券</span></p></div></div></div><div class="cdc-footer__recommend"><div class="cdc-footer__recommend-rows"><div class="cdc-footer__recommend-cell"><h3 class="cdc-footer__recommend-title">热门产品</h3><div class="cdc-footer__recommend-wrap"><ul class="cdc-footer__recommend-list"><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="https://dnspod.cloud.tencent.com?from=20064&from_column=20064">域名注册</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cvm?from=20064&from_column=20064">云服务器</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/tbaas?from=20064&from_column=20064">区块链服务</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/mq?from=20064&from_column=20064">消息队列</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/dsa?from=20064&from_column=20064">网络加速</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/tencentdb-catalog?from=20064&from_column=20064">云数据库</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cns?from=20064&from_column=20064">域名解析</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cos?from=20064&from_column=20064">云存储</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/css?from=20064&from_column=20064">视频直播</a></li></ul></div></div><div class="cdc-footer__recommend-cell"><h3 class="cdc-footer__recommend-title">热门推荐</h3><div class="cdc-footer__recommend-wrap"><ul class="cdc-footer__recommend-list"><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/facerecognition?from=20064&from_column=20064">人脸识别</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/tm?from=20064&from_column=20064">腾讯会议</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/act/pro/enterprise2019?from=20064&from_column=20064">企业云</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cdn-scd?from=20064&from_column=20064">CDN加速</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/trtc?from=20064&from_column=20064">视频通话</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/tiia?from=20064&from_column=20064">图像分析</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cdb?from=20064&from_column=20064">MySQL 数据库</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/symantecssl?from=20064&from_column=20064">SSL 证书</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/asr?from=20064&from_column=20064">语音识别</a></li></ul></div></div><div class="cdc-footer__recommend-cell"><h3 class="cdc-footer__recommend-title">更多推荐</h3><div class="cdc-footer__recommend-wrap"><ul class="cdc-footer__recommend-list"><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/solution/data_protection?from=20064&from_column=20064">数据安全</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/clb?from=20064&from_column=20064">负载均衡</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/sms?from=20064&from_column=20064">短信</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/ocr?from=20064&from_column=20064">文字识别</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/vod?from=20064&from_column=20064">云点播</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="https://tm.cloud.tencent.com?from=20064&from_column=20064">商标注册</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/solution/la?from=20064&from_column=20064">小程序开发</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cat?from=20064&from_column=20064">网站监控</a></li><li class="cdc-footer__recommend-item"><a class="com-2-footer-recommend-link" href="/product/cdm?from=20064&from_column=20064">数据迁移</a></li></ul></div></div></div></div><div class="cdc-footer__copyright"><div class="cdc-footer__copyright-text"><p>Copyright © 2013 - <!-- -->2024<!-- --> Tencent Cloud. All Rights Reserved. 腾讯云 版权所有<!-- --> </p><p>深圳市腾讯计算机系统有限公司 ICP备案/许可证号:<a href="https://beian.miit.gov.cn/#/Integrated/index" target="_blank">粤B2-20090059<!-- --> </a><a href="https://www.beian.gov.cn/portal/index.do" target="_blank">深公网安备号 44030502008569</a></p><p>腾讯云计算(北京)有限责任公司 京ICP证150476号 | <!-- --> <a href="https://beian.miit.gov.cn/#/Integrated/index" target="_blank">京ICP备11018762号</a> | <!-- --> <a href="https://www.beian.gov.cn/portal/index.do" target="_blank">京公网安备号11010802020287</a></p></div></div></div></div></div><div class="com-widget-global"><div style="position:relative;z-index:8088"><div class="com-widget-global2"><div class="com-widget-global2__btn code"><div class="com-widget-global2__btn-tag">领券</div></div><div class="com-widget-global2__btn top" style="visibility:hidden"></div></div></div></div><div id="dialog-root"></div><div id="rno-dialog-root" class="rno-modal-wrap"></div></div><script>window.isServerContext = false; window.isClientContext = true;</script><script>window.$serverTime = 1734006509933; window.$clientTime = 1734006509933;</script><script class="">window.$ua = {"browser":{"name":"IE","version":"7.0","major":"7"},"cpu":{},"device":{},"engine":{},"os":{"name":"Windows","version":"Vista"}};</script><script src="//cloudcache.tencent-cloud.com/qcloud/developer/scripts/release/libs/dom4/1.8.3/dom4.js"></script><script src="https://cloudcache.tencent-cloud.com/qcloud/main/scripts/release/common/vendors/babel/polyfill.6.26.min.js"></script><script src="https://cloudcache.tencent-cloud.com/qcloud/main/scripts/release/common/vendors/react/react.16.8.6.min.js"></script><script src="https://cloudcache.tencent-cloud.com/qcloud/main/scripts/release/common/vendors/react/react-dom.16.8.6.min.js"></script><script src="https://cloudcache.tencent-cloud.com/qcloud/main/scripts/release/common/vendors/jquery-3.2.1.min.js"></script><script src="//cloudcache.tencent-cloud.com/qcloud/developer/scripts/release/base.e1782d07ea.js?max_age=31536000" crossorigin="anonymous"></script><script src="//cloudcache.tencent-cloud.com/qcloud/draft-master/dist/draft-master-v2.0.142.d4s2ddo9sb.js?max_age=31536000"></script><script src="https://cloud.tencent.com/qccomponent/login/api.js"></script><script src="//cloudcache.tencent-cloud.com/qcloud/main/scripts/release/common/deps/wechatJsSdk.js?version=1_0_1&max_age=31536000"></script><script src="//cloudcache.tencent-cloud.com/qcloud/developer/scripts/release/common.e23283971f.js?max_age=31536000" crossorigin="anonymous"></script><script src="https://web.sdk.qcloud.com/player/tcplayer/release/v4.7.2/tcplayer.v4.7.2.min.js"></script><script src="//dscache.tencent-cloud.cn/ecache/qcstat/qcloud/qcloudStatApi.js"></script><script src="https://qccommunity.qcloudimg.com/common/exposure-plugin-4.1.15.min.js"></script><script src="https://qccommunity.qcloudimg.com/community-track/qcloud-community-track.min.js"></script><script src="https://dscache.tencent-cloud.com/sdk/dianshi-sdk/loader/umd/dianshi-sdk-loader.v0.0.18.js"></script><script src="//cloudcache.tencent-cloud.com/qcloud/developer/scripts/release/user-center/user-center.674a778ab2.js?max_age=31536000" crossorigin="anonymous"></script><script class=""> window.$render({"user":{"basicInfo":{"id":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","province":"","city":"","company":"","title":"","school":"","major":"","homePage":"","region":1,"jobType":1,"graduationDate":"","education":0,"specialityIds":[],"specialities":[],"gender":1,"trade":"","growthLevel":0,"isProfessionVerified":false,"status":1,"createTime":"2024-07-07 13:20:23","skipAds":false},"statInfo":{"id":11196058,"growthLevel":0,"followingCount":1,"followerCount":14,"upvoteCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandpickedCount":0,"joinedColumnCount":1,"followColumnCount":2,"articleCount":193,"realArticleCount":194,"joinedSalonCount":0,"beLikeNum":188,"articleReadNum":41214,"articleRecommNum":0,"answerRecommNum":0,"contribNum":0,"overallrank":598},"badgeInfo":{},"profileAuditing":false},"activities":{"pageStatus":"loaded","fetchStatus":"loaded","currentPage":1,"pageSize":20,"dataList":[{"id":10841,"type":40,"actionInfo":{"articleId":2474935,"articleOwnerUid":11196058,"title":"多角度视频描述:FIOVA基准推动LVLMs向人类水平迈进 !"},"createTime":1733725975,"detail":{"id":2474935,"articleId":2474935,"title":"多角度视频描述:FIOVA基准推动LVLMs向人类水平迈进 !","content":"","plain":"","brief":"","summary":"大型语言模型(LLM)在自然语言处理(NLP)领域取得了重大进展,在文本生成 和问答等任务上表现出色。在这些进展的基础上,大型视觉语言模型(LVLMs),包括G...","abstract":"大型语言模型(LLM)在自然语言处理(NLP)领域取得了重大进展,在文本生成 和问答等任务上表现出色。在这些进展的基础上,大型视觉语言模型(LVLMs),包括G...","posterSummary":"大型语言模型(LLM)在自然语言处理(NLP)领域取得了重大进展,在文本生成 和问答等任务上表现出色。在这些进展的基础上,大型视觉语言模型(LVLMs),包括GPT-4V 和LLaVA ,将LLM能力扩展到多模态领域。LVLMs在集成文本、图像和视频方面表现出色,在文本到视频生成和视频字幕(黄等,2024)等应用方面取得了显著进展。然而,评估LVLMs的真实能力仍然具有挑战性,因为传统的评估方法(...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/5787d1d311912c3447ae365cf32132bc.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/5787d1d311912c3447ae365cf32132bc.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1733725975,"updateTime":1733725976,"viewCount":92,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17429,"tagName":"事件","id":17429,"name":"事件"},{"tagId":17431,"tagName":"视频","id":17431,"name":"视频"}]}},{"id":10840,"type":40,"actionInfo":{"articleId":2473485,"articleOwnerUid":11196058,"title":"北大和港科技提出 DiT4Edit, 用于图像编辑的扩散变换器 (Diffusion Transformer) !"},"createTime":1733232731,"detail":{"id":2473485,"articleId":2473485,"title":"北大和港科技提出 DiT4Edit, 用于图像编辑的扩散变换器 (Diffusion Transformer) !","content":"","plain":"","brief":"","summary":"近年来,扩散模型的最新进展在文本驱动的视觉生成领域取得了显著的进步。例如,Stable Diffusion(SD) 、DALL-E 3 和PixArt[2]等文...","abstract":"近年来,扩散模型的最新进展在文本驱动的视觉生成领域取得了显著的进步。例如,Stable Diffusion(SD) 、DALL-E 3 和PixArt[2]等文...","posterSummary":"近年来,扩散模型的最新进展在文本驱动的视觉生成领域取得了显著的进步。例如,Stable Diffusion(SD) 、DALL-E 3 和PixArt[2]等文本到图像(T2I)模型的开发,对许多下游应用产生了重大影响,其中图像编辑是最具挑战性的任务之一。对于合成或真实输入图像,图像编辑算法旨在根据用户的意图,添加、删除或替换整个目标或目标属性。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/f75447667cf15f3f5bbe22f1caa50fa4.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/f75447667cf15f3f5bbe22f1caa50fa4.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1733232730,"updateTime":1733232731,"viewCount":112,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17314,"tagName":"架构","id":17314,"name":"架构"},{"tagId":17347,"tagName":"科技","id":17347,"name":"科技"},{"tagId":17353,"tagName":"框架","id":17353,"name":"框架"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"}]}},{"id":10839,"type":40,"actionInfo":{"articleId":2473484,"articleOwnerUid":11196058,"title":"MMFuser 用于精细视觉-语言理解的多模态多层特征融合器 !"},"createTime":1733232677,"detail":{"id":2473484,"articleId":2473484,"title":"MMFuser 用于精细视觉-语言理解的多模态多层特征融合器 !","content":"","plain":"","brief":"","summary":"近年来,多模态大型语言模型(MLLMs)在人工智能领域(AGI)的研究热点中崭露头角。这些模型通过跨模态互动和学习在理解和表达复杂人类意图方面取得了重要进展。在...","abstract":"近年来,多模态大型语言模型(MLLMs)在人工智能领域(AGI)的研究热点中崭露头角。这些模型通过跨模态互动和学习在理解和表达复杂人类意图方面取得了重要进展。在...","posterSummary":"近年来,多模态大型语言模型(MLLMs)在人工智能领域(AGI)的研究热点中崭露头角。这些模型通过跨模态互动和学习在理解和表达复杂人类意图方面取得了重要进展。在大型语言模型(LLMs)快速发展的基础上,MLLMs利用预训练的视觉编码器来提取图像特征,并将其与先进的LLMs相结合,展示了在各种视觉语言任务上的显著能力。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/d55c7fcaff3237be7d26b6d5d46a6f29.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/d55c7fcaff3237be7d26b6d5d46a6f29.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1733232677,"updateTime":1733232678,"viewCount":151,"likeCount":0,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17205,"tagName":"测试","id":17205,"name":"测试"},{"tagId":17314,"tagName":"架构","id":17314,"name":"架构"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"}]}},{"id":10838,"type":40,"actionInfo":{"articleId":2473482,"articleOwnerUid":11196058,"title":"GroundiT:利用 Diffusion Transformers实现精确无训练空间定位,实现 SOTA 性能 !"},"createTime":1733232637,"detail":{"id":2473482,"articleId":2473482,"title":"GroundiT:利用 Diffusion Transformers实现精确无训练空间定位,实现 SOTA 性能 !","content":"","plain":"","brief":"","summary":"Transformer架构[45]在各种应用中推动了突破,其中扩散模型成为近期的重要受益者。尽管U-Net[42]作为去噪 Backbone [22; 43; ...","abstract":"Transformer架构[45]在各种应用中推动了突破,其中扩散模型成为近期的重要受益者。尽管U-Net[42]作为去噪 Backbone [22; 43; ...","posterSummary":"Transformer架构[45]在各种应用中推动了突破,其中扩散模型成为近期的重要受益者。尽管U-Net[42]作为去噪 Backbone [22; 43; 41; 39]的成功,但近期基于 Transformer 的扩散模型,如 Diffusion Transformers(DiT)[37],在性能上又取得了新的飞跃。这一点由最近的生成模型,如Stable Diffusion 3[13]和So...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/74f8cae755fcd1d7cc6d2bc8e4dec048.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/74f8cae755fcd1d7cc6d2bc8e4dec048.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1733232637,"updateTime":1733232637,"viewCount":63,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17353,"tagName":"框架","id":17353,"name":"框架"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17189,"tagName":"编码","id":17189,"name":"编码"},{"tagId":17314,"tagName":"架构","id":17314,"name":"架构"}]}},{"id":10837,"type":40,"actionInfo":{"articleId":2473481,"articleOwnerUid":11196058,"title":"MIT 提出 Bend-VLM ,超越传统去偏,在检索、分类和图像描述中的性能突出 !"},"createTime":1733232583,"detail":{"id":2473481,"articleId":2473481,"title":"MIT 提出 Bend-VLM ,超越传统去偏,在检索、分类和图像描述中的性能突出 !","content":"","plain":"","brief":"","summary":"预训练的基础视觉语言模型(VLMs),如CLIP [33],BLIP [22],以及LLaVA [25],已在诸如图像检索 [21],零和少样本分类 [33; ...","abstract":"预训练的基础视觉语言模型(VLMs),如CLIP [33],BLIP [22],以及LLaVA [25],已在诸如图像检索 [21],零和少样本分类 [33; ...","posterSummary":"预训练的基础视觉语言模型(VLMs),如CLIP [33],BLIP [22],以及LLaVA [25],已在诸如图像检索 [21],零和少样本分类 [33; 4],文本引导图像生成 [32],以及人脸识别 [58]等领域得到广泛应用。然而,VL模型也编码了社会偏见 。随着越来越多的系统依赖CLIP,编码的表示性危害 [12; 3; 15; 52]可能导致分配性危害,例如计算机视觉系统将黑人个体错...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/2971a71056de3bbad941e8ddedd369e9.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/2971a71056de3bbad941e8ddedd369e9.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1733232583,"updateTime":1733232583,"viewCount":86,"likeCount":0,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17189,"tagName":"编码","id":17189,"name":"编码"},{"tagId":17205,"tagName":"测试","id":17205,"name":"测试"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"}]}},{"id":10835,"type":40,"actionInfo":{"articleId":2472278,"articleOwnerUid":11196058,"title":"从文本到图像:AutoBench-V利用LVLMs实现高效视觉问答评估 !"},"createTime":1732869035,"detail":{"id":2472278,"articleId":2472278,"title":"从文本到图像:AutoBench-V利用LVLMs实现高效视觉问答评估 !","content":"","plain":"","brief":"","summary":"大型语言模型的繁荣为各种下游应用带来了显著的进步。随着LLM能力的增长,研究行人开始探索将视觉信息理解能力整合到LLM中,催生了大型视觉语言模型的。这些模型在广...","abstract":"大型语言模型的繁荣为各种下游应用带来了显著的进步。随着LLM能力的增长,研究行人开始探索将视觉信息理解能力整合到LLM中,催生了大型视觉语言模型的。这些模型在广...","posterSummary":"大型语言模型的繁荣为各种下游应用带来了显著的进步。随着LLM能力的增长,研究行人开始探索将视觉信息理解能力整合到LLM中,催生了大型视觉语言模型的。这些模型在广泛的成对图像-文本数据集上进行训练,使他们能够通过有效地整合视觉和文本信息进行复杂的多模态推理。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/8204263fd40a4768fe850b6e2509b428.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/8204263fd40a4768fe850b6e2509b428.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732869035,"updateTime":1732869035,"viewCount":101,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17205,"tagName":"测试","id":17205,"name":"测试"},{"tagId":17353,"tagName":"框架","id":17353,"name":"框架"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":10669,"tagName":"自动化","id":10669,"name":"自动化"}]}},{"id":10834,"type":40,"actionInfo":{"articleId":2471726,"articleOwnerUid":11196058,"title":"牛津大学提出 Ensemble SuperICL ,超越 BaselineEn在 自然语言理解任务中表现卓越 !"},"createTime":1732691999,"detail":{"id":2471726,"articleId":2471726,"title":"牛津大学提出 Ensemble SuperICL ,超越 BaselineEn在 自然语言理解任务中表现卓越 !","content":"","plain":"","brief":"","summary":"语义学习(ICL)是一种有效的方法,可以将大型语言模型(LLM)调整到执行特定任务,而无需通过微调更新模型参数(Brown等人,2020年)。它涉及用少量的训练...","abstract":"语义学习(ICL)是一种有效的方法,可以将大型语言模型(LLM)调整到执行特定任务,而无需通过微调更新模型参数(Brown等人,2020年)。它涉及用少量的训练...","posterSummary":"语义学习(ICL)是一种有效的方法,可以将大型语言模型(LLM)调整到执行特定任务,而无需通过微调更新模型参数(Brown等人,2020年)。它涉及用少量的训练示例 Prompt LLM,并提供一个测试输入,使LLM能够从提供的上下文中推理出正确的输出。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/187a7b1046e8febc1f9ca44ccaccbca8.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/187a7b1046e8febc1f9ca44ccaccbca8.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732691999,"updateTime":1732691999,"viewCount":59,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17205,"tagName":"测试","id":17205,"name":"测试"},{"tagId":17284,"tagName":"工作","id":17284,"name":"工作"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"}]}},{"id":10833,"type":40,"actionInfo":{"articleId":2471307,"articleOwnerUid":11196058,"title":"超越低秩自适应,从LoRA到Neat,利用轻量级神经网络优化预训练模型 !"},"createTime":1732607763,"detail":{"id":2471307,"articleId":2471307,"title":"超越低秩自适应,从LoRA到Neat,利用轻量级神经网络优化预训练模型 !","content":"","plain":"","brief":"","summary":"预训练模型,在广泛和多样的一般领域语料库上进行训练,具有卓越的泛化能力,受益于一系列基本任务,如自然语言理解[Devlin,2018,Liu,2019],自然语...","abstract":"预训练模型,在广泛和多样的一般领域语料库上进行训练,具有卓越的泛化能力,受益于一系列基本任务,如自然语言理解[Devlin,2018,Liu,2019],自然语...","posterSummary":"预训练模型,在广泛和多样的一般领域语料库上进行训练,具有卓越的泛化能力,受益于一系列基本任务,如自然语言理解[Devlin,2018,Liu,2019],自然语言生成,以及图像分类[Dosovitskiy等人,2020a]。为了将预训练模型适应到特定的下游任务,通常采用微调。然而,由于预训练模型中参数数量庞大,完全微调需要大量的计算资源和产生大量的内存开销[Qin等人,2024]。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/6196b378647b3f5c074f608b59eb1cdf.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/6196b378647b3f5c074f608b59eb1cdf.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732607763,"updateTime":1732607764,"viewCount":119,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":10332,"tagName":"神经网络","id":10332,"name":"神经网络"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17554,"tagName":"优化","id":17554,"name":"优化"},{"tagId":17907,"tagName":"LoRa","id":17907,"name":"LoRa"}]}},{"id":10832,"type":40,"actionInfo":{"articleId":2470830,"articleOwnerUid":11196058,"title":"Tri-HE基准:量化视觉语言模型中的目标和关系幻觉 !"},"createTime":1732527451,"detail":{"id":2470830,"articleId":2470830,"title":"Tri-HE基准:量化视觉语言模型中的目标和关系幻觉 !","content":"","plain":"","brief":"","summary":"大型视觉语言模型(LVLMs)引起了广泛关注。尽管性能优越,但现有研究主要关注提高LVLMs的有用性,而没有仔细考虑LVLMs生成的响应的可靠性。然而,最近的研...","abstract":"大型视觉语言模型(LVLMs)引起了广泛关注。尽管性能优越,但现有研究主要关注提高LVLMs的有用性,而没有仔细考虑LVLMs生成的响应的可靠性。然而,最近的研...","posterSummary":"大型视觉语言模型(LVLMs)引起了广泛关注。尽管性能优越,但现有研究主要关注提高LVLMs的有用性,而没有仔细考虑LVLMs生成的响应的可靠性。然而,最近的研究已经观察到LVLMs存在严重的错觉,即LVLMs可能会生成给定图像中不存在的信息,这可能是由于在视觉指令调优过程中训练不足导致的。图1(a)提供了一个典型的例子,其中LaVA[2]模型认为该位置很繁忙,仅仅因为它识别这是一个有几个人存在的...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/2d932d2016fe148d5e091a648f74d89f.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/2d932d2016fe148d5e091a648f74d89f.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732527451,"updateTime":1732527451,"viewCount":77,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17353,"tagName":"框架","id":17353,"name":"框架"},{"tagId":17360,"tagName":"量化","id":17360,"name":"量化"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"}]}},{"id":10831,"type":40,"actionInfo":{"articleId":2470829,"articleOwnerUid":11196058,"title":"推理加速策略对 LLMs 偏差的影响 !"},"createTime":1732527385,"detail":{"id":2470829,"articleId":2470829,"title":"推理加速策略对 LLMs 偏差的影响 !","content":"","plain":"","brief":"","summary":"现代的LLM(如LLaMA和GPT-4)展示了惊人的语言生成能力,导致它们的受欢迎程度和采用率激增。然而,由于它们的巨大大小,部署这些模型可能会具有挑战性,甚至...","abstract":"现代的LLM(如LLaMA和GPT-4)展示了惊人的语言生成能力,导致它们的受欢迎程度和采用率激增。然而,由于它们的巨大大小,部署这些模型可能会具有挑战性,甚至...","posterSummary":"现代的LLM(如LLaMA和GPT-4)展示了惊人的语言生成能力,导致它们的受欢迎程度和采用率激增。然而,由于它们的巨大大小,部署这些模型可能会具有挑战性,甚至对于消费级设备来说可能不可行。大量的研究提出了加速策略,如量化剪枝,以实现高效推理。这些策略的目标通常是在保持预测性能的同时减少模型大小。随着时间的推移,这些策略越来越普遍,并集成到了流行的库(如HuggingFace)和库(如vLLM)中...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/a2114433dc5cdc9305d73c0cf88191a5.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/a2114433dc5cdc9305d73c0cf88191a5.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732527385,"updateTime":1732527385,"viewCount":60,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":10670,"tagName":"缓存","id":10670,"name":"缓存"},{"tagId":17360,"tagName":"量化","id":17360,"name":"量化"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"}]}},{"id":10830,"type":40,"actionInfo":{"articleId":2470828,"articleOwnerUid":11196058,"title":"MEDGO:一个中文医疗大语言模型,已在在上海东方医院落地 !"},"createTime":1732527331,"detail":{"id":2470828,"articleId":2470828,"title":"MEDGO:一个中文医疗大语言模型,已在在上海东方医院落地 !","content":"","plain":"","brief":"","summary":"医疗服务对每个人的福祉至关重要,在保障人类生命和健康方面发挥关键作用,并在改善人们整体健康状况方面具有决定性价值。然而,医疗保健领域面临几个关键挑战。一个重要问...","abstract":"医疗服务对每个人的福祉至关重要,在保障人类生命和健康方面发挥关键作用,并在改善人们整体健康状况方面具有决定性价值。然而,医疗保健领域面临几个关键挑战。一个重要问...","posterSummary":"医疗服务对每个人的福祉至关重要,在保障人类生命和健康方面发挥关键作用,并在改善人们整体健康状况方面具有决定性价值。然而,医疗保健领域面临几个关键挑战。一个重要问题是不同地区医疗服务质量的巨大差异,限制了患者获得持续、高质量医疗保健的途径。这种区域差异因医疗专业行人显著短缺和分布不均而加剧。熟练医疗行人的短缺尤其严重,尤其是在资源有限的偏远地区和基层卫生保健设施。这些挑战严重影响医疗服务的获得性和公...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/37884a5de7e967c97dcd81725ef2a76a.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/37884a5de7e967c97dcd81725ef2a76a.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732527331,"updateTime":1732527332,"viewCount":202,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17554,"tagName":"优化","id":17554,"name":"优化"},{"tagId":10570,"tagName":"医疗","id":10570,"name":"医疗"}]}},{"id":10828,"type":40,"actionInfo":{"articleId":2469026,"articleOwnerUid":11196058,"title":"你永远不知道 : 量化导致视觉-语言基础模型的不一致偏差 !"},"createTime":1732187923,"detail":{"id":2469026,"articleId":2469026,"title":"你永远不知道 : 量化导致视觉-语言基础模型的不一致偏差 !","content":"","plain":"","brief":"","summary":"量化(Gholami等人,2022年)是压缩深度学习模型的领先实践:它将模型的参数表示从32位浮点数转换为较低的字节宽度(例如,8位或4位整数),从而显著减少内...","abstract":"量化(Gholami等人,2022年)是压缩深度学习模型的领先实践:它将模型的参数表示从32位浮点数转换为较低的字节宽度(例如,8位或4位整数),从而显著减少内...","posterSummary":"量化(Gholami等人,2022年)是压缩深度学习模型的领先实践:它将模型的参数表示从32位浮点数转换为较低的字节宽度(例如,8位或4位整数),从而显著减少内存占用和推理延迟。然而,这些在数字表示上的转换可能会引入模型参数值的一小数值扰动,可能导致在量化后模型出现不期望的行为。在本文中,作者研究了量化对基础视觉语言(ViL)模型公平结果的影响。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/ed8090d57432bc811d1ad4472e55e2d2.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/ed8090d57432bc811d1ad4472e55e2d2.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732187923,"updateTime":1732187924,"viewCount":75,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17535,"tagName":"压缩","id":17535,"name":"压缩"},{"tagId":17302,"tagName":"基础","id":17302,"name":"基础"},{"tagId":17360,"tagName":"量化","id":17360,"name":"量化"}]}},{"id":10827,"type":40,"actionInfo":{"articleId":2469025,"articleOwnerUid":11196058,"title":"增强无界 3D 高斯扫描与视图一致的 2D 扩散优先级,3DGS-Enhancer在NVS增强中的先进效果 !"},"createTime":1732187900,"detail":{"id":2469025,"articleId":2469025,"title":"增强无界 3D 高斯扫描与视图一致的 2D 扩散优先级,3DGS-Enhancer在NVS增强中的先进效果 !","content":"","plain":"","brief":"","summary":"新视角合成(NVS)在计算机视觉和图形学领域有着数十年的历史,旨在从多个输入图像或视频中生成场景的视角。最近,三维高斯分裂(3DGS)[18]在生成具有高效渲染...","abstract":"新视角合成(NVS)在计算机视觉和图形学领域有着数十年的历史,旨在从多个输入图像或视频中生成场景的视角。最近,三维高斯分裂(3DGS)[18]在生成具有高效渲染...","posterSummary":"新视角合成(NVS)在计算机视觉和图形学领域有着数十年的历史,旨在从多个输入图像或视频中生成场景的视角。最近,三维高斯分裂(3DGS)[18]在生成具有高效渲染管线的照片级渲染方面表现出色。然而,在远离现有视角渲染高质量的新视角仍然具有很大的挑战性,这在稀疏视图设置中经常遇到,因为低采样区域的信息不足。如图1所示,当只有三个输入视图时,会出现明显的椭球形和空洞状的伪影。由于实际中存在这些低质量的渲...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/9b2f9b1b5bb2421f8aaee846872e6cf9.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/9b2f9b1b5bb2421f8aaee846872e6cf9.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1732187900,"updateTime":1732187900,"viewCount":110,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17431,"tagName":"视频","id":17431,"name":"视频"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":10576,"tagName":"渲染","id":10576,"name":"渲染"}]}},{"id":10825,"type":40,"actionInfo":{"articleId":2466687,"articleOwnerUid":11196058,"title":"DocEdit-v2:通过多模态 LLM 基础进行文档结构编辑 !"},"createTime":1731991269,"detail":{"id":2466687,"articleId":2466687,"title":"DocEdit-v2:通过多模态 LLM 基础进行文档结构编辑 !","content":"","plain":"","brief":"","summary":"数字文档广泛用于交流、信息传播和提高工作效率。语言引导文档编辑涉及根据用户与文档空间对齐、组件放置、区域分组、替换、调整大小、拆分、合并以及应用特殊效果的开放式...","abstract":"数字文档广泛用于交流、信息传播和提高工作效率。语言引导文档编辑涉及根据用户与文档空间对齐、组件放置、区域分组、替换、调整大小、拆分、合并以及应用特殊效果的开放式...","posterSummary":"数字文档广泛用于交流、信息传播和提高工作效率。语言引导文档编辑涉及根据用户与文档空间对齐、组件放置、区域分组、替换、调整大小、拆分、合并以及应用特殊效果的开放式请求修改文档的文本、视觉和结构组件。文档编辑本质上是一项生成性任务,因为它涉及从现有文档创建一个新的编辑输出。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/9bfdd26674bef72f37ab7215650e5349.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/9bfdd26674bef72f37ab7215650e5349.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731991269,"updateTime":1731991269,"viewCount":64,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17202,"tagName":"布局","id":17202,"name":"布局"},{"tagId":17302,"tagName":"基础","id":17302,"name":"基础"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17917,"tagName":"LLM","id":17917,"name":"LLM"}]}},{"id":10824,"type":40,"actionInfo":{"articleId":2466686,"articleOwnerUid":11196058,"title":"西湖大学 & 苏大提出 PiTe | 大型视频语言模型的空间与时间维度下的精细对齐研究 !"},"createTime":1731991131,"detail":{"id":2466686,"articleId":2466686,"title":"西湖大学 & 苏大提出 PiTe | 大型视频语言模型的空间与时间维度下的精细对齐研究 !","content":"","plain":"","brief":"","summary":"大型语言模型(LLMs)在AI领域迅速获得了 popularity ,展示了惊人的在各种自然语言任务上的能力。LLMs 强大的语言理解能力促使研究行人探索其在解...","abstract":"大型语言模型(LLMs)在AI领域迅速获得了 popularity ,展示了惊人的在各种自然语言任务上的能力。LLMs 强大的语言理解能力促使研究行人探索其在解...","posterSummary":"大型语言模型(LLMs)在AI领域迅速获得了 popularity ,展示了惊人的在各种自然语言任务上的能力。LLMs 强大的语言理解能力促使研究行人探索其在解决更广泛跨领域的任务中的实用性。因此,越来越多的研究专注于开发全面的 Large Visual-Language Models(LVLMs)以解决零样本设置下的视觉相关任务,特别是在视频理解方面。通用 Large Video-Languag...","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/286674d7df95221a2d5904def3acaacd.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/286674d7df95221a2d5904def3acaacd.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731991131,"updateTime":1731991132,"viewCount":100,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17429,"tagName":"事件","id":17429,"name":"事件"},{"tagId":17431,"tagName":"视频","id":17431,"name":"视频"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"}]}},{"id":10822,"type":40,"actionInfo":{"articleId":2465931,"articleOwnerUid":11196058,"title":"对话生成与总结任务的效率优化,LLMR在NLP任务中的性能优势分析 !"},"createTime":1731649957,"detail":{"id":2465931,"articleId":2465931,"title":"对话生成与总结任务的效率优化,LLMR在NLP任务中的性能优势分析 !","content":"","plain":"","brief":"","summary":"大型语言模型(LLMs)已在各种文本生成任务上取得了显著的性能,如摘要(Ahmed和Devanbu,2022)、对话系统。此外,这种方法可以实现零样本学习,即用...","abstract":"大型语言模型(LLMs)已在各种文本生成任务上取得了显著的性能,如摘要(Ahmed和Devanbu,2022)、对话系统。此外,这种方法可以实现零样本学习,即用...","posterSummary":"大型语言模型(LLMs)已在各种文本生成任务上取得了显著的性能,如摘要(Ahmed和Devanbu,2022)、对话系统。此外,这种方法可以实现零样本学习,即用户输入一个自然语言提示并使LLM为任务生成所期望的输出(Brown等人,2020)。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/f631f725a7ade4e5f49b96e01139bafe.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/f631f725a7ade4e5f49b96e01139bafe.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731649957,"updateTime":1731649957,"viewCount":79,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17516,"tagName":"效率","id":17516,"name":"效率"},{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17554,"tagName":"优化","id":17554,"name":"优化"},{"tagId":14918,"tagName":"nlp","id":14918,"name":"nlp"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"}]}},{"id":10821,"type":40,"actionInfo":{"articleId":2465701,"articleOwnerUid":11196058,"title":"TransformerRanker 高效地为下游分类任务找到最适合的语言模型的工具 !"},"createTime":1731578255,"detail":{"id":2465701,"articleId":2465701,"title":"TransformerRanker 高效地为下游分类任务找到最适合的语言模型的工具 !","content":"","plain":"","brief":"","summary":"自然语言处理(NLP)中的分类任务通常是通过从模型仓库中选择预训练语言模型(PLM)并对其进行微调来解决的。然而,现有的大量的PLM使得从模型仓库中选择最佳性能...","abstract":"自然语言处理(NLP)中的分类任务通常是通过从模型仓库中选择预训练语言模型(PLM)并对其进行微调来解决的。然而,现有的大量的PLM使得从模型仓库中选择最佳性能...","posterSummary":"自然语言处理(NLP)中的分类任务通常是通过从模型仓库中选择预训练语言模型(PLM)并对其进行微调来解决的。然而,现有的大量的PLM使得从模型仓库中选择最佳性能的PLM成为实际挑战。作者将介绍TransformerRanker,一种轻量级的库,能够有效地对分类任务中的PLM进行排名,且不需要进行计算成本高昂的再训练。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/73ce622793a8aa9167c35fa82c056580.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/73ce622793a8aa9167c35fa82c056580.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731578255,"updateTime":1731578256,"viewCount":77,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17525,"tagName":"性能","id":17525,"name":"性能"},{"tagId":17276,"tagName":"工具","id":17276,"name":"工具"},{"tagId":17284,"tagName":"工作","id":17284,"name":"工作"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"}]}},{"id":10820,"type":40,"actionInfo":{"articleId":2465484,"articleOwnerUid":11196058,"title":"BERT 与 CoCoLoFa 联手:提高逻辑谬误检测的准确性与效率 !"},"createTime":1731494799,"detail":{"id":2465484,"articleId":2465484,"title":"BERT 与 CoCoLoFa 联手:提高逻辑谬误检测的准确性与效率 !","content":"","plain":"","brief":"","summary":"逻辑谬误是削弱论点有效性的推理错误。类似于滑坡谬误或虚假二选一的逻辑谬误会降低在线讨论的质量并使论点显得更有疑点,促进错误信息的传播(Jin等人,2022年)。...","abstract":"逻辑谬误是削弱论点有效性的推理错误。类似于滑坡谬误或虚假二选一的逻辑谬误会降低在线讨论的质量并使论点显得更有疑点,促进错误信息的传播(Jin等人,2022年)。...","posterSummary":"逻辑谬误是削弱论点有效性的推理错误。类似于滑坡谬误或虚假二选一的逻辑谬误会降低在线讨论的质量并使论点显得更有疑点,促进错误信息的传播(Jin等人,2022年)。自动检测文本中的逻辑谬误将帮助用户识别论点缺陷。然而,在自然环境中自动识别这些谬误并不容易。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/d390e5e374cff0a2e136a9d9fc5e50f6.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/d390e5e374cff0a2e136a9d9fc5e50f6.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731494799,"updateTime":1731494799,"viewCount":70,"likeCount":0,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17284,"tagName":"工作","id":17284,"name":"工作"},{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17516,"tagName":"效率","id":17516,"name":"效率"},{"tagId":17205,"tagName":"测试","id":17205,"name":"测试"}]}},{"id":10819,"type":40,"actionInfo":{"articleId":2465196,"articleOwnerUid":11196058,"title":"ProLIP模型:首个使用概率目标进行亿级图像-文本数据预训练的VLMs !"},"createTime":1731402116,"detail":{"id":2465196,"articleId":2465196,"title":"ProLIP模型:首个使用概率目标进行亿级图像-文本数据预训练的VLMs !","content":"","plain":"","brief":"","summary":"视觉语言模型(VLMs)旨在实现视觉和语言的联合嵌入空间,已成为近年来机器学习领域的重要基石。在训练过程中,VLMs通过对比学习将一对对齐的图像-文本(例如,一...","abstract":"视觉语言模型(VLMs)旨在实现视觉和语言的联合嵌入空间,已成为近年来机器学习领域的重要基石。在训练过程中,VLMs通过对比学习将一对对齐的图像-文本(例如,一...","posterSummary":"视觉语言模型(VLMs)旨在实现视觉和语言的联合嵌入空间,已成为近年来机器学习领域的重要基石。在训练过程中,VLMs通过对比学习将一对对齐的图像-文本(例如,一张图像及其对应的描述)映射到相同的空间。从大规模图像-文本对齐数据集中学习到的丰富的联合表示在各种下游任务上取得了显著的成功,例如零样本分类(将类标签视为模版文本,例如,一张照片)或图像-文本跨模态检索。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/e1e00bf9d15756a1d3613ac36d445112.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/e1e00bf9d15756a1d3613ac36d445112.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731402116,"updateTime":1731402116,"viewCount":90,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17187,"tagName":"遍历","id":17187,"name":"遍历"},{"tagId":17210,"tagName":"产品","id":17210,"name":"产品"},{"tagId":17290,"tagName":"函数","id":17290,"name":"函数"}]}},{"id":10818,"type":40,"actionInfo":{"articleId":2464908,"articleOwnerUid":11196058,"title":"LLM 视觉语言模型在动作识别中是否有效?"},"createTime":1731325708,"detail":{"id":2464908,"articleId":2464908,"title":"LLM 视觉语言模型在动作识别中是否有效?","content":"","plain":"","brief":"","summary":"近年来,得益于大规模预训练,许多视觉语言基础模型在许多下游视觉任务上取得了显著成果,展现出了强大的泛化能力。其中,具有视觉语言预训练的模型,如 CLIP 及其在...","abstract":"近年来,得益于大规模预训练,许多视觉语言基础模型在许多下游视觉任务上取得了显著成果,展现出了强大的泛化能力。其中,具有视觉语言预训练的模型,如 CLIP 及其在...","posterSummary":"近年来,得益于大规模预训练,许多视觉语言基础模型在许多下游视觉任务上取得了显著成果,展现出了强大的泛化能力。其中,具有视觉语言预训练的模型,如 CLIP 及其在视频任务上的成功继承者,彻底改变了众多下游任务,展示了前所未有的灵活性和性能。","description":"","picture":"https://developer.qcloudimg.com/http-save/yehe-11196058/7ed2d6597d081b73c7784cc09ec76408.jpg","coverImageUrl":"https://developer.qcloudimg.com/http-save/yehe-11196058/7ed2d6597d081b73c7784cc09ec76408.jpg","sourceType":99,"sourceDetail":{},"channelType":4,"channelDetail":{},"authorId":11196058,"columnId":103205,"columnIds":[],"writeTime":1731325708,"updateTime":1731325708,"viewCount":104,"likeCount":1,"commentCount":0,"favorCount":0,"weight":0,"status":2,"draftId":0,"tagIds":[],"isCommentEnable":true,"highQuality":false,"skipAds":false,"showAds":false,"focusRead":false,"publishTime":null,"editTime":null,"isCloseTextLink":false,"author":{"id":11196058,"uid":11196058,"name":"AIGC 先锋科技","label":"","avatar":"https://developer.qcloudimg.com/http-save/10011/3d452012fe48ee96c4eca96af288128e.jpg","company":"","title":"","region":1,"jobType":1,"specialityIds":[],"specialities":[],"gender":1,"growthLevel":0,"isProfessionVerified":false,"upvoteCount":0,"followingCount":0,"followerCount":0,"questionCount":0,"answerCount":0,"followQuestionCount":0,"followTagCount":0,"favorAnswerCount":0,"beHandPickedCount":0,"followColumnCount":0,"articleCount":0,"validArticleCount":0},"tags":[{"tagId":17381,"tagName":"模型","id":17381,"name":"模型"},{"tagId":17431,"tagName":"视频","id":17431,"name":"视频"},{"tagId":17440,"tagName":"数据","id":17440,"name":"数据"},{"tagId":17917,"tagName":"LLM","id":17917,"name":"LLM"},{"tagId":17302,"tagName":"基础","id":17302,"name":"基础"}]}}],"total":198},"column":{"pageStatus":"none","fetchStatus":"none","dataList":[]},"inventories":{"pageStatus":"none","createdData":{"fetchStatus":"none","pageNumber":1,"pageSize":12,"total":0,"list":[]},"favoredData":{"sortType":"lastFavTime","fetchStatus":"none","pageNumber":1,"pageSize":12,"total":0,"list":[]},"hotRankData":{"fetchStatus":"none","pageNumber":1,"pageSize":3,"total":0,"list":[]}},"articles":{"pageStatus":"none","columnsData":{"fetchStatus":"none","pageNumber":1,"pageSize":20,"total":0,"list":[]},"articlesData":{"fetchStatus":"none","currentPage":1,"pageSize":20,"total":0,"list":[]}},"questions":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"salons":{"pageStatus":"none","joinedSalonsData":{"fetchStatus":"none","currentPage":1,"pageSize":20,"total":0,"list":[]}},"video":{"pageStatus":"none","list":[],"fetchStatus":"none","pageNumber":1,"pageSize":20,"total":0,"recommendList":[],"recommendListFetchStatus":"none","albumData":{"fetchStatus":"none","pageNumber":1,"pageSize":3,"total":0,"list":[]}},"competition":{"pageStatus":"none","list":[],"fetchStatus":"none","pageNumber":1,"pageSize":20,"total":0},"followingUsers":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"followers":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"followingColumns":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"followingTags":{"pageStatus":"none","followingTagsData":{"fetchStatus":"none","currentPage":1,"pageSize":20,"list":[],"total":0}},"followingQuestions":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"followingTeams":{"pageStatus":"none","followingTeamsData":{"fetchStatus":"none","list":[]}},"favorAnswers":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"favorArticles":{"pageStatus":"none","fetchStatus":"none","currentPage":1,"pageSize":20,"dataList":[],"total":0},"level":{"pageStatus":"none","growthPlanning":{"fetchStatus":"none","levelGrowthMap":{},"userGrowth":0,"userLevel":0,"awardConfig":{},"visibleBubbleIndex":0},"levelDetail":{"fetchStatus":"none","sourceType":0,"pageNumber":1,"pageSize":10,"total":0,"list":[]}},"profile":{"pageStatus":"none","fetchStatus":"none","tagList":[]},"learning":{"pageStatus":"none","fetchStatus":"none"},"currentUserId":11196058,"currentRoute":"","env":"production","documentBaseTitle":"腾讯云开发者社区-腾讯云","cdnDomain":"cloudcache.tencent-cloud.cn","cssDomain":"cloudcache.tencent-cloud.cn","qcloudDomain":"cloud.tencent.com","consoleDomain":"console.cloud.tencent.com","qcommunity_identify_id":"GanDbwbXqHTMrXlpy-5dM","session":{"isLogined":false,"isQcloudUser":false,"isOwner":false,"nickname":"","accountInfoCompleted":false,"phoneCompleted":false,"profile":{},"contactPhoneCompleted":false,"userInfo":{}},"pvId":"fAaLbd2Qcu4VB1lJn4LOo","userIp":"8.222.208.146","fromMiniProgram":false,"route":{"url":"/developer/user/11196058","path":"/developer/user/11196058","pathname":"/developer/user/11196058","search":null,"query":{},"segments":["developer","user","11196058"]}}); </script><script class=""> if (!Element.prototype.matches) Element.prototype.matches = Element.prototype.msMatchesSelector || Element.prototype.webkitMatchesSelector; if (!Element.prototype.closest) Element.prototype.closest = function(s) { var el = this; if (!document.documentElement.contains(el)) return null; do { if (el.matches(s)) return el; el = el.parentElement; } while (el !== null); return null; }; window.addEventListener('mouseover', function(evt) { const target = evt.target; if (!target) { return; } const aEle = target.closest('a'); if (!aEle) { return; } let href = aEle.getAttribute('href'); if (!href) { return; } href = href.replace(/cloud.tencent.com.cn|cloud.tencent.com|cloud.tencent.cn/g, 'cloud.tencent.com'); aEle.setAttribute('href', href); }, true); </script></body></html>