<?xml version="1.0" encoding="UTF-8"?>
<!-- This sitemap was dynamically generated on April 4, 2026 at 7:45 AM by All in One SEO v4.3.6.1 - the original SEO plugin for WordPress. -->

<?xml-stylesheet type="text/xsl" href="https://www.bigai.ai/default.xsl"?>

<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
	<channel>
		<title>北京通用人工智能研究院BIGAI</title>
		<link><![CDATA[https://www.bigai.ai]]></link>
		<description><![CDATA[Official Homepage for Beijing Institute for General Artificial Intelligence]]></description>
		<lastBuildDate><![CDATA[Tue, 31 Mar 2026 05:17:26 +0000]]></lastBuildDate>
		<docs>https://validator.w3.org/feed/docs/rss2.html</docs>
		<atom:link href="https://www.bigai.ai/sitemap.rss" rel="self" type="application/rss+xml" />
		<ttl><![CDATA[60]]></ttl>

		<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e7%a7%91%e6%8a%80%e5%88%9b%e6%96%b0%e4%b8%8e%e4%ba%a7%e4%b8%9a%e5%88%9b%e6%96%b0%e6%b7%b1%e5%ba%a6%e8%9e%8d%e5%90%88%ef%bc%8c2026%e9%80%9a%e7%94%a8%e4%ba%ba%e5%b7%a5%e6%99%ba%e8%83%bd%e8%ae%ba/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e7%a7%91%e6%8a%80%e5%88%9b%e6%96%b0%e4%b8%8e%e4%ba%a7%e4%b8%9a%e5%88%9b%e6%96%b0%e6%b7%b1%e5%ba%a6%e8%9e%8d%e5%90%88%ef%bc%8c2026%e9%80%9a%e7%94%a8%e4%ba%ba%e5%b7%a5%e6%99%ba%e8%83%bd%e8%ae%ba/]]></link>
			<title>科技创新与产业创新深度融合，2026通用人工智能论坛成功召开</title>
			<pubDate><![CDATA[Tue, 31 Mar 2026 05:17:26 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/narrativeloom-enhancing-creative-storytelling-through-multi-persona-collaborative-improvisation/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/narrativeloom-enhancing-creative-storytelling-through-multi-persona-collaborative-improvisation/]]></link>
			<title>NarrativeLoom: Enhancing Creative Storytelling through Multi-Persona Collaborative Improvisation</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:55:46 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/read-the-room-video-social-reasoning-with-mental-physical-causal-chains/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/read-the-room-video-social-reasoning-with-mental-physical-causal-chains/]]></link>
			<title>Read the Room: Video Social Reasoning with Mental-Physical Causal Chains</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:47:54 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/scenecot-eliciting-chain-of-thought-reasoning-in-3d-scenes/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/scenecot-eliciting-chain-of-thought-reasoning-in-3d-scenes/]]></link>
			<title>SceneCOT: Eliciting Chain-of-Thought Reasoning in 3D Scenes</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:46:46 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/g4splat-geometry-guided-gaussian-splatting-with-generative-prior/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/g4splat-geometry-guided-gaussian-splatting-with-generative-prior/]]></link>
			<title>G4Splat: Geometry-Guided Gaussian Splatting with Generative Prior</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:45:50 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/linking-process-to-outcome-conditonal-reward-modeling-for-llm-reasoning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/linking-process-to-outcome-conditonal-reward-modeling-for-llm-reasoning/]]></link>
			<title>Linking Process to Outcome: Conditional Reward Modeling for LLM Reasoning</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:44:48 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/23/cross-robot-behavior-adaptation-through-intention-alignment/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/23/cross-robot-behavior-adaptation-through-intention-alignment/]]></link>
			<title>Cross-Robot Behavior Adaptation through Intention Alignment</title>
			<pubDate><![CDATA[Mon, 23 Mar 2026 07:15:41 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e7%a0%94%e7%a9%b6%e6%88%90%e6%9e%9c%e7%99%bb%e3%80%8ascience-robotics%e3%80%8b%ef%bc%9a%e9%a6%96%e6%ac%a1%e5%ae%9e%e7%8e%b0%e5%bc%82%e6%9e%84%e6%9c%ba%e5%99%a8%e4%ba%ba/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e7%a0%94%e7%a9%b6%e6%88%90%e6%9e%9c%e7%99%bb%e3%80%8ascience-robotics%e3%80%8b%ef%bc%9a%e9%a6%96%e6%ac%a1%e5%ae%9e%e7%8e%b0%e5%bc%82%e6%9e%84%e6%9c%ba%e5%99%a8%e4%ba%ba/]]></link>
			<title>通研院研究成果登《Science Robotics》：首次实现异构机器人团队之间基于意图理解的自适应模仿与协作</title>
			<pubDate><![CDATA[Fri, 20 Mar 2026 14:04:19 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e4%ba%a7%e5%93%81%e8%83%bd%e5%8a%9b%e9%a2%86%e8%b7%91%e5%85%a8%e7%90%83%ef%bc%81%e9%80%9a%e7%a0%94%e9%99%a2-tongagents-%e7%99%bb%e4%b8%8a%e5%a4%9a%e9%a1%b9%e5%9b%bd%e9%99%85%e6%99%ba%e8%83%bd/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e4%ba%a7%e5%93%81%e8%83%bd%e5%8a%9b%e9%a2%86%e8%b7%91%e5%85%a8%e7%90%83%ef%bc%81%e9%80%9a%e7%a0%94%e9%99%a2-tongagents-%e7%99%bb%e4%b8%8a%e5%a4%9a%e9%a1%b9%e5%9b%bd%e9%99%85%e6%99%ba%e8%83%bd/]]></link>
			<title>产品能力领跑全球！通研院 TongAgents 登上多项国际智能体榜单</title>
			<pubDate><![CDATA[Fri, 20 Mar 2026 13:55:04 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/aegis-automated-error-generation-and-identification-for-multi-agent-systems/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/aegis-automated-error-generation-and-identification-for-multi-agent-systems/]]></link>
			<title>Aegis: Automated Error Generation and Identification for Multi-Agent Systems</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:43:46 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/dexmove-learning-tactile-guided-non-prehensile-manipulation-with-dexterous-hands/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/dexmove-learning-tactile-guided-non-prehensile-manipulation-with-dexterous-hands/]]></link>
			<title>DexMove: Learning Tactile-Guided Non-Prehensile Manipulation with Dexterous Hands</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:42:38 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e6%99%ba%e9%80%9a%e6%9c%aa%e6%9d%a5-%e8%83%bd%e8%80%85%e8%81%9a%e9%a6%96%ef%bc%9a%e7%ac%ac%e5%8d%81%e5%b1%8a%e5%8c%97%e5%a4%a7%e6%99%ba%e8%83%bd%e9%9d%92%e5%b9%b4%e8%ae%ba%e5%9d%9b/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e6%99%ba%e9%80%9a%e6%9c%aa%e6%9d%a5-%e8%83%bd%e8%80%85%e8%81%9a%e9%a6%96%ef%bc%9a%e7%ac%ac%e5%8d%81%e5%b1%8a%e5%8c%97%e5%a4%a7%e6%99%ba%e8%83%bd%e9%9d%92%e5%b9%b4%e8%ae%ba%e5%9d%9b/]]></link>
			<title>智通未来 能者聚首：第十届北大智能青年论坛</title>
			<pubDate><![CDATA[Tue, 16 Dec 2025 07:45:30 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/12/03/generating-objects-with-part-articulation-from-a-single-image/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/12/03/generating-objects-with-part-articulation-from-a-single-image/]]></link>
			<title>Generating Objects with Part-Articulation from a Single Image</title>
			<pubDate><![CDATA[Wed, 03 Dec 2025 05:54:16 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/towards-brigding-the-gap-between-large-scale-pretraining-and-efficient-finetuning-for-humanoid-control/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/towards-brigding-the-gap-between-large-scale-pretraining-and-efficient-finetuning-for-humanoid-control/]]></link>
			<title>Towards Bridging the Gap Between Large-scale Pretraining and Efficient Finetuning for Humanoid Control</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:41:16 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/stvg-r1-incentivizing-instance-level-reasoning-and-grounding-in-videos-via-reinforcement-learning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/stvg-r1-incentivizing-instance-level-reasoning-and-grounding-in-videos-via-reinforcement-learning/]]></link>
			<title>STVG-R1: Incentivizing Instance-Level Reasoning and Grounding in Videos via Reinforcement Learning</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:40:16 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/mvrmulti-view-video-reward-shaping-for-reinforcement-learning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/mvrmulti-view-video-reward-shaping-for-reinforcement-learning/]]></link>
			<title>MVR: Multi-view Video Reward Shaping for Reinforcement Learning</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:39:07 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/milr-improving-multimodal-image-generation-via-test-time-latent-reasoning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/milr-improving-multimodal-image-generation-via-test-time-latent-reasoning/]]></link>
			<title>MILR: Improving Multimodal Image Generation via Test-Time Latent Reasoning</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:38:07 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/when-large-multimodal-models-confront-evolving-knowledge-challenges-and-explorations/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/when-large-multimodal-models-confront-evolving-knowledge-challenges-and-explorations/]]></link>
			<title>When Large Multimodal Models Confront Evolving Knowledge: Challenges and Explorations</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:36:49 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/learning-what-matters-now-dynamic-preference-inference-under-contextual-shifts/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/learning-what-matters-now-dynamic-preference-inference-under-contextual-shifts/]]></link>
			<title>Learning What Matters Now: Dynamic Preference Inference under Contextual Shifts</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:35:35 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/rulereasoner-reinforced-rule-based-reasoning-via-domain-aware-dynamic-sampling/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/rulereasoner-reinforced-rule-based-reasoning-via-domain-aware-dynamic-sampling/]]></link>
			<title>RuleReasoner: Reinforced Rule-based Reasoning via Domain-aware Dynamic Sampling</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:34:20 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/policon-evaluating-llms-on-achieving-diverse-political-consensus-objectives/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/policon-evaluating-llms-on-achieving-diverse-political-consensus-objectives/]]></link>
			<title>PoliCon: Evaluating LLMs on Achieving Diverse Political Consensus Objectives</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:33:09 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/proposing-and-solving-olympiad-geometry-with-guided-tree-search/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/proposing-and-solving-olympiad-geometry-with-guided-tree-search/]]></link>
			<title>Proposing and solving olympiad geometry with guided tree search</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:31:43 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/tongui-building-generalized-gui-agents-by-learning-from-multimodal-web-tutorials/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/tongui-building-generalized-gui-agents-by-learning-from-multimodal-web-tutorials/]]></link>
			<title>TongUI: Building Generalized GUI Agents by Learning from Multimodal Web Tutorials</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:27:41 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/12/02/a-vr-based-robotic-teleoperation-system-with-haptic-feedback-and-adaptive-collision-avoidance/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/12/02/a-vr-based-robotic-teleoperation-system-with-haptic-feedback-and-adaptive-collision-avoidance/]]></link>
			<title>A VR-Based Robotic Teleoperation System With Haptic Feedback and Adaptive Collision Avoidance</title>
			<pubDate><![CDATA[Tue, 02 Dec 2025 07:33:22 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/3d-scene-change-modeling-with-consistent-multi-view-aggregation/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/3d-scene-change-modeling-with-consistent-multi-view-aggregation/]]></link>
			<title>3D Scene Change Modeling With Consistent Multi-View Aggregation</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:31:59 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e3%80%81%e5%ae%87%e6%a0%91%e6%8f%90%e5%87%ba%e9%a6%96%e4%b8%aa%e6%94%bb%e5%85%8b%e6%b3%9b%e5%8c%96%e5%a3%81%e5%9e%92%e7%9a%84%e9%80%9a%e7%94%a8%e8%bf%90%e5%8a%a8%e6%8e%a7/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e3%80%81%e5%ae%87%e6%a0%91%e6%8f%90%e5%87%ba%e9%a6%96%e4%b8%aa%e6%94%bb%e5%85%8b%e6%b3%9b%e5%8c%96%e5%a3%81%e5%9e%92%e7%9a%84%e9%80%9a%e7%94%a8%e8%bf%90%e5%8a%a8%e6%8e%a7/]]></link>
			<title>通研院、宇树提出首个攻克泛化壁垒的通用运动控制框架OmniXtreme：让机器人解锁“托马斯全旋”等极限动作</title>
			<pubDate><![CDATA[Thu, 05 Mar 2026 02:00:57 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/12/02/m3bench-benchmarking-whole-body-motion-generation-for-mobile-manipulation-in-3d-scenes/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/12/02/m3bench-benchmarking-whole-body-motion-generation-for-mobile-manipulation-in-3d-scenes/]]></link>
			<title>M3Bench: Benchmarking Whole-Body Motion Generation for Mobile Manipulation in 3D Scenes</title>
			<pubDate><![CDATA[Tue, 02 Dec 2025 07:31:51 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2ai%e5%85%a8%e8%83%bd%e7%89%a9%e7%90%86%e7%a9%ba%e9%97%b4%e8%ae%ad%e7%bb%83%e5%9c%batongsim%e5%bc%80%e6%ba%90%e4%b8%8a%e7%ba%bf%ef%bc%8c%e4%b8%a4%e5%a4%a9/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2ai%e5%85%a8%e8%83%bd%e7%89%a9%e7%90%86%e7%a9%ba%e9%97%b4%e8%ae%ad%e7%bb%83%e5%9c%batongsim%e5%bc%80%e6%ba%90%e4%b8%8a%e7%ba%bf%ef%bc%8c%e4%b8%a4%e5%a4%a9/]]></link>
			<title>通研院AI“全能物理空间训练场”TongSIM开源上线，两天登上Huggingface热度趋势榜单第一名</title>
			<pubDate><![CDATA[Mon, 29 Dec 2025 07:45:35 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/2026%e4%ba%ba%e5%b7%a5%e6%99%ba%e8%83%bd%e6%95%99%e8%82%b2%e5%88%9b%e6%96%b0%e5%8f%91%e5%b1%95%e8%ae%ba%e5%9d%9b%e6%9a%a8%e6%b5%b7%e6%b7%80%e8%a1%97%e9%81%93%e9%a6%96%e5%b1%8a%e4%ba%a7%e5%ad%a6/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/2026%e4%ba%ba%e5%b7%a5%e6%99%ba%e8%83%bd%e6%95%99%e8%82%b2%e5%88%9b%e6%96%b0%e5%8f%91%e5%b1%95%e8%ae%ba%e5%9d%9b%e6%9a%a8%e6%b5%b7%e6%b7%80%e8%a1%97%e9%81%93%e9%a6%96%e5%b1%8a%e4%ba%a7%e5%ad%a6/]]></link>
			<title>2026人工智能教育创新发展论坛暨海淀街道首届产学结合高校通用人工智能大赛颁奖仪式成功举办</title>
			<pubDate><![CDATA[Fri, 30 Jan 2026 07:01:43 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e3%80%81%e5%8c%97%e5%a4%a7%e7%aa%81%e7%a0%b4%e6%80%a7%e6%88%90%e6%9e%9c%e7%99%bb%e3%80%8a%e8%87%aa%e7%84%b6%c2%b7%e6%9c%ba%e5%99%a8%e6%99%ba%e8%83%bd%e3%80%8b%ef%bc%9a/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e3%80%81%e5%8c%97%e5%a4%a7%e7%aa%81%e7%a0%b4%e6%80%a7%e6%88%90%e6%9e%9c%e7%99%bb%e3%80%8a%e8%87%aa%e7%84%b6%c2%b7%e6%9c%ba%e5%99%a8%e6%99%ba%e8%83%bd%e3%80%8b%ef%bc%9a/]]></link>
			<title>通研院、北大突破性成果登《自然·机器智能》：全球首个具备自主出题与解题能力的奥数几何AI模型</title>
			<pubDate><![CDATA[Fri, 30 Jan 2026 06:59:30 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e6%95%99%e8%82%b2%e9%83%a8%e5%8f%ac%e5%bc%80%e4%bb%a3%e8%a1%a8%e5%a7%94%e5%91%98%e5%ba%a7%e8%b0%88%e4%bc%9a%ef%bc%8c%e6%9c%b1%e6%9d%be%e7%ba%af%e6%8f%90%e5%87%ba%e6%99%ba%e8%83%bd%e6%97%b6%e4%bb%a3/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e6%95%99%e8%82%b2%e9%83%a8%e5%8f%ac%e5%bc%80%e4%bb%a3%e8%a1%a8%e5%a7%94%e5%91%98%e5%ba%a7%e8%b0%88%e4%bc%9a%ef%bc%8c%e6%9c%b1%e6%9d%be%e7%ba%af%e6%8f%90%e5%87%ba%e6%99%ba%e8%83%bd%e6%97%b6%e4%bb%a3/]]></link>
			<title>教育部召开代表委员座谈会，朱松纯提出智能时代“何以为人”的新内涵</title>
			<pubDate><![CDATA[Fri, 30 Jan 2026 06:57:13 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e5%8f%91%e5%b8%83%e8%a1%8c%e4%b8%9a%e6%99%ba%e8%83%bd%e4%bd%93%e5%9f%ba%e5%87%86%ef%bc%8c%e9%87%8d%e6%96%b0%e5%ae%9a%e4%b9%89%e6%95%b0%e5%ad%97%e5%91%98%e5%b7%a5/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e9%80%9a%e7%a0%94%e9%99%a2%e5%8f%91%e5%b8%83%e8%a1%8c%e4%b8%9a%e6%99%ba%e8%83%bd%e4%bd%93%e5%9f%ba%e5%87%86%ef%bc%8c%e9%87%8d%e6%96%b0%e5%ae%9a%e4%b9%89%e6%95%b0%e5%ad%97%e5%91%98%e5%b7%a5/]]></link>
			<title>通研院发布行业智能体基准，重新定义“数字员工”上岗标准</title>
			<pubDate><![CDATA[Fri, 13 Mar 2026 09:39:52 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/news/%e5%85%b1%e7%ad%91%e6%b5%b7%e6%b7%80ai%e6%95%99%e8%82%b2%e7%94%9f%e6%80%81%e5%9c%88%ef%bc%8c%e9%80%9a%e7%a0%94%e9%99%a2%e6%95%99%e8%82%b2%e7%a7%91%e6%8a%80%e6%88%90%e6%9e%9c%e4%ba%ae/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/news/%e5%85%b1%e7%ad%91%e6%b5%b7%e6%b7%80ai%e6%95%99%e8%82%b2%e7%94%9f%e6%80%81%e5%9c%88%ef%bc%8c%e9%80%9a%e7%a0%94%e9%99%a2%e6%95%99%e8%82%b2%e7%a7%91%e6%8a%80%e6%88%90%e6%9e%9c%e4%ba%ae/]]></link>
			<title>共筑“海淀AI教育生态圈”，通研院教育科技成果亮相海淀人工智能教育庙会</title>
			<pubDate><![CDATA[Fri, 06 Feb 2026 08:00:18 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/12/02/integration-of-robot-and-scene-kinematics-for-sequential-mobile-manipulation-planning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/12/02/integration-of-robot-and-scene-kinematics-for-sequential-mobile-manipulation-planning/]]></link>
			<title>Integration of Robot and Scene Kinematics for Sequential Mobile Manipulation Planning</title>
			<pubDate><![CDATA[Tue, 02 Dec 2025 07:30:30 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/12/02/pr2-a-physics-and-photo-realistic-humanoid-testbed-with-pilot-study-in-competition/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/12/02/pr2-a-physics-and-photo-realistic-humanoid-testbed-with-pilot-study-in-competition/]]></link>
			<title>PR2: A Physics- and Photo-realistic Humanoid Testbed with Pilot Study in Competition</title>
			<pubDate><![CDATA[Tue, 02 Dec 2025 07:28:36 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/adaptive-preference-optimization-with-uncertainty-aware-utility-anchor/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/adaptive-preference-optimization-with-uncertainty-aware-utility-anchor/]]></link>
			<title>Adaptive Preference Optimization with Uncertainty-aware Utility Anchor</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 08:51:56 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/reinforced-query-reasoners-for-reasoning-intensive-retrieval-tasks/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/reinforced-query-reasoners-for-reasoning-intensive-retrieval-tasks/]]></link>
			<title>Reinforced Query Reasoners for Reasoning-intensive Retrieval Tasks</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 08:50:45 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/mind-the-gap-the-divergence-between-human-and-llm-generated-tasks/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/mind-the-gap-the-divergence-between-human-and-llm-generated-tasks/]]></link>
			<title>Mind the Gap: The Divergence Between Human and LLM-Generated Tasks</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:28:50 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/understanding-and-leveraging-the-expert-specialization-of-context-faithfulness-in-mixture-of-experts-llms/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/understanding-and-leveraging-the-expert-specialization-of-context-faithfulness-in-mixture-of-experts-llms/]]></link>
			<title>Understanding and Leveraging the Expert Specialization of Context Faithfulness in Mixture-of-Experts LLMs</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 08:49:38 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/enhancing-llm-based-social-bot-via-an-adversarial-learning-framework/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/enhancing-llm-based-social-bot-via-an-adversarial-learning-framework/]]></link>
			<title>Enhancing LLM-Based Social Bot via an Adversarial Learning Framework</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 08:48:13 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/from-objects-to-anywhere-a-holistic-benchmark-for-multi-level-visual-grounding-in-3d-scenes/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/from-objects-to-anywhere-a-holistic-benchmark-for-multi-level-visual-grounding-in-3d-scenes/]]></link>
			<title>From Objects to Anywhere: A Holistic Benchmark for Multi-level Visual Grounding in 3D Scenes</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 03:04:39 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/iterative-tool-usage-exploration-for-multimodal-agents-via-step-wise-preference-tuning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/iterative-tool-usage-exploration-for-multimodal-agents-via-step-wise-preference-tuning/]]></link>
			<title>Iterative Tool Usage Exploration for Multimodal Agents via Step-wise Preference Tuning</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:59:59 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/all-in-one-3d-scene-synthesis-with-an-extensible-and-self-reflective-agent/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/all-in-one-3d-scene-synthesis-with-an-extensible-and-self-reflective-agent/]]></link>
			<title>All-in-one 3D Scene Synthesis with an Extensible and Self-Reflective Agent</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:58:40 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/reasoning-with-exploration-an-entropy-perspective/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/reasoning-with-exploration-an-entropy-perspective/]]></link>
			<title>Reasoning with Exploration: An Entropy Perspective</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:26:32 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/taccel-scaling-up-vision-based-tactile-robotics-via-high-performance-gpu-simulation/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/taccel-scaling-up-vision-based-tactile-robotics-via-high-performance-gpu-simulation/]]></link>
			<title>Taccel: Scaling Up Vision-based Tactile Robotics via High-performance GPU Simulation</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:57:36 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/absolute-zero-reinforced-self-play-reasoning-with-zero-data/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/absolute-zero-reinforced-self-play-reasoning-with-zero-data/]]></link>
			<title>Absolute Zero: Reinforced Self-play Reasoning with Zero Data</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:53:56 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2026/03/19/adapt-adaptive-decentralized-architecture-with-perception-aligned-training-for-structural-generalization-in-multi-agent-rl/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2026/03/19/adapt-adaptive-decentralized-architecture-with-perception-aligned-training-for-structural-generalization-in-multi-agent-rl/]]></link>
			<title>ADAPT: Adaptive Decentralized Architecture with Perception-aligned Training for Structural Generalization in Multi-Agent RL</title>
			<pubDate><![CDATA[Thu, 19 Mar 2026 09:24:50 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/world-models-should-prioritize-the-unification-of-physical-and-social-dynamics/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/world-models-should-prioritize-the-unification-of-physical-and-social-dynamics/]]></link>
			<title>World Models Should Prioritize the Unification of Physical and Social Dynamics</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:52:26 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/blog/2025/11/27/social-world-model-augmented-mechanism-design-policy-learning/]]></guid>
			<link><![CDATA[https://www.bigai.ai/blog/2025/11/27/social-world-model-augmented-mechanism-design-policy-learning/]]></link>
			<title>Social World Model-Augmented Mechanism Design Policy Learning</title>
			<pubDate><![CDATA[Thu, 27 Nov 2025 02:50:41 +0000]]></pubDate>
		</item>
					<item>
			<guid><![CDATA[https://www.bigai.ai/]]></guid>
			<link><![CDATA[https://www.bigai.ai/]]></link>
			<title>主页</title>
			<pubDate><![CDATA[Fri, 30 Jan 2026 07:38:40 +0000]]></pubDate>
		</item>
				</channel>
</rss>
