[{"title":"( 46 个子文件 65.18MB ) ⑩大模型-全套.rar","children":[{"title":"⑤、大模型八股文面试","children":[{"title":"22-检索增强生成(RAG) 优化策略篇.pdf <span style='color:#111;'> 2.64MB </span>","children":null,"spread":false},{"title":"7-相似度函数篇.pdf <span style='color:#111;'> 174.95KB </span>","children":null,"spread":false},{"title":"41-大模型(LLMs)LLM生成SFT数据方法面.pdf <span style='color:#111;'> 731.09KB </span>","children":null,"spread":false},{"title":"15-大模型 RAG 经验面.pdf <span style='color:#111;'> 1.41MB </span>","children":null,"spread":false},{"title":"21-RAG(Retrieval-Augmented Generation)评测面.pdf <span style='color:#111;'> 616.73KB </span>","children":null,"spread":false},{"title":"26-大模型(LLMs)参数高效微调(PEFT) 面.pdf <span style='color:#111;'> 1.52MB </span>","children":null,"spread":false},{"title":"1-大模型(LLMs)基础面.pdf <span style='color:#111;'> 481.56KB </span>","children":null,"spread":false},{"title":"4-Attention 升级面.pdf <span style='color:#111;'> 410.36KB </span>","children":null,"spread":false},{"title":"3-LLMs 激活函数篇.pdf <span style='color:#111;'> 374.98KB </span>","children":null,"spread":false},{"title":"23-大模型(LLMs)RAG —— 关键痛点及对应解决方案.pdf <span style='color:#111;'> 1.32MB </span>","children":null,"spread":false},{"title":"13-基于langchain RAG问答应用实战.pdf <span style='color:#111;'> 375.71KB </span>","children":null,"spread":false},{"title":"11-大模型(LLMs)langchain 面.pdf <span style='color:#111;'> 630.97KB </span>","children":null,"spread":false},{"title":"8-大模型(LLMs)进阶面.pdf <span style='color:#111;'> 1018.61KB </span>","children":null,"spread":false},{"title":"17-大模型(LLMs)RAG 版面分析——表格识别方法篇.pdf <span style='color:#111;'> 661.74KB </span>","children":null,"spread":false},{"title":"40-大模型(LLMs)训练集面.pdf <span style='color:#111;'> 304.68KB </span>","children":null,"spread":false},{"title":"35-大模型(LLMs)评测面.pdf <span style='color:#111;'> 252.63KB </span>","children":null,"spread":false},{"title":"34-基于lora的llama2二次预训练.pdf <span style='color:#111;'> 2.26MB </span>","children":null,"spread":false},{"title":"6-LLMs 损失函数篇.pdf <span style='color:#111;'> 355.57KB </span>","children":null,"spread":false},{"title":"39-强化学习在自然语言处理下的应用篇.pdf <span style='color:#111;'> 571.58KB </span>","children":null,"spread":false},{"title":"36-大模型(LLMs)强化学习面.pdf <span style='color:#111;'> 277.61KB </span>","children":null,"spread":false},{"title":"28-提示学习(Prompting)篇.pdf <span style='color:#111;'> 446.51KB </span>","children":null,"spread":false},{"title":"29-LoRA 系列篇.pdf <span style='color:#111;'> 767.28KB </span>","children":null,"spread":false},{"title":"24-大模型(LLMs)RAG 优化策略 —— RAG-Fusion篇.pdf <span style='color:#111;'> 1.06MB </span>","children":null,"spread":false},{"title":"32-大模型(LLMs)增量预训练篇.pdf <span style='color:#111;'> 904.59KB </span>","children":null,"spread":false},{"title":"2-Layer normalization 篇.pdf <span style='color:#111;'> 488.55KB </span>","children":null,"spread":false},{"title":"38-大模型(LLMs)强化学习—— PPO 面.pdf <span style='color:#111;'> 270.99KB </span>","children":null,"spread":false},{"title":"30-如何使用 PEFT库 中 LoRA?.pdf <span style='color:#111;'> 695.69KB </span>","children":null,"spread":false},{"title":"18-大模型(LLMs)RAG 版面分析——文本分块面.pdf <span style='color:#111;'> 482.88KB </span>","children":null,"spread":false},{"title":"42-大模型(LLMs)显存问题面.pdf <span style='color:#111;'> 525.45KB </span>","children":null,"spread":false},{"title":"20-大模型外挂知识库优化——负样本样本挖掘篇.pdf <span style='color:#111;'> 704.02KB </span>","children":null,"spread":false},{"title":"27-适配器微调(Adapter-tuning)篇.pdf <span style='color:#111;'> 184.62KB 
</span>","children":null,"spread":false},{"title":"14-基于LLM+向量库的文档对话 经验面.pdf <span style='color:#111;'> 2.16MB </span>","children":null,"spread":false},{"title":"16-LLM文档对话 —— pdf解析关键问题.pdf <span style='color:#111;'> 2.14MB </span>","children":null,"spread":false},{"title":"9-大模型(LLMs)微调面.pdf <span style='color:#111;'> 2.89MB </span>","children":null,"spread":false},{"title":"25-Graph RAG 面 — 一种 基于知识图谱的大模型检索增强实现策略.pdf <span style='color:#111;'> 951.70KB </span>","children":null,"spread":false},{"title":"10-LLMs 训练经验帖.pdf <span style='color:#111;'> 253.51KB </span>","children":null,"spread":false},{"title":"43-显存优化策略篇.pdf <span style='color:#111;'> 264.76KB </span>","children":null,"spread":false},{"title":"12-多轮对话中让AI保持长期记忆的8种优化方式篇.pdf <span style='color:#111;'> 362.00KB </span>","children":null,"spread":false},{"title":"33-增量预训练(Pretrain)样本拼接篇.pdf <span style='color:#111;'> 379.90KB </span>","children":null,"spread":false},{"title":"31-大模型(LLMs)推理面.pdf <span style='color:#111;'> 675.29KB </span>","children":null,"spread":false},{"title":"19-大模型外挂知识库优化——如何利用大模型辅助召回?.pdf <span style='color:#111;'> 733.87KB </span>","children":null,"spread":false},{"title":"5-transformers 操作篇.pdf <span style='color:#111;'> 227.10KB </span>","children":null,"spread":false},{"title":"37-大模型(LLMs)强化学习——RLHF及其变种面.pdf <span style='color:#111;'> 2.42MB </span>","children":null,"spread":false}],"spread":false},{"title":"④、大模型实战落地案例","children":[{"title":"103:大模型应用开发极简入门:基于 GPT-4 和 ChatGPT_2024.pdf <span style='color:#111;'> 6.44MB </span>","children":null,"spread":false},{"title":"大模型落地应用案例集.pdf <span style='color:#111;'> 6.57MB </span>","children":null,"spread":false},{"title":"大规模语言模型:从理论到实践.pdf <span style='color:#111;'> 26.91MB </span>","children":null,"spread":false}],"spread":true}],"spread":true}]