[{"data":1,"prerenderedAt":805},["ShallowReactive",2],{"/en-us/blog/categories/ai-ml":3,"navigation-en-us":21,"banner-en-us":442,"footer-en-us":452,"ai-ml-category-page-total-items-en-us":694,"ai-ml-category-page-featured-en-us":695,"ai-ml-category-page-7-en-us":723},{"id":4,"title":5,"body":6,"category":6,"config":7,"content":11,"description":6,"extension":13,"meta":14,"navigation":15,"path":16,"seo":17,"slug":6,"stem":19,"testContent":6,"type":6,"__hash__":20},"blogCategories/en-us/blog/categories/ai-ml.yml","Ai Ml",null,{"template":8,"slug":9,"hide":10},"BlogCategory","ai-ml",false,{"name":12},"AI/ML","yml",{},true,"/en-us/blog/categories/ai-ml",{"title":12,"description":18},"Browse articles related to AI/ML on the GitLab Blog","en-us/blog/categories/ai-ml","rEmbrx2EiZNCvAIStbsvNZ9AHldybfSqbXcgAnBzrUY",{"logo":22,"freeTrial":27,"sales":32,"login":37,"items":42,"search":362,"minimal":393,"duo":412,"switchNav":421,"pricingDeployment":432},{"config":23},{"href":24,"dataGaName":25,"dataGaLocation":26},"/","gitlab logo","header",{"text":28,"config":29},"Get free trial",{"href":30,"dataGaName":31,"dataGaLocation":26},"https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com&glm_content=default-saas-trial/","free trial",{"text":33,"config":34},"Talk to sales",{"href":35,"dataGaName":36,"dataGaLocation":26},"/sales/","sales",{"text":38,"config":39},"Sign in",{"href":40,"dataGaName":41,"dataGaLocation":26},"https://gitlab.com/users/sign_in/","sign in",[43,72,172,177,281,342],{"text":44,"config":45,"menu":47},"Platform",{"dataNavLevelOne":46},"platform",{"type":48,"columns":49},"cards",[50,56,64],{"title":44,"description":51,"link":52},"The intelligent orchestration platform for DevSecOps",{"text":53,"config":54},"Explore our Platform",{"href":55,"dataGaName":46,"dataGaLocation":26},"/platform/",{"title":57,"description":58,"link":59},"GitLab Duo Agent Platform","Agentic AI for the entire software lifecycle",{"text":60,"config":61},"Meet GitLab 
Duo",{"href":62,"dataGaName":63,"dataGaLocation":26},"/gitlab-duo-agent-platform/","gitlab duo agent platform",{"title":65,"description":66,"link":67},"Why GitLab","See the top reasons enterprises choose GitLab",{"text":68,"config":69},"Learn more",{"href":70,"dataGaName":71,"dataGaLocation":26},"/why-gitlab/","why gitlab",{"text":73,"left":15,"config":74,"menu":76},"Product",{"dataNavLevelOne":75},"solutions",{"type":77,"link":78,"columns":82,"feature":151},"lists",{"text":79,"config":80},"View all Solutions",{"href":81,"dataGaName":75,"dataGaLocation":26},"/solutions/",[83,107,130],{"title":84,"description":85,"link":86,"items":91},"Automation","CI/CD and automation to accelerate deployment",{"config":87},{"icon":88,"href":89,"dataGaName":90,"dataGaLocation":26},"AutomatedCodeAlt","/solutions/delivery-automation/","automated software delivery",[92,96,99,103],{"text":93,"config":94},"CI/CD",{"href":95,"dataGaLocation":26,"dataGaName":93},"/solutions/continuous-integration/",{"text":57,"config":97},{"href":62,"dataGaLocation":26,"dataGaName":98},"gitlab duo agent platform - product menu",{"text":100,"config":101},"Source Code Management",{"href":102,"dataGaLocation":26,"dataGaName":100},"/solutions/source-code-management/",{"text":104,"config":105},"Automated Software Delivery",{"href":89,"dataGaLocation":26,"dataGaName":106},"Automated software delivery",{"title":108,"description":109,"link":110,"items":115},"Security","Deliver code faster without compromising security",{"config":111},{"href":112,"dataGaName":113,"dataGaLocation":26,"icon":114},"/solutions/application-security-testing/","security and compliance","ShieldCheckLight",[116,120,125],{"text":117,"config":118},"Application Security Testing",{"href":112,"dataGaName":119,"dataGaLocation":26},"Application security testing",{"text":121,"config":122},"Software Supply Chain Security",{"href":123,"dataGaLocation":26,"dataGaName":124},"/solutions/supply-chain/","Software supply chain 
security",{"text":126,"config":127},"Software Compliance",{"href":128,"dataGaName":129,"dataGaLocation":26},"/solutions/software-compliance/","software compliance",{"title":131,"link":132,"items":137},"Measurement",{"config":133},{"icon":134,"href":135,"dataGaName":136,"dataGaLocation":26},"DigitalTransformation","/solutions/visibility-measurement/","visibility and measurement",[138,142,146],{"text":139,"config":140},"Visibility & Measurement",{"href":135,"dataGaLocation":26,"dataGaName":141},"Visibility and Measurement",{"text":143,"config":144},"Value Stream Management",{"href":145,"dataGaLocation":26,"dataGaName":143},"/solutions/value-stream-management/",{"text":147,"config":148},"Analytics & Insights",{"href":149,"dataGaLocation":26,"dataGaName":150},"/solutions/analytics-and-insights/","Analytics and insights",{"title":152,"type":77,"items":153},"GitLab for",[154,160,166],{"text":155,"config":156},"Enterprise",{"icon":157,"href":158,"dataGaLocation":26,"dataGaName":159},"Building","/enterprise/","enterprise",{"text":161,"config":162},"Small Business",{"icon":163,"href":164,"dataGaLocation":26,"dataGaName":165},"Work","/small-business/","small business",{"text":167,"config":168},"Public Sector",{"icon":169,"href":170,"dataGaLocation":26,"dataGaName":171},"Organization","/solutions/public-sector/","public sector",{"text":173,"config":174},"Pricing",{"href":175,"dataGaName":176,"dataGaLocation":26,"dataNavLevelOne":176},"/pricing/","pricing",{"text":178,"config":179,"menu":181},"Resources",{"dataNavLevelOne":180},"resources",{"type":77,"link":182,"columns":186,"feature":270},{"text":183,"config":184},"View all resources",{"href":185,"dataGaName":180,"dataGaLocation":26},"/resources/",[187,220,242],{"title":188,"items":189},"Getting started",[190,195,200,205,210,215],{"text":191,"config":192},"Install",{"href":193,"dataGaName":194,"dataGaLocation":26},"/install/","install",{"text":196,"config":197},"Quick start 
guides",{"href":198,"dataGaName":199,"dataGaLocation":26},"/get-started/","quick setup checklists",{"text":201,"config":202},"Learn",{"href":203,"dataGaLocation":26,"dataGaName":204},"https://university.gitlab.com/","learn",{"text":206,"config":207},"Product documentation",{"href":208,"dataGaName":209,"dataGaLocation":26},"https://docs.gitlab.com/","product documentation",{"text":211,"config":212},"Best practice videos",{"href":213,"dataGaName":214,"dataGaLocation":26},"/getting-started-videos/","best practice videos",{"text":216,"config":217},"Integrations",{"href":218,"dataGaName":219,"dataGaLocation":26},"/integrations/","integrations",{"title":221,"items":222},"Discover",[223,228,233,237],{"text":224,"config":225},"Customer success stories",{"href":226,"dataGaName":227,"dataGaLocation":26},"/customers/","customer success stories",{"text":229,"config":230},"Blog",{"href":231,"dataGaName":232,"dataGaLocation":26},"/blog/","blog",{"text":234,"config":235},"The Source",{"href":236,"dataGaName":232,"dataGaLocation":26},"/the-source/",{"text":238,"config":239},"Remote",{"href":240,"dataGaName":241,"dataGaLocation":26},"https://handbook.gitlab.com/handbook/company/culture/all-remote/","remote",{"title":243,"items":244},"Connect",[245,250,255,260,265],{"text":246,"config":247},"GitLab 
Services",{"href":248,"dataGaName":249,"dataGaLocation":26},"/services/","services",{"text":251,"config":252},"Community",{"href":253,"dataGaName":254,"dataGaLocation":26},"/community/","community",{"text":256,"config":257},"Forum",{"href":258,"dataGaName":259,"dataGaLocation":26},"https://forum.gitlab.com/","forum",{"text":261,"config":262},"Events",{"href":263,"dataGaName":264,"dataGaLocation":26},"/events/","events",{"text":266,"config":267},"Partners",{"href":268,"dataGaName":269,"dataGaLocation":26},"/partners/","partners",{"config":271,"title":274,"text":275,"link":276},{"background":272,"textColor":273},"url('https://res.cloudinary.com/about-gitlab-com/image/upload/v1777322348/qpq8yrgn8knii57omj0c.png')","#000","What’s new in GitLab","Stay updated with our latest features and improvements.",{"text":277,"config":278},"Read the latest",{"href":279,"dataGaName":280,"dataGaLocation":26},"/releases/whats-new/","whats new",{"text":282,"config":283,"menu":285},"Company",{"dataNavLevelOne":284},"company",{"type":77,"columns":286},[287],{"items":288},[289,294,300,302,307,312,317,322,327,332,337],{"text":290,"config":291},"About",{"href":292,"dataGaName":293,"dataGaLocation":26},"/company/","about",{"text":295,"config":296,"footerGa":299},"Jobs",{"href":297,"dataGaName":298,"dataGaLocation":26},"/jobs/","jobs",{"dataGaName":298},{"text":261,"config":301},{"href":263,"dataGaName":264,"dataGaLocation":26},{"text":303,"config":304},"Leadership",{"href":305,"dataGaName":306,"dataGaLocation":26},"/company/team/e-group/","leadership",{"text":308,"config":309},"Team",{"href":310,"dataGaName":311,"dataGaLocation":26},"/company/team/","team",{"text":313,"config":314},"Handbook",{"href":315,"dataGaName":316,"dataGaLocation":26},"https://handbook.gitlab.com/","handbook",{"text":318,"config":319},"Investor relations",{"href":320,"dataGaName":321,"dataGaLocation":26},"https://ir.gitlab.com/","investor relations",{"text":323,"config":324},"Trust 
Center",{"href":325,"dataGaName":326,"dataGaLocation":26},"/security/","trust center",{"text":328,"config":329},"AI Transparency Center",{"href":330,"dataGaName":331,"dataGaLocation":26},"/ai-transparency-center/","ai transparency center",{"text":333,"config":334},"Newsletter",{"href":335,"dataGaName":336,"dataGaLocation":26},"/company/contact/#contact-forms","newsletter",{"text":338,"config":339},"Press",{"href":340,"dataGaName":341,"dataGaLocation":26},"/press/","press",{"text":343,"config":344,"menu":345},"Contact us",{"dataNavLevelOne":284},{"type":77,"columns":346},[347],{"items":348},[349,352,357],{"text":33,"config":350},{"href":35,"dataGaName":351,"dataGaLocation":26},"talk to sales",{"text":353,"config":354},"Support portal",{"href":355,"dataGaName":356,"dataGaLocation":26},"https://support.gitlab.com","support portal",{"text":358,"config":359},"Customer portal",{"href":360,"dataGaName":361,"dataGaLocation":26},"https://customers.gitlab.com/customers/sign_in/","customer portal",{"close":363,"login":364,"suggestions":371},"Close",{"text":365,"link":366},"To search repositories and projects, login to",{"text":367,"config":368},"gitlab.com",{"href":40,"dataGaName":369,"dataGaLocation":370},"search login","search",{"text":372,"default":373},"Suggestions",[374,376,380,382,386,390],{"text":57,"config":375},{"href":62,"dataGaName":57,"dataGaLocation":370},{"text":377,"config":378},"Code Suggestions (AI)",{"href":379,"dataGaName":377,"dataGaLocation":370},"/solutions/code-suggestions/",{"text":93,"config":381},{"href":95,"dataGaName":93,"dataGaLocation":370},{"text":383,"config":384},"GitLab on AWS",{"href":385,"dataGaName":383,"dataGaLocation":370},"/partners/technology-partners/aws/",{"text":387,"config":388},"GitLab on Google Cloud",{"href":389,"dataGaName":387,"dataGaLocation":370},"/partners/technology-partners/google-cloud-platform/",{"text":391,"config":392},"Why 
GitLab?",{"href":70,"dataGaName":391,"dataGaLocation":370},{"freeTrial":394,"mobileIcon":399,"desktopIcon":404,"secondaryButton":407},{"text":395,"config":396},"Start free trial",{"href":397,"dataGaName":31,"dataGaLocation":398},"https://gitlab.com/-/trials/new/","nav",{"altText":400,"config":401},"Gitlab Icon",{"src":402,"dataGaName":403,"dataGaLocation":398},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758203874/jypbw1jx72aexsoohd7x.svg","gitlab icon",{"altText":400,"config":405},{"src":406,"dataGaName":403,"dataGaLocation":398},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758203875/gs4c8p8opsgvflgkswz9.svg",{"text":408,"config":409},"Get Started",{"href":410,"dataGaName":411,"dataGaLocation":398},"https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/get-started/","get started",{"freeTrial":413,"mobileIcon":417,"desktopIcon":419},{"text":414,"config":415},"Learn more about GitLab Duo",{"href":62,"dataGaName":416,"dataGaLocation":398},"gitlab duo",{"altText":400,"config":418},{"src":402,"dataGaName":403,"dataGaLocation":398},{"altText":400,"config":420},{"src":406,"dataGaName":403,"dataGaLocation":398},{"button":422,"mobileIcon":427,"desktopIcon":429},{"text":423,"config":424},"/switch",{"href":425,"dataGaName":426,"dataGaLocation":398},"#contact","switch",{"altText":400,"config":428},{"src":402,"dataGaName":403,"dataGaLocation":398},{"altText":400,"config":430},{"src":431,"dataGaName":403,"dataGaLocation":398},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1773335277/ohhpiuoxoldryzrnhfrh.png",{"freeTrial":433,"mobileIcon":438,"desktopIcon":440},{"text":434,"config":435},"Back to pricing",{"href":175,"dataGaName":436,"dataGaLocation":398,"icon":437},"back to pricing","GoBack",{"altText":400,"config":439},{"src":402,"dataGaName":403,"dataGaLocation":398},{"altText":400,"config":441},{"src":406,"dataGaName":403,"dataGaLocation":398},{"title":443,"button":444,"config":449},"See how agentic AI transforms 
software delivery",{"text":445,"config":446},"Sign up for GitLab Transcend on June 10",{"href":447,"dataGaName":448,"dataGaLocation":26},"/releases/whats-new/#sign-up","transcend event",{"layout":450,"icon":451,"disabled":10},"release","AiStar",{"data":453},{"text":454,"source":455,"edit":461,"contribute":466,"config":471,"items":476,"minimal":683},"Git is a trademark of Software Freedom Conservancy and our use of 'GitLab' is under license",{"text":456,"config":457},"View page source",{"href":458,"dataGaName":459,"dataGaLocation":460},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/","page source","footer",{"text":462,"config":463},"Edit this page",{"href":464,"dataGaName":465,"dataGaLocation":460},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/content/","web ide",{"text":467,"config":468},"Please contribute",{"href":469,"dataGaName":470,"dataGaLocation":460},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/CONTRIBUTING.md/","please contribute",{"twitter":472,"facebook":473,"youtube":474,"linkedin":475},"https://twitter.com/gitlab","https://www.facebook.com/gitlab","https://www.youtube.com/channel/UCnMGQ8QHMAnVIsI3xJrihhg","https://www.linkedin.com/company/gitlab-com",[477,524,578,622,649],{"title":173,"links":478,"subMenu":493},[479,483,488],{"text":480,"config":481},"View plans",{"href":175,"dataGaName":482,"dataGaLocation":460},"view plans",{"text":484,"config":485},"Why Premium?",{"href":486,"dataGaName":487,"dataGaLocation":460},"/pricing/premium/","why premium",{"text":489,"config":490},"Why Ultimate?",{"href":491,"dataGaName":492,"dataGaLocation":460},"/pricing/ultimate/","why ultimate",[494],{"title":495,"links":496},"Contact Us",[497,500,502,504,509,514,519],{"text":498,"config":499},"Contact 
sales",{"href":35,"dataGaName":36,"dataGaLocation":460},{"text":353,"config":501},{"href":355,"dataGaName":356,"dataGaLocation":460},{"text":358,"config":503},{"href":360,"dataGaName":361,"dataGaLocation":460},{"text":505,"config":506},"Status",{"href":507,"dataGaName":508,"dataGaLocation":460},"https://status.gitlab.com/","status",{"text":510,"config":511},"Terms of use",{"href":512,"dataGaName":513,"dataGaLocation":460},"/terms/","terms of use",{"text":515,"config":516},"Privacy statement",{"href":517,"dataGaName":518,"dataGaLocation":460},"/privacy/","privacy statement",{"text":520,"config":521},"Cookie preferences",{"dataGaName":522,"dataGaLocation":460,"id":523,"isOneTrustButton":15},"cookie preferences","ot-sdk-btn",{"title":73,"links":525,"subMenu":534},[526,530],{"text":527,"config":528},"DevSecOps platform",{"href":55,"dataGaName":529,"dataGaLocation":460},"devsecops platform",{"text":531,"config":532},"AI-Assisted Development",{"href":62,"dataGaName":533,"dataGaLocation":460},"ai-assisted development",[535],{"title":536,"links":537},"Topics",[538,543,548,553,558,563,568,573],{"text":539,"config":540},"CICD",{"href":541,"dataGaName":542,"dataGaLocation":460},"/topics/ci-cd/","cicd",{"text":544,"config":545},"GitOps",{"href":546,"dataGaName":547,"dataGaLocation":460},"/topics/gitops/","gitops",{"text":549,"config":550},"DevOps",{"href":551,"dataGaName":552,"dataGaLocation":460},"/topics/devops/","devops",{"text":554,"config":555},"Version Control",{"href":556,"dataGaName":557,"dataGaLocation":460},"/topics/version-control/","version control",{"text":559,"config":560},"DevSecOps",{"href":561,"dataGaName":562,"dataGaLocation":460},"/topics/devsecops/","devsecops",{"text":564,"config":565},"Cloud Native",{"href":566,"dataGaName":567,"dataGaLocation":460},"/topics/cloud-native/","cloud native",{"text":569,"config":570},"AI for Coding",{"href":571,"dataGaName":572,"dataGaLocation":460},"/topics/devops/ai-for-coding/","ai for 
coding",{"text":574,"config":575},"Agentic AI",{"href":576,"dataGaName":577,"dataGaLocation":460},"/topics/agentic-ai/","agentic ai",{"title":579,"links":580},"Solutions",[581,583,585,590,594,597,601,604,606,609,612,617],{"text":117,"config":582},{"href":112,"dataGaName":117,"dataGaLocation":460},{"text":106,"config":584},{"href":89,"dataGaName":90,"dataGaLocation":460},{"text":586,"config":587},"Agile development",{"href":588,"dataGaName":589,"dataGaLocation":460},"/solutions/agile-delivery/","agile delivery",{"text":591,"config":592},"SCM",{"href":102,"dataGaName":593,"dataGaLocation":460},"source code management",{"text":539,"config":595},{"href":95,"dataGaName":596,"dataGaLocation":460},"continuous integration & delivery",{"text":598,"config":599},"Value stream management",{"href":145,"dataGaName":600,"dataGaLocation":460},"value stream management",{"text":544,"config":602},{"href":603,"dataGaName":547,"dataGaLocation":460},"/solutions/gitops/",{"text":155,"config":605},{"href":158,"dataGaName":159,"dataGaLocation":460},{"text":607,"config":608},"Small business",{"href":164,"dataGaName":165,"dataGaLocation":460},{"text":610,"config":611},"Public sector",{"href":170,"dataGaName":171,"dataGaLocation":460},{"text":613,"config":614},"Education",{"href":615,"dataGaName":616,"dataGaLocation":460},"/solutions/education/","education",{"text":618,"config":619},"Financial services",{"href":620,"dataGaName":621,"dataGaLocation":460},"/solutions/finance/","financial 
services",{"title":178,"links":623},[624,626,628,630,633,635,637,639,641,643,645,647],{"text":191,"config":625},{"href":193,"dataGaName":194,"dataGaLocation":460},{"text":196,"config":627},{"href":198,"dataGaName":199,"dataGaLocation":460},{"text":201,"config":629},{"href":203,"dataGaName":204,"dataGaLocation":460},{"text":206,"config":631},{"href":208,"dataGaName":632,"dataGaLocation":460},"docs",{"text":229,"config":634},{"href":231,"dataGaName":232,"dataGaLocation":460},{"text":224,"config":636},{"href":226,"dataGaName":227,"dataGaLocation":460},{"text":238,"config":638},{"href":240,"dataGaName":241,"dataGaLocation":460},{"text":246,"config":640},{"href":248,"dataGaName":249,"dataGaLocation":460},{"text":251,"config":642},{"href":253,"dataGaName":254,"dataGaLocation":460},{"text":256,"config":644},{"href":258,"dataGaName":259,"dataGaLocation":460},{"text":261,"config":646},{"href":263,"dataGaName":264,"dataGaLocation":460},{"text":266,"config":648},{"href":268,"dataGaName":269,"dataGaLocation":460},{"title":282,"links":650},[651,653,655,657,659,661,663,667,672,674,676,678],{"text":290,"config":652},{"href":292,"dataGaName":284,"dataGaLocation":460},{"text":295,"config":654},{"href":297,"dataGaName":298,"dataGaLocation":460},{"text":303,"config":656},{"href":305,"dataGaName":306,"dataGaLocation":460},{"text":308,"config":658},{"href":310,"dataGaName":311,"dataGaLocation":460},{"text":313,"config":660},{"href":315,"dataGaName":316,"dataGaLocation":460},{"text":318,"config":662},{"href":320,"dataGaName":321,"dataGaLocation":460},{"text":664,"config":665},"Sustainability",{"href":666,"dataGaName":664,"dataGaLocation":460},"/sustainability/",{"text":668,"config":669},"Diversity, inclusion and belonging (DIB)",{"href":670,"dataGaName":671,"dataGaLocation":460},"/diversity-inclusion-belonging/","Diversity, inclusion and 
belonging",{"text":323,"config":673},{"href":325,"dataGaName":326,"dataGaLocation":460},{"text":333,"config":675},{"href":335,"dataGaName":336,"dataGaLocation":460},{"text":338,"config":677},{"href":340,"dataGaName":341,"dataGaLocation":460},{"text":679,"config":680},"Modern Slavery Transparency Statement",{"href":681,"dataGaName":682,"dataGaLocation":460},"https://handbook.gitlab.com/handbook/legal/modern-slavery-act-transparency-statement/","modern slavery transparency statement",{"items":684},[685,688,691],{"text":686,"config":687},"Terms",{"href":512,"dataGaName":513,"dataGaLocation":460},{"text":689,"config":690},"Cookies",{"dataGaName":522,"dataGaLocation":460,"id":523,"isOneTrustButton":15},{"text":692,"config":693},"Privacy",{"href":517,"dataGaName":518,"dataGaLocation":460},147,{"id":696,"title":697,"authorSlugs":698,"authors":700,"body":702,"category":9,"categorySlug":9,"config":703,"content":706,"date":709,"description":707,"extension":13,"externalUrl":6,"featured":15,"heroImage":710,"isFeatured":15,"meta":713,"navigation":15,"path":714,"publishedDate":709,"rawbody":715,"seo":716,"slug":705,"stem":718,"tagSlugs":719,"tags":721,"template":704,"updatedDate":6,"__hash__":722},"blogPosts/en-us/blog/atlassian-will-train-on-your-data-opt-out-with-gitlab.yml","Atlassian will train on your data: Opt out with GitLab",[699],"jessica-hurwitz",[701],"Jessica Hurwitz","Starting August 17, 2026, Atlassian will begin collecting customer metadata and in-app content from Jira, Confluence, and other cloud products to train its AI offerings, including Rovo and Rovo Dev. This announcement comes after [GitHub recently changed its Copilot data usage policy](https://about.gitlab.com/blog/github-copilots-new-policy-for-ai-training-is-a-governance-wake-up-call/). **Taken together, these changes suggest opt-out-by-default is becoming the industry norm. 
GitLab takes the opposite position: no data collection, no AI training on customer data, no matter what tier you're on.**\n\n[Atlassian's change](https://www.atlassian.com/trust/ai/data-contribution) is enabled by default for all cloud customers and affects roughly 300,000 organizations. For customers on the Free, Standard, and Premium tiers, metadata collection is mandatory and cannot be turned off. Only Enterprise-tier customers have the option to opt out. This policy change deserves a close read if your engineering, IT, and program management teams run on Atlassian because they are most exposed by this change — and least likely to have been consulted before it happened.\n\nAlthough the underlying governance questions are the same for both Atlassian and GitHub's changes, the data at risk is different. Where GitHub's change concerned source code and developer interactions, Atlassian's reaches into project plans, internal documentation, workflow configurations, and operational metadata across Jira, Confluence, and the broader Atlassian stack. **For organizations that rely on these tools as their system of record for how work gets planned and delivered, the implications run deep.**\n\n## What changed and what it means for your data\n\nAtlassian will collect two categories of information: \n\n- **Metadata:** de-identified operational signals like story points, sprint dates, and SLA values, including data from its Teamwork Graph and connected third-party apps  \n- **In-app content:** user-generated material such as Confluence page content, Jira issue titles, descriptions, and comments\n\nAtlassian says it will apply de-identification and aggregation before training. 
Collected data may be retained for up to seven years, with in-app data removed within 30 days of opt-out and models retrained within 90 days.\n\nThere are some exclusions: Customers using customer-managed encryption keys, Atlassian Government Cloud, Isolated Cloud, or those with HIPAA requirements are carved out from collection. But for the vast majority of Atlassian's cloud customer base, data collection will start unless you pay for the Enterprise tier and actively flip the switch.\n\nThis reverses Atlassian's prior stated position that customer data would not be used to train or improve AI services. Organizations that adopted Jira and Confluence to manage their most sensitive planning workflows, sprint boards, security tickets, incident postmortems, and internal documentation will soon be contributing that content to Atlassian's AI training pipeline, without ever being asked.\n\n## The governance gap in \"opt-out by default\"\n\nOpt-out-by-default data collection for AI training is an emerging pattern across the software industry. It raises the same set of questions every time: How does this interact with existing data processing agreements? Does the vendor's definition of \"metadata\" match what your legal and security teams would consider non-sensitive data?\n\n**For many organizations, the answer to these questions is \"we don't know.\"** \n\nWhen a vendor changes its data practices through a terms-of-service update, the burden falls on the customer to notice, evaluate the implications, and act within the window the vendor provides. \n\nThe mandatory nature of metadata collection on Free, Standard, and Premium tiers makes this more acute. The only exit is upgrading to Enterprise, which requires a minimum of 801 users and custom pricing that would represent a significant cost jump for teams that aren't there yet. Data protection, in other words, is now a purchasing decision.\n\nThe tiered structure also introduces a subtler problem. 
Metadata like story points, sprint velocity, SLA metrics, and task classifications may seem innocuous in isolation, but in aggregate they reveal project structure, team performance patterns, and delivery cadence. For organizations in competitive industries, that operational intelligence has real value, and \"de-identified\" does not necessarily mean \"non-sensitive\" once patterns are reconstructable at scale.\n\n## Why this matters more for Atlassian-stack organizations\n\nIn Atlassian-based organizations, Jira has been the center of how teams plan, track, and deliver work. It’s the source of truth for sprint planning, bug tracking, release management, portfolio coordination, and cross-functional project execution. \n\nIn regulated industries like financial services, public sector and manufacturing, Jira and Confluence together hold sensitive operational data that may be subject to compliance requirements. The risk compounds for organizations that have expanded beyond Jira into the broader Atlassian ecosystem.\n\nWhen you run Jira, Confluence, Bitbucket, and Bamboo together, the surface area of data now feeding into AI training spans your project plans, internal documentation, source code metadata, and CI/CD configurations — each of which security and compliance teams would want to review before sharing with a vendor's training pipeline.\n\nAtlassian’s Teamwork Graph connectors add another dimension for customers who have integrated third-party tools, such as Slack, Figma, Google Drive, Salesforce, and ServiceNow, into their environment. Teamwork Graph connectors index relationship and activity signals from these connected apps, which means the metadata Atlassian collects will not be limited to what lives inside Atlassian products. 
For security and compliance teams accustomed to evaluating data flows on a per-vendor basis, this cross-platform reach complicates the assessment considerably.\n\nOrganizations that are already navigating [Atlassian's push from Data Center](https://about.gitlab.com/blog/atlassian-ending-data-center-as-gitlab-maintains-deployment-choice/) and Server editions to the cloud face a compounding challenge. Adding default AI data collection to that migration path raises the stakes further: **The question is no longer just \"do we move to Atlassian Cloud?\" but \"do we move to Atlassian Cloud knowing our data will feed AI training unless we're on the most expensive tier?\"**\n\n## What regulated industries should be evaluating now\n\nThe compliance implications vary by sector, but the obligation to reassess is consistent.\n\nIn financial services, frameworks like [SR 11-7](https://www.federalreserve.gov/supervisionreg/srletters/sr1107.htm) and [DORA](https://eur-lex.europa.eu/eli/reg/2022/2554/oj/eng) require documented, auditable oversight of third-party technology providers, including how those providers handle data. In the public sector, [NIST 800-53](https://csrc.nist.gov/publications/detail/sp/800-53/rev-5/final) and [FISMA](https://www.cisa.gov/topics/cyber-threats-and-advisories/federal-information-security-modernization-act) make controlling where sensitive data flows a foundational requirement. In healthcare, [HIPAA](https://www.hhs.gov/hipaa/index.html) governs how patient-adjacent data is handled by third parties. \n\nAcross the board, a material change in a vendor's data practices, such as Atlassian moving from \"we don't train on your data\" to \"we do, by default,\" triggers a documentation and risk reassessment obligation. \n\nInstitutions operating under the [EU AI Act](https://eur-lex.europa.eu/eli/reg/2024/1689/oj/eng) face an additional dimension: opt-out framing aligns with U.S. 
norms, while European regulators generally expect opt-in consent for data processing of this nature.\n\nIf your model risk or vendor management team documented Atlassian's data handling controls before this announcement, the question isn't whether this change triggers a reassessment obligation. It does. The question is whether your team can take action before August 17.\n\n## What to look for in your platform vendors\n\nCTOs and CISOs across regulated industries need to adopt AI in a way they can explain to regulators, boards, and customers. Because of this, GitLab operates within the following set of principles:\n\n**Unconditional data commitments, not tier-dependent protections.** Regulated organizations need to know, with specificity, what happens to their data. A commitment that varies by plan tier, or that requires action before a deadline, introduces exactly the kind of uncontrolled variable that keeps CISOs up at night.\n\n**Transparency and auditability.** Model risk management frameworks require organizations to understand the AI systems they deploy, including the training data and third parties involved. Vendors who cannot answer these questions clearly create documentation risk.\n\n**Separation between customer data and vendor AI training.** When a platform vendor trains models on customer usage data, workflows and operational patterns become inputs to a system that also serves competitors. For organizations where project structure or delivery cadence represents competitive advantage, that exposure matters.\n\n## How GitLab's approach differs\n\nGitLab doesn't train on customer data — at any tier, full stop. 
AI vendors powering GitLab Duo features are contractually prohibited from using customer inputs or outputs for their own purposes, [a commitment GitLab CEO Bill Staples](https://www.linkedin.com/posts/williamstaples_gitlab-1810-agentic-ai-now-open-to-even-activity-7443280763715985408-aHxf) has consistently reiterated.\n\n[GitLab's AI Transparency Center](https://about.gitlab.com/ai-transparency-center/) documents exactly which models power which features, how data is handled, and what vendor commitments are in place. [GitLab's AI Continuity Plan](https://handbook.gitlab.com/handbook/product/ai/continuity-plan/) documents how vendor changes are managed, including any material changes to how AI vendors treat customer data. For institutions managing third-party AI risk under DORA or similar frameworks, vendor continuity and concentration are active governance concerns, and having a documented plan for both is part of what responsible AI tooling looks like.\n\nFor organizations that require AI processing to stay within their own infrastructure, [GitLab Duo Agent Platform](https://about.gitlab.com/gitlab-duo/) is available with GitLab Self-Managed deployments, including support for integration with self-hosted AI models. This means prompts and code never leave the customer's environment. GitLab also provides IP indemnification for Duo-generated output, with no filters required and no activation steps needed. Where your data lives remains your choice, no matter your deployment model or subscription tier.\n\n> Whether your organization stays on Atlassian or begins evaluating alternatives, the conversation about who controls your data and how it gets used should be happening now. 
**The August 17 deadline is approaching, but you still have time to [try GitLab Ultimate with Duo Agent Platform for free today](https://gitlab.com/-/trials/new).**",{"featured":15,"template":704,"slug":705},"BlogPost","atlassian-will-train-on-your-data-opt-out-with-gitlab",{"title":697,"description":707,"authors":708,"date":709,"heroImage":710,"body":702,"category":9,"tags":711},"Learn why Atlassian's latest move is a threat to data governance and how GitLab's approach helps ensure your customers' data stays private and protected.",[701],"2026-05-04","https://res.cloudinary.com/about-gitlab-com/image/upload/v1773866173/vte9qh8rriznvyclhkes.png",[12,712],"product",{},"/en-us/blog/atlassian-will-train-on-your-data-opt-out-with-gitlab","seo:\n  config:\n    noIndex: false\n  title: 'Atlassian will train on your data: Opt out with GitLab'\n  description: Learn why Atlassian's latest move is a threat to data governance\n    and how GitLab's approach helps ensure your customers' data stays private\n    and protected.\ncontent:\n  title: 'Atlassian will train on your data: Opt out with GitLab'\n  description: Learn why Atlassian's latest move is a threat to data governance\n    and how GitLab's approach helps ensure your customers' data stays private\n    and protected.\n  authors:\n    - Jessica Hurwitz\n  date: '2026-05-04'\n  heroImage: https://res.cloudinary.com/about-gitlab-com/image/upload/v1773866173/vte9qh8rriznvyclhkes.png\n  body: >-\n    Starting August 17, 2026, Atlassian will begin collecting customer metadata and in-app content from Jira, Confluence, and other cloud products to train\n    its AI offerings, including Rovo and Rovo Dev. This announcement comes after [GitHub recently changed its Copilot data usage\n    policy](https://about.gitlab.com/blog/github-copilots-new-policy-for-ai-training-is-a-governance-wake-up-call/).\n    **Taken together, these changes suggest opt-out-by-default is becoming the industry norm. 
GitLab takes the opposite position: no data collection, no AI\n    training on customer data, no matter what tier you're on.**\n\n\n    [Atlassian's change](https://www.atlassian.com/trust/ai/data-contribution) is enabled by default for all cloud customers and affects roughly 300,000 organizations. For customers on the Free, Standard, and Premium tiers, metadata collection is mandatory and cannot be turned off. Only Enterprise-tier customers have the option to opt out. This policy change deserves a close read if your engineering, IT, and program management teams run on Atlassian because they are most exposed by this change — and least likely to have been consulted before it happened.\n\n\n    Although the underlying governance questions are the same for both Atlassian and GitHub's changes, the data at risk is different. Where GitHub's change concerned source code and developer interactions, Atlassian's reaches into project plans, internal documentation, workflow configurations, and operational metadata across Jira, Confluence, and the broader Atlassian stack. **For organizations that rely on these tools as their system of record for how work gets planned and delivered, the implications run deep.**\n\n\n    ## What changed and what it means for your data\n\n\n    Atlassian will collect two categories of information: \n\n\n    - **Metadata:** de-identified operational signals like story points, sprint dates, and SLA values, including data from its Teamwork Graph and connected third-party apps  \n\n    - **In-app content:** user-generated material such as Confluence page content, Jira issue titles, descriptions, and comments\n\n\n    Atlassian says it will apply de-identification and aggregation before training. 
Collected data may be retained for up to seven years, with in-app data removed within 30 days of opt-out and models retrained within 90 days.\n\n\n    There are some exclusions: Customers using customer-managed encryption keys, Atlassian Government Cloud, Isolated Cloud, or those with HIPAA requirements are carved out from collection. But for the vast majority of Atlassian's cloud customer base, data collection will start unless you pay for the Enterprise tier and actively flip the switch.\n\n\n    This reverses Atlassian's prior stated position that customer data would not be used to train or improve AI services. Organizations that adopted Jira and Confluence to manage their most sensitive planning workflows, sprint boards, security tickets, incident postmortems, and internal documentation will soon be contributing that content to Atlassian's AI training pipeline, without ever being asked.\n\n\n    ## The governance gap in \"opt-out by default\"\n\n\n    Opt-out-by-default data collection for AI training is an emerging pattern across the software industry. It raises the same set of questions every time: How does this interact with existing data processing agreements? Does the vendor's definition of \"metadata\" match what your legal and security teams would consider non-sensitive data?\n\n\n    **For many organizations, the answer to these questions is \"we don't know.\"** \n\n\n    When a vendor changes its data practices through a terms-of-service update, the burden falls on the customer to notice, evaluate the implications, and act within the window the vendor provides. \n\n\n    The mandatory nature of metadata collection on Free, Standard, and Premium tiers makes this more acute. The only exit is upgrading to Enterprise, which requires a minimum of 801 users and custom pricing that would represent a significant cost jump for teams that aren't there yet. 
Data protection, in other words, is now a purchasing decision.\n\n\n    The tiered structure also introduces a subtler problem. Metadata like story points, sprint velocity, SLA metrics, and task classifications may seem innocuous in isolation, but in aggregate they reveal project structure, team performance patterns, and delivery cadence. For organizations in competitive industries, that operational intelligence has real value, and \"de-identified\" does not necessarily mean \"non-sensitive\" once patterns are reconstructable at scale.\n\n\n    ## Why this matters more for Atlassian-stack organizations\n\n\n    In Atlassian-based organizations, Jira has been the center of how teams plan, track, and deliver work. It’s the source of truth for sprint planning, bug tracking, release management, portfolio coordination, and cross-functional project execution. \n\n\n    In regulated industries like financial services, public sector and manufacturing, Jira and Confluence together hold sensitive operational data that may be subject to compliance requirements. The risk compounds for organizations that have expanded beyond Jira into the broader Atlassian ecosystem.\n\n\n    When you run Jira, Confluence, Bitbucket, and Bamboo together, the surface area of data now feeding into AI training spans your project plans, internal documentation, source code metadata, and CI/CD configurations — each of which security and compliance teams would want to review before sharing with a vendor's training pipeline.\n\n\n    Atlassian’s Teamwork Graph connectors add another dimension for customers who have integrated third-party tools, such as Slack, Figma, Google Drive, Salesforce, and ServiceNow, into their environment. Teamwork Graph connectors index relationship and activity signals from these connected apps, which means the metadata Atlassian collects will not be limited to what lives inside Atlassian products. 
For security and compliance teams accustomed to evaluating data flows on a per-vendor basis, this cross-platform reach complicates the assessment considerably.\n\n\n    Organizations that are already navigating [Atlassian's push from Data Center](https://about.gitlab.com/blog/atlassian-ending-data-center-as-gitlab-maintains-deployment-choice/) and Server editions to the cloud face a compounding challenge. Adding default AI data collection to that migration path raises the stakes further: **The question is no longer just \"do we move to Atlassian Cloud?\" but \"do we move to Atlassian Cloud knowing our data will feed AI training unless we're on the most expensive tier?\"**\n\n\n    ## What regulated industries should be evaluating now\n\n\n    The compliance implications vary by sector, but the obligation to reassess is consistent.\n\n\n    In financial services, frameworks like [SR 11-7](https://www.federalreserve.gov/supervisionreg/srletters/sr1107.htm) and [DORA](https://eur-lex.europa.eu/eli/reg/2022/2554/oj/eng) require documented, auditable oversight of third-party technology providers, including how those providers handle data. In the public sector, [NIST 800-53](https://csrc.nist.gov/publications/detail/sp/800-53/rev-5/final) and [FISMA](https://www.cisa.gov/topics/cyber-threats-and-advisories/federal-information-security-modernization-act) make controlling where sensitive data flows a foundational requirement. In healthcare, [HIPAA](https://www.hhs.gov/hipaa/index.html) governs how patient-adjacent data is handled by third parties. \n\n\n    Across the board, a material change in a vendor's data practices, such as Atlassian moving from \"we don't train on your data\" to \"we do, by default,\" triggers a documentation and risk reassessment obligation. \n\n\n    Institutions operating under the [EU AI Act](https://eur-lex.europa.eu/eli/reg/2024/1689/oj/eng) face an additional dimension: opt-out framing aligns with U.S. 
norms, while European regulators generally expect opt-in consent for data processing of this nature.\n\n\n    If your model risk or vendor management team documented Atlassian's data handling controls before this announcement, the question isn't whether this change triggers a reassessment obligation. It does. The question is whether your team can take action before August 17.\n\n\n    ## What to look for in your platform vendors\n\n\n    CTOs and CISOs across regulated industries need to adopt AI in a way they can explain to regulators, boards, and customers. Because of this, GitLab operates within the following set of principles:\n\n\n    **Unconditional data commitments, not tier-dependent protections.** Regulated organizations need to know, with specificity, what happens to their data. A commitment that varies by plan tier, or that requires action before a deadline, introduces exactly the kind of uncontrolled variable that keeps CISOs up at night.\n\n\n    **Transparency and auditability.** Model risk management frameworks require organizations to understand the AI systems they deploy, including the training data and third parties involved. Vendors who cannot answer these questions clearly create documentation risk.\n\n\n    **Separation between customer data and vendor AI training.** When a platform vendor trains models on customer usage data, workflows and operational patterns become inputs to a system that also serves competitors. For organizations where project structure or delivery cadence represents competitive advantage, that exposure matters.\n\n\n    ## How GitLab's approach differs\n\n\n    GitLab doesn't train on customer data — at any tier, full stop. 
AI vendors powering GitLab Duo features are contractually prohibited from using customer inputs or outputs for their own purposes, [a commitment GitLab CEO Bill Staples](https://www.linkedin.com/posts/williamstaples_gitlab-1810-agentic-ai-now-open-to-even-activity-7443280763715985408-aHxf) has consistently reiterated.\n\n\n    [GitLab's AI Transparency Center](https://about.gitlab.com/ai-transparency-center/) documents exactly which models power which features, how data is handled, and what vendor commitments are in place. [GitLab's AI Continuity Plan](https://handbook.gitlab.com/handbook/product/ai/continuity-plan/) documents how vendor changes are managed, including any material changes to how AI vendors treat customer data. For institutions managing third-party AI risk under DORA or similar frameworks, vendor continuity and concentration are active governance concerns, and having a documented plan for both is part of what responsible AI tooling looks like.\n\n\n    For organizations that require AI processing to stay within their own infrastructure, [GitLab Duo Agent Platform](https://about.gitlab.com/gitlab-duo/) is available with GitLab Self-Managed deployments, including support for integration with self-hosted AI models. This means prompts and code never leave the customer's environment. GitLab also provides IP indemnification for Duo-generated output, with no filters required and no activation steps needed. Where your data lives remains your choice, no matter your deployment model or subscription tier.\n\n\n    > Whether your organization stays on Atlassian or begins evaluating alternatives, the conversation about who controls your data and how it gets used should be happening now. 
**The August 17 deadline is approaching, but you still have time to [try GitLab Ultimate with Duo Agent Platform for free today](https://gitlab.com/-/trials/new).**\n\n  category: ai-ml\n  tags:\n    - AI/ML\n    - product\nconfig:\n  featured: true\n  template: BlogPost\n  slug: atlassian-will-train-on-your-data-opt-out-with-gitlab\n",{"config":717,"title":697,"description":707},{"noIndex":10},"en-us/blog/atlassian-will-train-on-your-data-opt-out-with-gitlab",[720,712],"aiml",[12,712],"FXbgejjc-4pOYnTyszk0acxjKOt3ysgcpixQylgI4co",[724,733,742,751,761,771,780,790,798],{"content":725,"config":731},{"title":726,"heroImage":727,"category":9,"description":728,"authors":729},"Solving complex challenges with GitLab Duo Workflow","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097663/Blog/Hero%20Images/Blog/Hero%20Images/Workflow%201800x945_2gQoQIbY9NvjLFpXtsxtXy_1750097663612.png","Learn how a member of the GitLab Customer Success Management team uses agentic AI for real-world problem-solving, including addressing Helm chart limits in the package registry.",[730],"Johannes Bauer",{"externalUrl":-1,"slug":732},"solving-complex-challenges-with-gitlab-duo-workflow",{"content":734,"config":740},{"title":735,"heroImage":736,"category":9,"description":737,"authors":738},"GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659604/Blog/Hero%20Images/Screenshot_2024-11-27_at_4.55.28_PM.png","The comprehensive AI-powered DevSecOps platform combined with the deepest set of cloud computing capabilities speeds dev cycles, increases automation, and improves code quality.",[739],"Emilio Salvador",{"externalUrl":-1,"slug":741},"gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws",{"content":743,"config":749},{"title":744,"heroImage":745,"category":9,"description":746,"authors":747},"Use GitLab Duo Workflow to improve application quality 
assurance","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097617/Blog/Hero%20Images/Blog/Hero%20Images/Workflow%201800x945_2gQoQIbY9NvjLFpXtsxtXy_1750097616649.png","Learn step-by-step how to add unit tests to a Java application using agentic AI (includes a video tutorial).",[748],"Cesar Saavedra",{"externalUrl":-1,"slug":750},"use-gitlab-duo-workflow-to-improve-application-quality-assurance",{"content":752,"config":759},{"title":753,"heroImage":754,"category":9,"description":755,"authors":756},"The GitLab AI Security Framework for security leaders","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664299/Blog/Hero%20Images/AdobeStock_887599633.jpg","Discover how GitLab Duo's security controls, third-party integrations, and retention policies help teams safely implement AI into their development workflow.",[757,758],"Kyle Smith","Ayoub Fandi",{"externalUrl":-1,"slug":760},"the-gitlab-ai-security-framework-for-security-leaders",{"content":762,"config":769},{"title":763,"heroImage":764,"category":9,"description":765,"authors":766},"GitLab Duo Self-Hosted: Enterprise AI built for data privacy","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097840/Blog/Hero%20Images/Blog/Hero%20Images/Self-Hosted%201800x945_1dL1II2ITh2PteObA9DBLD_1750097839679.png","Customers in regulated industries can now deploy GitLab Duo on self-managed infrastructure, leveraging the power of generative AI while helping to address data residency and privacy concerns.",[767,768],"Susie Bitters","Aathira Nair",{"externalUrl":-1,"slug":770},"gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy",{"content":772,"config":778},{"title":773,"heroImage":774,"category":9,"description":775,"authors":776},"GitLab Duo Workflow: Enterprise visibility and control for agentic AI","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660174/Blog/Hero%20Images/Workflow_1800x945.png","Secure, autonomous, context-aware AI agents take on complex tasks, 
freeing developers to ship innovative software faster. Private beta waitlist now open.",[777],"Pini Wietchner",{"externalUrl":-1,"slug":779},"gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai",{"content":781,"config":788},{"title":782,"heroImage":783,"category":9,"description":784,"authors":785},"How GitLab uses prompt guardrails to help protect customers","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663918/Blog/Hero%20Images/aipower.jpg","Learn what prompt guardrails are, how they help mitigate security risks, and what unique considerations GitLab has taken into account when implementing them.",[786,787],"David O'Regan","Roger Woo",{"externalUrl":-1,"slug":789},"how-gitlab-uses-prompt-guardrails-to-help-protect-customers",{"content":791,"config":796},{"title":792,"heroImage":736,"category":9,"description":793,"authors":794},"DevSecOps + Agentic AI: Now on GitLab Self-Managed Ultimate on AWS","Start using AI-powered, DevSecOps-enhanced agents in your AWS GitLab Self-Managed Ultimate instance. Enjoy the benefits of GitLab Duo and Amazon Q in your organization.",[795],"Jackie Porter",{"externalUrl":-1,"slug":797},"devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws",{"content":799,"config":803},{"title":800,"heroImage":801,"category":9,"description":802,"authors":-1},"AI trends for 2025: Agentic AI, self-hosted models, and more","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665039/Blog/Hero%20Images/display-the-source-article-ai-trends-coming-in-2026-image-0492-1800x945-fy25.png","Discover coming trends in AI for software development, from on-premises model deployments to proactive AI assistants.",{"externalUrl":804,"slug":-1},"https://about.gitlab.com/the-source/ai/ai-trends-for-2025-agentic-ai-self-hosted-models-and-more/",1777934854881]