# robots.txt for https://www.adresse-algerie.com

User-agent: *

# Allow crawling of all content by default
Allow: /

# Disallow crawling of Next.js internal files
Disallow: /_next/

# Disallow crawling of API routes (if you have any)
Disallow: /api/

# Add any other specific paths you want to block crawlers from accessing.
# For example, if you had an admin section:
# Disallow: /admin/
# Or specific sensitive files:
# Disallow: /private/

# Point crawlers to the sitemap index file
Sitemap: https://www.adresse-algerie.com/sitemap_index.xml