# robots.txt for Narmada Tent House

# Allow all user agents to crawl the entire website
User-agent: *
Disallow:

# Disallow crawlers from certain folders if you do not want them indexed
# For example, disallowing an admin section:
# Disallow: /admin/

# Allow specific user agents, like Googlebot, to crawl everything
User-agent: Googlebot
Disallow:

# Block certain files if needed
# Disallow: /private/

# Sitemap location (use your actual sitemap URL if available)
Sitemap: https://www.narmadatent.com/sitemap.xml