# robots.txt for https://visitsajid.com/

# Default rules for all search engines: allow everything except the
# directories and file types disallowed below
User-agent: *
# Crawl-delay setting (adjust if needed based on your server capacity)
Crawl-delay: 10
# Disallow access to sensitive directories (example)
Disallow: /cgi-bin/
Disallow: /tmp/
Disallow: /private/
# Block access to specific file types (example)
Disallow: /*.pdf$
Disallow: /*.doc$
Disallow: /*.xls$

# Sitemap file location
Sitemap: https://visitsajid.com/sitemap.xml

# Directives for specific crawlers
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Slurp
Allow: /

# Block certain crawlers (if needed)
User-agent: BadBot
Disallow: /

# End of robots.txt
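
# Note: a crawler that matches one of the specific User-agent groups above
# (e.g. Googlebot) follows only that group and ignores the "*" group, so the
# directory and file-type rules may need to be repeated there if they should
# also apply to those crawlers. Googlebot ignores Crawl-delay; Bing honors it.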