# This robots.txt file controls crawling of URLs under https://michaelerb1.blogspot.com.
# All crawlers are disallowed from crawling files in the "includes" directory,
# such as .css and .js files, but Google needs them for rendering, so
# Googlebot is allowed to crawl them.
# Ban bots that don't benefit us.
# --------------------------------

User-agent: Nuclei
User-agent: WikiDo
User-agent: Riddler
User-agent: PetalBot
User-agent: Zoominfobot
User-agent: Go-http-client
User-agent: Node/simplecrawler
User-agent: CazoodleBot
User-agent: dotbot
User-agent: Gigabot
User-agent: Barkrowler
User-agent: BLEXBot
User-agent: magpie-crawler
Disallow: /

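# Rules for every other crawler: keep out of the "includes" directory, URLs
# ending in /blog/, and paths that start with /ads (except /ads.txt).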
User-agent: *
Disallow: /includes/
Disallow: /blog/$
Allow: /ads.txt
Disallow: /ads

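# Googlebot may crawl the "includes" files so pages render correctly in Search.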
User-agent: Googlebot
Allow: /includes/

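# The AdSense crawler may fetch any page (an empty Disallow allows everything).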
User-agent: Mediapartners-Google
Disallow:

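# Let Google News crawl the entire site.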
User-agent: Googlebot-News
Allow: /

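# Block Googlebot-Image from crawling the site, keeping images out of Google Images.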
User-agent: Googlebot-Image
Disallow: /

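# Sitemaps and Atom feeds that list this blog's URLs.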
Sitemap: https://michaelerb1.blogspot.com/sitemap.xml
Sitemap: https://michaelerb1.blogspot.com/sitemap-pages.xml
Sitemap: https://michaelerb1.blogspot.com/feeds/posts/default?orderby=UPDATED
Sitemap: https://michaelerb1.blogspot.com/atom.xml?redirect=false&start-index=1&max-results=500