Robots.txt for brainly.com

# Brainly Robots.txt 31.07.2017

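# Default rules for every crawler: block transactional, duplicate-content and internal endpoints
# (login, signup, ask forms, /api, /bff); /ads.txt is kept crawlable, presumably so advertising
# partners can verify seller records.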
User-agent: *
Allow: /ads.txt
Disallow: /advertisements/gift_clicks
Disallow: /app/ask?*
Disallow: /buddies/invite/
Disallow: /buddies_new/invite/
Disallow: /cdn-cgi/l/email-protection
Disallow: /login?*
Disallow: /question/add?*
Disallow: /signup?*
Disallow: /tasks/prev_task/
Disallow: /tasks/next_task/
Disallow: /tasks/latex/
Disallow: /tasks/solve_dynamic/
Disallow: /users/thank/
Disallow: /users/view_awards

Disallow: /api
Disallow: /bff
Disallow: /experts

User-agent: Yandex
Disallow: /advertisements/gift_clicks
Disallow: /app/ask?*
Disallow: /buddies/invite/
Disallow: /buddies_new/invite/
Disallow: /cdn-cgi/l/email-protection
Disallow: /login?*
Disallow: /question/add?*
Disallow: /signup?*
Disallow: /tasks/prev_task/
Disallow: /tasks/next_task/
Disallow: /tasks/latex/
Disallow: /tasks/solve_dynamic/
Disallow: /users/thank/
Disallow: /users/view_awards/

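# Yandex-specific directives: Clean-param tells Yandex to ignore the listed tracking parameters
# (source, return, entry) when indexing URLs; Host declares the preferred mirror (a directive
# Yandex has since deprecated).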
Clean-param: source
Clean-param: return
Clean-param: entry
Host: https://brainly.com

# Disallow Marketing bots
User-agent: SemrushBot-SA
Disallow: / #Semrush
Allow: /ads.txt

User-agent: SemrushBot
Disallow: / #Semrush
Allow: /ads.txt

User-agent: rogerbot
Disallow: / #MOZ
Allow: /ads.txt

User-agent: dotbot
Disallow: / #MOZ
Allow: /ads.txt

User-agent: BLEXBot
Disallow: / #Webmeup.com
Allow: /ads.txt

User-agent: spbot
Disallow: / #Openlinkprofiler
Allow: /ads.txt

User-agent: SEOdiver
Disallow: / #SEOdiver
Allow: /ads.txt

User-agent: dataprovider
Disallow: / #DataProvider.com
Allow: /ads.txt

User-agent: magpie-crawler
Disallow: / #BrandWatch.com
Allow: /ads.txt

User-agent: GetIntent Crawler
Disallow: /
Allow: /ads.txt

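# An empty Disallow value grants full access. The agents below appear to be ad-verification and
# brand-safety crawlers (Grapeshot, DoubleVerify, White Ops, Moat, IAS, Forensiq), plus DuckDuckGo,
# Leiki, and Baidu's site-monitoring bots.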
User-agent: grapeshot
Disallow:

User-agent: Doubleverify
Disallow:

User-agent: White ops
Disallow:

User-agent: moatbot
Disallow:

User-agent: ias_crawler
Disallow:

User-agent: forensiq
Disallow:

User-agent: duckduckbot
Disallow:

User-agent: Leikibot
Disallow:

User-agent: Baidu-YunGuanCe-ScanBot(ce.baidu.com)
Disallow:

User-agent: Baidu-YunGuanCe-SLABot(ce.baidu.com)
Disallow:

User-agent: Baidu-YunGuanCe-PerfBot(ce.baidu.com)
Disallow:

User-agent: Baidu-YunGuanCe-VSBot(ce.baidu.com)
Disallow:

#Disallow exotic search engine crawlers
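# Regional engines: Seznam (Czech Republic), Sogou/360/Baidu (China), Naver's NaverBot and Yeti
# (South Korea), Cốc Cốc (Vietnam), Qwant (France) and Exalead's Exabot (France).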

User-agent: SeznamBot
Disallow: /
Allow: /ads.txt

User-agent: Sogou web spider
Disallow: /
Allow: /ads.txt

User-agent: 360spider
Disallow: /
Allow: /ads.txt

User-agent: Baiduspider
Disallow: /
Allow: /ads.txt

User-agent: NaverBot
Disallow: /
Allow: /ads.txt

User-agent: Yeti
Disallow: /
Allow: /ads.txt

User-agent: coccocbot-web
Disallow: /
Allow: /ads.txt

User-agent: Qwantify
Disallow: /
Allow: /ads.txt

User-agent: Exabot
Disallow: /
Allow: /ads.txt

#Disallow other crawlers

User-agent: Linguee
Disallow: / #Language tool
Allow: /ads.txt

User-agent: SurdotlyBot
Disallow: / #Sur.ly
Allow: /ads.txt

User-agent: BUbiNG
Disallow: / #BUbiNG academic crawler
Allow: /ads.txt

# Good bots whitelist:
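# Twitterbot and Facebot fetch pages to build link previews; Mediapartners-Google is the AdSense crawler.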

User-agent: Twitterbot
Disallow:

User-agent: Mediapartners-Google
Disallow:

User-agent: Facebot
Disallow:

#Other bots
#Neticle Crawler v1.0 (https://bot.neticle.hu/) - brand monitoring
#MegaIndex https://megaindex.com/crawler - link index tool (follows the directives in User-agent: *)
#Obot - IBM X-Force service
#SafeDNSBot (https://www.safedns.com/searchbot)

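#Disallow AI training crawlers
# Applebot-Extended is Apple's opt-out token for AI model training; GPTBot is OpenAI's training crawler.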
User-agent: Applebot-Extended
Disallow: /

User-agent: GPTBot
Disallow: /