Robots.txt Examples

Robots.txt file content for letterboxd.com.

Robots.txt file for: letterboxd.com
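
The file lives at the standard location for any site, https://letterboxd.com/robots.txt. A minimal Python sketch (illustrative, not part of the original page) for fetching the current version to compare against the copy below:

# Fetch the live robots.txt from its standard location at the site root.
# Only the domain is assumed; robots.txt is always served from /robots.txt.
from urllib.request import urlopen

with urlopen("https://letterboxd.com/robots.txt") as response:
    print(response.read().decode("utf-8"))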

# Dark Visitors robots.txt from https://darkvisitors.com/robots-txt-builder

# AI Data Scraper
# https://darkvisitors.com/agents/anthropic-ai

User-agent: anthropic-ai
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/bytespider

User-agent: Bytespider
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/ccbot

User-agent: CCBot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/diffbot

User-agent: Diffbot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/facebookbot

User-agent: FacebookBot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/google-extended

User-agent: Google-Extended
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/gptbot

User-agent: GPTBot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/omgili

User-agent: omgili
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/applebot-extended

User-agent: Applebot-Extended
Disallow: /

# All other crawlers
User-agent: *
Disallow: /*/by/*                     # sorting options
Disallow: /*/popular/this/*           # popularity sorting options
Disallow: /*/on/*                     # availability options
Disallow: /*/tag/*                    # Members' tag lists
Disallow: /*/genre/*                  # Films by genre
Disallow: /*/country/*                # Films by country
Disallow: /*/language/*               # Films by language
Disallow: /*/decade/*                 # Films by decade
Disallow: /films/year/*               # Films by year
Disallow: /films/*/year/*             # Films by year
Disallow: /films/*/size/large/*       # Films with large posters (and therefore stats)
Disallow: /*/friends/*                # stuff grouped for users' friends
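
The named AI crawlers above are blocked from the whole site, while all other crawlers are only excluded from the wildcard paths at the end. Below is a minimal Python sketch of how these rules evaluate (illustrative, not part of the original file; the AI_BLOCKS sample and the example URLs are assumptions). Python's standard-library parser handles the blanket per-agent blocks, but it implements the original robots exclusion protocol and does not expand the * wildcards under User-agent: *, so those path rules are checked here with a small RFC 9309-style matcher.

# 1) The per-agent blanket blocks: urllib.robotparser evaluates these as written.
from urllib.robotparser import RobotFileParser

AI_BLOCKS = """\
User-agent: GPTBot
Disallow: /

User-agent: CCBot
Disallow: /
"""

parser = RobotFileParser()
parser.parse(AI_BLOCKS.splitlines())
print(parser.can_fetch("GPTBot", "https://letterboxd.com/film/parasite-2019/"))  # False
print(parser.can_fetch("CCBot", "https://letterboxd.com/"))                      # False

# 2) The wildcard rules under "User-agent: *": urllib.robotparser treats "*"
#    literally, so match them with RFC 9309 semantics instead ("*" matches any
#    run of characters, "$" anchors the end of the URL path).
import re

def rule_matches(pattern: str, path: str) -> bool:
    regex = "".join(
        ".*" if ch == "*" else "$" if ch == "$" else re.escape(ch)
        for ch in pattern
    )
    return re.match(regex, path) is not None

print(rule_matches("/*/by/*", "/films/by/rating/"))        # True  -> disallowed
print(rule_matches("/films/year/*", "/films/year/2019/"))  # True  -> disallowed
print(rule_matches("/*/genre/*", "/film/some-film/"))      # False -> allowed

Crawlers that follow RFC 9309 apply the longest matching rule when Allow and Disallow lines overlap; this file uses only Disallow lines under User-agent: *, which keeps the check above simple.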