# llm.txt for ProgComm
# AI / LLM crawler access policy
# Last updated: 2025-08-25
# Contact: info@progcomm.com
# Website: https://progcomm.ai

[site]
name = "ProgComm"
url = "https://progcomm.ai"
description = "AI-Driven B2B Commerce Platform with Embedded Finance"
contact = "info@progcomm.com"
license = "All rights reserved. AI training/fine-tuning requires prior written consent."

[structured-data]
# Priority structured content for LLMs
urls = [
  "https://progcomm.ai/",
  "https://progcomm.ai/og.png",
  "https://progcomm.ai/ProgCommHeadLogo.png",
  "https://progcomm.ai/site.webmanifest",
  "https://progcomm.ai/ai.txt",
  "https://progcomm.ai/sitemap.xml"
]

[policy]
allow_indexing = true
allow_snippets = true
require_attribution_link = true
allow_training = false
allow_dataset_redistribution = false

[disallowed-paths]
# Do not use these for model training or dataset building
paths = [
  "/api",
  "/dashboard",
  "/admin",
  "/private",
  "/static/export"
]

[crawlers]
# Major AI crawlers and their permissions
"GPTBot" = "Disallow: training; Allow: indexing for answer retrieval"
"GPTBot-Image" = "Disallow: training"
"CCBot" = "Disallow: training"
"Claude-Web" = "Disallow: training"
"Anthropic-ai" = "Disallow: training"
"Google-Extended" = "Allow: snippets only; Disallow: training"
"AppleBot-Extended" = "Allow: snippets only; Disallow: training"
"PerplexityBot" = "Disallow: training; Allow: metadata/snippets"
"YouBot" = "Disallow: training"
"Amazonbot" = "Disallow: training"
"FacebookBot" = "Disallow: training"
"Bingbot" = "Allow: indexing/snippets"

[enforcement]
violation_contact = "info@progcomm.com"
# If you operate a crawler, cache, or LLM using our content beyond the allowances above,
# you must cease such activity upon notice.