# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
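#
# To ban a single crawler instead, give it its own group ("BadBot" is a
# placeholder name, not a crawler observed on this mirror):
# User-Agent: BadBot
# Disallow: /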
# Add a 1-second delay between successive requests to the same server; this limits the resources used by a crawler
# Only some crawlers respect this setting; Googlebot, for example, does not
# Crawl-delay: 1
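#
# Googlebot's crawl rate is managed through Google Search Console instead.
# For crawlers that do honor the directive, it can be scoped to a single bot,
# e.g. (Bingbot chosen purely as an illustration):
# User-Agent: Bingbot
# Crawl-delay: 1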
# Based on details in https://gitlab.com/gitlab-org/gitlab/blob/master/config/routes.rb,
# https://gitlab.com/gitlab-org/gitlab/blob/master/spec/routing, and on using the application itself
# Global routes
User-Agent: *
Disallow: /autocomplete/users
Disallow: /search
Disallow: /admin
Disallow: /profile
Disallow: /dashboard
Disallow: /users
Disallow: /help
Disallow: /s/
# Specifically allow only the sign-in page, to avoid ugly search results for the blocked /users paths
Allow: /users/sign_in
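# Major crawlers apply the most specific (longest) matching rule, so
# /users/sign_in stays crawlable while e.g. /users/some_user (a hypothetical
# username) is still blocked by the Disallow: /users rule above.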
# Generic resource routes like new, edit, raw
# This will block routes like:
# - /projects/new
# - /gitlab-org/gitlab-foss/issues/123/-/edit
User-Agent: *
Disallow: /*/new
Disallow: /*/edit
Disallow: /*/raw
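# In wildcard-aware matchers such as Googlebot, * also spans "/" characters,
# so these patterns cover nested routes as well, e.g.
# /gitlab-org/gitlab-foss/-/merge_requests/new (URL shown for illustration).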
# Group details
User-Agent: *
Disallow: /groups/*/analytics
Disallow: /groups/*/contribution_analytics
Disallow: /groups/*/group_members
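# The wildcard also covers the /-/ scoped variants of these routes, e.g.
# /groups/example-group/-/group_members ("example-group" is a placeholder).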
# Project details
User-Agent: *
Disallow: /*/*.git
Disallow: /*/archive/
Disallow: /*/repository/archive*
Disallow: /*/activity
Disallow: /*/blame
Disallow: /*/commits
Disallow: /*/commit
Disallow: /*/commit/*.patch
Disallow: /*/commit/*.diff
Disallow: /*/compare
Disallow: /*/network
Disallow: /*/graphs
Disallow: /*/merge_requests/*.patch
Disallow: /*/merge_requests/*.diff
Disallow: /*/merge_requests/*/diffs
Disallow: /*/deploy_keys
Disallow: /*/hooks
Disallow: /*/services
Disallow: /*/protected_branches
Disallow: /*/uploads/
Disallow: /*/project_members
Disallow: /*/settings
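#
# Note: robots.txt rules are prefix matches. Crawlers that support the $
# anchor (e.g. Googlebot) can pin a pattern to the end of the URL, e.g. to
# block only the bare repository path and not paths that merely contain ".git":
# Disallow: /*/*.git$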