
Add robots.txt

Initial version of the robots.txt file

Add robots.txt url

Add test for robots.txt
pull/3248/head
Vinícius Cantuária, 5 years ago
commit e1e1acdf75
  1. sapl/templates/robots.txt (36 changes)
  2. sapl/test_urls.py (15 changes)
  3. sapl/urls.py (6 changes)

sapl/templates/robots.txt (36 changes)

@@ -0,0 +1,36 @@
User-agent: semrushbot
Disallow: /
User-agent: BLEXBot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: spbot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: ahrefsbot
Disallow: /
User-agent: mojeekbot
Disallow: /
User-agent: yandex
Disallow: /
User-agent: baiduspider
Disallow: /
User-agent: *
# Crawl delay: 60 seconds
Crawl-delay: 60
Disallow: /materia/docacessorio/pdf/*
Disallow: /materia/docacessorio/zip/*
Disallow: /private/
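
These rules fully block a list of aggressive SEO crawlers and throttle everyone else. A quick sanity check of the syntax with Python's standard-library parser (a sketch over an abbreviated copy of the rules; "Googlebot" stands in for any agent not named above, and none of this is part of the commit):

from urllib.robotparser import RobotFileParser

# Abbreviated copy of the rules above, enough to exercise both branches.
rules = """\
User-agent: semrushbot
Disallow: /

User-agent: *
Disallow: /private/
Crawl-delay: 60
"""

parser = RobotFileParser()
parser.parse(rules.splitlines())

assert not parser.can_fetch("semrushbot", "/")          # fully blocked bot
assert not parser.can_fetch("Googlebot", "/private/x")  # protected path
assert parser.can_fetch("Googlebot", "/materia/")       # everything else
assert parser.crawl_delay("Googlebot") == 60            # throttle applies

One caveat: the standard-library parser treats the trailing * in paths such as /materia/docacessorio/pdf/* literally, while major crawlers do honor such wildcards, so those two rules cannot be verified this way.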

sapl/test_urls.py (15 changes)

@@ -20,7 +20,6 @@ _lista_urls = lista_urls()
def create_perms_post_migrate(sapl_app_config):
searched_perms = list()
# The codenames and ctypes that should exist.
ctypes = set()
@@ -74,7 +73,6 @@ btn_login = ('<input class="btn btn-success btn-sm" '
@pytest.mark.parametrize('url_item', _lista_urls)
def test_crudaux_formato_inicio_urls_associadas(url_item):
# Checks whether a crud is of the CrudAux type; if so, its url must start
# with /sistema/
key, url, var, app_name = url_item
@@ -107,7 +105,6 @@ def test_crudaux_formato_inicio_urls_associadas(url_item):
@pytest.mark.parametrize('url_item', _lista_urls)
def test_crudaux_list_do_crud_esta_na_pagina_sistema(url_item, admin_client):
# Checks whether the url belongs to a CrudAux and, if so, tests whether it
# is listed on the Tabelas Auxiliares page
key, url, var, app_name = url_item
@@ -264,7 +261,6 @@ apps_url_patterns_prefixs_and_users = {
@pytest.mark.parametrize('url_item', _lista_urls)
def test_urlpatterns(url_item, admin_client):
key, url, var, app_name = url_item
url = '/' + (url % {v: 1 for v in var})
@@ -395,7 +391,6 @@ for item in _lista_urls:
@pytest.mark.django_db(transaction=False)
@pytest.mark.parametrize('url_item', _lista_urls)
def test_permissions_urls_for_users_by_apps(url_item, client):
# username, url_item = request_com_oper_na_url
key, url, var, app_name = url_item
@@ -559,3 +554,13 @@ def test_permissions_urls_for_users_by_apps(url_item, client):
if url.startswith(pr):
_assert_login(False)
break
def test_robots_txt_get(admin_client):
response = admin_client.get("/robots.txt")
assert response.status_code == 200
assert response["content-type"] == "text/plain"
lines = response.content.decode().splitlines()
assert lines[0] == "User-agent: semrushbot"
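
The new test drives the route through pytest-django's authenticated admin_client, but crawlers never log in, so it is worth confirming the file is served to anonymous requests as well. A possible companion test using the unauthenticated client fixture this file already uses elsewhere (a sketch, not part of the commit):

def test_robots_txt_get_anonymous(client):
    # Crawlers fetch robots.txt without credentials, so the response
    # must not depend on an authenticated session.
    response = client.get("/robots.txt")
    assert response.status_code == 200
    assert response["content-type"] == "text/plain"
    assert "Crawl-delay: 60" in response.content.decode()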

sapl/urls.py (6 changes)

@@ -17,6 +17,7 @@ from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path
from django.views.generic.base import RedirectView, TemplateView
from django.views.static import serve as view_static_server
@@ -59,10 +60,11 @@ urlpatterns = [
url(r'', include(sapl.api.urls)),
url(r'^favicon\.ico$', RedirectView.as_view(
url='/static/sapl/img/favicon.ico', permanent=True)),
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/sapl/img/favicon.ico', permanent=True)),
url(r'', include(sapl.redireciona_urls.urls)),
path("robots.txt", TemplateView.as_view(template_name="robots.txt", content_type="text/plain")),
]
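
Routing robots.txt through TemplateView with content_type="text/plain" keeps the whole change to one template plus one urlconf entry. An equivalent alternative (a sketch, not what this commit does) is a small function view returning the rules directly, which avoids the template lookup at the cost of keeping the rules in Python:

from django.http import HttpResponse
from django.urls import path

def robots_txt(request):
    # Hypothetical inline variant; in the commit the real rules
    # live in sapl/templates/robots.txt instead.
    lines = [
        "User-agent: *",
        "Crawl-delay: 60",
        "Disallow: /private/",
    ]
    return HttpResponse("\n".join(lines), content_type="text/plain")

urlpatterns = [
    path("robots.txt", robots_txt),
]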
