Динамический sitemap.xml, robots.txt

main
Сергей Ванюшкин 2023-10-03 23:54:00 +03:00
parent 03cf3ffd19
commit ecf383f020
9 changed files with 121 additions and 3 deletions

View File

@ -93,6 +93,10 @@ def create_app(test_config=None):
app.register_blueprint(bp_errors)
from pyproger.robots.robots import bp as bp_robots
app.register_blueprint(bp_robots)
@security.context_processor
def security_context_processor():
return dict(

View File

@ -45,4 +45,4 @@ CKEDITOR_SERVE_LOCAL = True
CKEDITOR_ENABLE_CODESNIPPET = True
CKEDITOR_CODE_THEME = "monokai_sublime"
# Настройки блога
POSTS_ON_PAGE = 6
POSTS_ON_PAGE = 2

View File

@ -1,3 +1,7 @@
from datetime import datetime, timezone
from sqlalchemy import func
from . import db
from .models import Page, Post, Tag, User
@ -55,3 +59,21 @@ def get_page(slug):
def get_menu_items():
    """Return (name, slug) pairs for every page, used to build the site menu."""
    return db.session.query(Page.name, Page.slug).all()
def get_posts_for_sitemap():
    """Return (slug, update_datetime) rows for every published post.

    Feeds the <url> entries of the dynamically generated sitemap.xml.
    """
    query = db.session.query(Post.slug, Post.update_datetime)
    published_only = query.filter(Post.published.is_(True))
    return published_only.all()
def get_pages_for_sitemap():
    """Return all page slugs plus today's UTC date as 'YYYY-MM-DD'.

    The date string serves as the <lastmod> value for static pages,
    which carry no per-row modification timestamp.

    Returns:
        tuple: (list of Page.slug rows, date string 'YYYY-MM-DD').
    """
    pages = db.session.query(Page.slug).all()
    # datetime.utcnow() returns a naive timestamp and is deprecated
    # (Python 3.12+); use an explicitly UTC-aware one instead.
    # `timezone` is already imported at the top of this module.
    date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    return pages, date

View File

@ -2,6 +2,7 @@ import datetime
from flask_security.models import fsqla
from sqlalchemy import Boolean, Column, DateTime, Integer, String, Text
from sqlalchemy.sql import func
from . import db
@ -78,12 +79,13 @@ class Post(db.Model):
create_datetime = Column(
DateTime(),
nullable=True,
default=datetime.datetime.utcnow(),
default=func.now(),
)
update_datetime = Column(
DateTime(),
nullable=True,
onupdate=datetime.datetime.utcnow(),
default=func.now(),
onupdate=func.now(),
)
text = Column(Text)

View File

10
pyproger/robots/robots.py Normal file
View File

@ -0,0 +1,10 @@
from flask import Blueprint
# Blueprint serving the SEO endpoints (/sitemap.xml and /robots.txt).
bp = Blueprint(
    "bp_robots",
    __name__,
    template_folder="templates/robots",
    static_folder="static",
)
# Imported at the bottom on purpose: urls.py does `from .robots import bp`,
# so importing it here only after `bp` exists avoids a circular import.
# The import's side effect is registering the routes on the blueprint.
from . import urls

31
pyproger/robots/urls.py Normal file
View File

@ -0,0 +1,31 @@
from flask import make_response, render_template
from sqlalchemy.sql import func
from pyproger.dbase.database import get_pages_for_sitemap, get_posts_for_sitemap
from .robots import bp
@bp.route("/sitemap.xml", methods=["GET"])
def sitemap_xml():
    """Render the dynamic sitemap.xml from published posts and static pages.

    Returns:
        Response: the rendered sitemap with an ``application/xml``
        Content-Type.
    """
    sm_posts = get_posts_for_sitemap()
    sm_pages, date = get_pages_for_sitemap()
    sm_render = render_template(
        "robots/sitemap.xml",
        sm_pages=sm_pages,
        sm_posts=sm_posts,
        date=date,
    )
    response = make_response(sm_render)
    # A sitemap is plain XML, not an RSS feed. Assigning `mimetype`
    # already sets the Content-Type header, so the previous explicit
    # "application/rss+xml" header (immediately overwritten) is dropped.
    response.mimetype = "application/xml"
    return response
@bp.route("/robots.txt", methods=["GET"])
def robots_txt():
    """Serve robots.txt rendered from the robots template.

    Returns:
        Response: the rendered rules with a ``text/plain; charset=utf-8``
        Content-Type.
    """
    rt_render = render_template("robots/robots.txt")
    response = make_response(rt_render)
    # Set Content-Type exactly once. The original also assigned
    # `response.mimetype` afterwards, which rewrote the header and
    # discarded the explicit charset parameter.
    response.headers["Content-Type"] = "text/plain; charset=utf-8"
    return response

View File

@ -0,0 +1,37 @@
User-agent: Yandex
Disallow: /?
Disallow: /admin/
Disallow: /static/
Disallow: /post/$
Disallow: /tag/
Disallow: /tags/
Allow: /static/*.css
Allow: /static/*.js
Allow: /static/*.png
Allow: /static/*.jpg
Allow: /static/*.gif
User-agent: GoogleBot
Disallow: /?
Disallow: /admin/
Disallow: /static/
Disallow: /post/$
Disallow: /tag/
Disallow: /tags/
Allow: /static/*.css
Allow: /static/*.js
Allow: /static/*.png
Allow: /static/*.jpg
Allow: /static/*.gif
User-agent: *
Disallow: /?
Disallow: /admin/
Disallow: /static/
Disallow: /post/$
Disallow: /tag/
Disallow: /tags/
Crawl-delay: 5
Sitemap: https://pyproger.ru/sitemap.xml
Host: https://pyproger.ru

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
{# The sitemap protocol namespace is the canonical http:// URI; an
   https:// namespace is a different identifier and fails validation
   with strict consumers. Host switched to https://pyproger.ru to
   match the Sitemap/Host entries declared in robots.txt. #}
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{# NOTE(review): the view also passes `sm_pages` and `date`, but they
   are never rendered here, so static pages are missing from the
   sitemap. Add a second loop once their public URL pattern is
   confirmed. #}
{% for p in sm_posts %}
    <url>
        <loc>https://pyproger.ru/post/{{ p.slug }}</loc>
        <lastmod>{{ p.update_datetime.strftime('%Y-%m-%d') }}</lastmod>
        <changefreq>daily</changefreq>
        <priority>1.0</priority>
    </url>
{% endfor %}
</urlset>