refactor(pages): unify code for page import
parent 77a984170c
commit 28d905a24c
@@ -0,0 +1,37 @@
from pathlib import Path

from bs4 import BeautifulSoup

from .models import Page

content_path = Path(__file__).resolve().parent / "default_content"


def import_pages(force, pages):
    for file in content_path.iterdir():
        if pages and file.stem not in pages:
            continue

        slug = file.stem
        p, created = Page.objects.get_or_create(url=slug)
        if not created and not force:
            continue

        soup = BeautifulSoup(file.read_text(), "html.parser")

        if soup.title:
            p.title = soup.title.string
            soup.title.decompose()
        else:
            p.title = slug.title()

        if visible := soup.find("meta", attrs={"name": "visible"}):
            p.visible = "content" not in visible.attrs or visible.attrs[
                "content"
            ].lower() in ("1", "true", "yes")
            visible.decompose()

        p.content = str(soup).strip()
        p.save()

        yield p
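For context, import_pages is a generator: it walks the bundled default_content directory, creates or fetches a Page per file, takes the title from a <title> tag (falling back to the slug) and visibility from a <meta name="visible"> tag when present, saves the page, and yields it. A helper like this is typically driven from a Django management command; the sketch below is only an assumption about how it might be wired up and is not part of this commit — the command, module path, and option names are hypothetical.

    # Hypothetical management command wrapping import_pages.
    # The module path "pages.importer" and the CLI options are assumptions.
    from django.core.management.base import BaseCommand

    from pages.importer import import_pages  # assumed import path


    class Command(BaseCommand):
        help = "Import the bundled default pages into the database."

        def add_arguments(self, parser):
            # Optional slugs restrict the import; --force overwrites existing pages.
            parser.add_argument("pages", nargs="*", help="Page slugs to import (default: all)")
            parser.add_argument("--force", action="store_true", help="Overwrite existing pages")

        def handle(self, *args, **options):
            for page in import_pages(options["force"], options["pages"]):
                self.stdout.write(f"Imported page: {page.url}")

Because import_pages skips files whose slug already exists unless force is set, rerunning such a command without --force would leave existing pages untouched.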