Mirror of https://github.com/vale981/ablog, synced 2025-03-04 16:51:39 -05:00
Overhaul of package underneath for python3 only
Parent: 4e9041f0bb
Commit: 2e3dad57ef
31 changed files with 1591 additions and 1104 deletions
.circleci/config.yml

@@ -12,20 +12,6 @@ docs-install: &docs-install
 version: 2
 jobs:
 
-  egg-info-27:
-    docker:
-      - image: circleci/python:2.7
-    steps:
-      - checkout
-      - run: python setup.py egg_info
-
-  egg-info-35:
-    docker:
-      - image: circleci/python:3.5
-    steps:
-      - checkout
-      - run: python setup.py egg_info
-
   egg-info-36:
     docker:
       - image: circleci/python:3.6
@@ -40,6 +26,31 @@ jobs:
       - checkout
       - run: python setup.py egg_info
 
+  twine-check:
+    docker:
+      - image: circleci/python:3.7
+    steps:
+      - checkout
+      - run: python setup.py sdist
+      - run: python -m pip install -U --user --force-reinstall twine
+      - run: python -m twine check dist/*
+
+  pip-install:
+    docker:
+      - image: continuumio/miniconda3
+    steps:
+      - checkout
+      - run: *apt-install
+      - run:
+          name: Update pip
+          command: pip install -U pip
+      - run:
+          name: Install Ablog
+          command: |
+            pip install --progress-bar off .[all]
+            pip install -e .[all]
+            python setup.py develop
+
   html-docs:
     docker:
       - image: continuumio/miniconda3
@@ -48,6 +59,7 @@ jobs:
       - checkout
       - run: *apt-install
       - run: *docs-install
+      - run: pip install -e .[all]
       - run: python setup.py build_sphinx
       - store_artifacts:
          path: build/sphinx/html/
@@ -61,11 +73,14 @@ workflows:
 
   egg-info:
     jobs:
-      - egg-info-27
-      - egg-info-35
      - egg-info-36
       - egg-info-37
 
+  misc:
+    jobs:
+      - twine-check
+      - pip-install
+
   documentation:
     jobs:
       - html-docs
.github/workflows/release.yml | 26 (vendored, new file)

@@ -0,0 +1,26 @@
+on: release
+name: Release to PyPi
+jobs:
+  tag-filter:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@master
+      - name: tag-filter
+        uses: actions/bin/filter@master
+        with:
+          args: tag
+      - name: check
+        uses: ross/python-actions/setup-py/3.7@master
+        with:
+          args: check
+      - name: sdist
+        uses: ross/python-actions/setup-py/3.7@master
+        with:
+          args: sdist
+      - name: upload
+        uses: ross/python-actions/twine@master
+        env:
+          TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
+          TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
+        with:
+          args: upload ./dist/sunpy-sphinx-theme-*.tar.gz
.gitignore | 221 (vendored)

@@ -1,23 +1,228 @@
+### Python: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
-MANIFEST
-docs/manual/.ipynb_checkpoints/*
+*$py.class
+
+# C extensions
+*.so
 
 # Distribution / packaging
+.Python
+pip-wheel-metadata/
 build/
+develop-eggs/
 dist/
-demo/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
 *.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+junit/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
 
 # Sphinx documentation
-docs/_*
-docs/.*
-docs/tmp/
-docs/api/
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+### https://raw.github.com/github/gitignore/master/Global/OSX.gitignore
 
 .DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must ends with two \r.
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear on external disk
+.Spotlight-V100
+.Trashes
+
+### Linux: https://raw.githubusercontent.com/github/gitignore/master/Global/Linux.gitignore
 
 *~
-.vscode
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
+
+### MacOS: https://raw.githubusercontent.com/github/gitignore/master/Global/macOS.gitignore
+
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### Windows: https://raw.githubusercontent.com/github/gitignore/master/Global/Windows.gitignore
+
+# Windows thumbnail cache files
+Thumbs.db
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+### VScode: https://raw.githubusercontent.com/github/gitignore/master/Global/VisualStudioCode.gitignore
+.vscode/*
+
+### Extra Python Items and SunPy Specific
+.hypothesis
+.pytest_cache
+sunpydata.sqlite
+sunpydata.sqlite-journal
+sunpy/_compiler.c
+sunpy/cython_version.py
+docs/_build
+docs/generated
+docs/api/
+docs/whatsnew/latest_changelog.txt
+examples/**/*.asdf
+# This is incase you run the figure tests
+figure_test_images*
+tags
+
+### Pycharm(?)
+.idea
+
+# Release script
+.github_cache
+
+docs/_build/
+docs/.doctrees/
+docs/_website/
+docs/_latex/
 test/
.travis.yml | 17

@@ -11,18 +11,11 @@ sudo: false
 
 env:
   matrix:
-    - PYTHON_VERSION=2.7 SPHINX_VERSION=1.6
-    - PYTHON_VERSION=2.7 SPHINX_VERSION=1.8
+    - PYTHON_VERSION=3.6 SPHINX_VERSION=2.0
+    - PYTHON_VERSION=3.6 SPHINX_VERSION=2.1
 
-    - PYTHON_VERSION=3.5 SPHINX_VERSION=1.6
-    - PYTHON_VERSION=3.5 SPHINX_VERSION=1.8
-
-    - PYTHON_VERSION=3.6 SPHINX_VERSION=1.6
-    - PYTHON_VERSION=3.6 SPHINX_VERSION=1.8
-
-    - PYTHON_VERSION=3.7 SPHINX_VERSION=1.6
-    - PYTHON_VERSION=3.7 SPHINX_VERSION=1.7
-    - PYTHON_VERSION=3.7 SPHINX_VERSION=1.8
+    - PYTHON_VERSION=3.7 SPHINX_VERSION=2.0
+    - PYTHON_VERSION=3.7 SPHINX_VERSION=2.1
 
   global:
     - LOCALE=default
@@ -33,7 +26,7 @@ env:
 install:
   - git clone git://github.com/astropy/ci-helpers.git
   - source ci-helpers/travis/setup_conda.sh
-  - pip install -e .
+  - pip install -e .[all]
 
 script:
   - make tests
MANIFEST.in

@@ -1,6 +1,8 @@
 include README.rst
 include LICENSE.rst
 include setup.py
+include setup.cfg
+include pyproject.toml
 include ablog/*.py
 include ablog/templates/*.html
 include ablog/locale/sphinx.pot
Makefile | 6

@@ -2,7 +2,7 @@
 
 demo:
 	rm -rf demo
-	printf "demo\nABlog\nABlog Team\nhttp://ablog.readthedocs.org" | ablog start
+	printf "demo\nABlog\nABlog Team\nhttps://ablog.readthedocs.org" | ablog start
 
 rebuild:
 	cd docs; watchmedo shell-command --patterns='*.rst' --command='ablog build' --recursive
@@ -20,9 +20,9 @@ test3:
 	cd docs; ablog build -T -b pickle
 
 test4:
-	mkdir -p test; cd test; printf "\nABlog\nABlog Team\nhttp://ablog.readthedocs.org" | ablog start; ablog build
+	mkdir -p test; cd test; printf "\nABlog\nABlog Team\nhttps://ablog.readthedocs.org" | ablog start; ablog build
 
 test5:
-	mkdir -p test; cd test; printf "ablog\nABlog\nABlog Team\nhttp://ablog.readthedocs.org" | ablog start; cd ablog; ablog build
+	mkdir -p test; cd test; printf "ablog\nABlog\nABlog Team\nhttps://ablog.readthedocs.org" | ablog start; cd ablog; ablog build
 
 tests: test test1 test2 test3 test4 test5
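The demo and test targets above drive ``ablog start`` non-interactively by piping the project answers into it. A rough Python equivalent of that scaffold step, for anyone scripting it outside make (the answers are the ones shown in the Makefile; this sketch is illustrative, not part of the commit):

    import subprocess

    # Same answers the Makefile feeds to "ablog start": project directory,
    # title, author and canonical URL, separated by newlines.
    answers = "demo\nABlog\nABlog Team\nhttps://ablog.readthedocs.org"
    subprocess.run(["ablog", "start"], input=answers, text=True, check=True)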
README.rst | 54

@@ -12,7 +12,8 @@ ABlog for Sphinx
 Note
 ----
 
-Please note that is an official new home of `Ahmet Bakan's Ablog Sphinx extension <https://github.com/abakan/ablog/>`_. This version is maintined with the aim to keep it working for SunPy's website and thus new features are unlikely.
+Please note that is an official new home of `Ahmet Bakan's Ablog Sphinx extension <https://github.com/abakan/ablog/>`__.
+This version is maintined with the aim to keep it working for SunPy's website and thus new features are unlikely.
 
 ABlog
 -----
@@ -27,15 +28,13 @@ ABlog is a Sphinx extension that converts any documentation or personal website
 * `Easy GitHub Pages deploys`_
 * `Jupiter Notebook Support for blog posts`_
 
-.. _Atom feeds: http://ablog.readthedocs.org/blog/atom.xml
-.. _Archive pages: http://ablog.readthedocs.org/blog/
-.. _Blog sidebars: http://ablog.readthedocs.org/manual/ablog-configuration-options/#sidebars
-.. _Disqus integration: http://ablog.readthedocs.org/manual/ablog-configuration-options/#disqus-integration
-.. _Font-Awesome integration: http://ablog.readthedocs.org/manual/ablog-configuration-options/#fa
-.. _Easy GitHub Pages deploys: http://ablog.readthedocs.org/manual/deploy-to-github-pages/
-.. _Jupiter Notebook Support for blog posts: http://ablog.readthedocs.org/manual/notebook_support/
+.. _Atom feeds: https://ablog.readthedocs.org/blog/atom.xml
+.. _Archive pages: https://ablog.readthedocs.org/blog/
+.. _Blog sidebars: https://ablog.readthedocs.org/manual/ablog-configuration-options/#sidebars
+.. _Disqus integration: https://ablog.readthedocs.org/manual/ablog-configuration-options/#disqus-integration
+.. _Font-Awesome integration: https://ablog.readthedocs.org/manual/ablog-configuration-options/#fa
+.. _Easy GitHub Pages deploys: https://ablog.readthedocs.org/manual/deploy-to-github-pages/
+.. _Jupiter Notebook Support for blog posts: https://ablog.readthedocs.org/manual/notebook_support/
 
-.. _installation:
-
 Installation
 ------------
@@ -49,13 +48,13 @@ or anaconda_::
     conda config --add channels conda-forge
     conda install ablog
 
-This will also install `Sphinx <http://sphinx-doc.org/>`_, Alabaster_, Werkzeug_, and Invoke_ respectively required for building your website, making it look good, generating feeds, and running deploy commands.
+This will also install `Sphinx <http://sphinx-doc.org/>`__, Alabaster_, Werkzeug_, and Invoke_ respectively required for building your website, making it look good, generating feeds, and running deploy commands.
 
 .. _pip: https://pip.pypa.io
 .. _anaconda: https://www.anaconda.com/
-.. _Werkzeug: http://werkzeug.pocoo.org/
+.. _Werkzeug: https://werkzeug.pocoo.org/
 .. _Alabaster: https://github.com/bitprophet/alabaster
-.. _Invoke: http://www.pyinvoke.org/
+.. _Invoke: https://www.pyinvoke.org/
 
 Getting Started
 ---------------
@@ -81,12 +80,12 @@ If you already have a project, enable blogging by making following changes in ``
     # 2b. if `templates_path` is defined
     templates_path.append(ablog.get_html_templates_path())
 
-.. _ABlog Quick Start: http://ablog.readthedocs.org/manual/ablog-quick-start
+.. _ABlog Quick Start: https://ablog.readthedocs.org/manual/ablog-quick-start
 
 How it works
 ------------
 
-If you are new to Sphinx_ and reStructuredText markup language, you might find `reStructuredText Primer`_ useful.
+If you are new to Sphinx and reStructuredText markup language, you might find `reStructuredText Primer`_ useful.
 Once you have content (in ``.rst`` files), you can post *any page* using the ``post`` directive as follows:
 
 .. code-block:: rst
@@ -110,27 +109,4 @@ You can also include a list of posts using ``postlist`` directive:
       :format: {title}
       :sort:
 
-For ABlog documentation, this converts to the following where you can find more about configuring and using ABlog:
-
-.. postlist::
-   :category: Manual
-   :list-style: circle
-   :format: {title}
-   :sort:
-
-
 .. _reStructuredText Primer: http://sphinx-doc.org/rest.html
 
-.. only:: html
-
-   .. image:: https://secure.travis-ci.org/sunpy/ablog.png?branch=devel
-      :target: http://travis-ci.org/#!/sunpy/ablog
-
-   .. image:: https://readthedocs.org/projects/ablog/badge/?version=latest
-      :target: http://ablog.readthedocs.org/
-
-.. toctree::
-   :hidden:
-   :glob:
-
-   */*
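The README excerpt above describes enabling ABlog in an existing project's conf.py. A minimal sketch of what that usually amounts to (the helper and option names come from the README and the CONFIG list in ablog/blog.py below; the URL is a placeholder):

    # conf.py -- minimal sketch of enabling ABlog in an existing Sphinx project
    import ablog

    extensions = ["ablog"]                              # register the extension
    templates_path = [ablog.get_html_templates_path()]  # ABlog's sidebar templates
    blog_path = "blog"                                  # where archive pages are generated
    blog_baseurl = "https://example.org/"               # placeholder site URL, used for feeds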
ablog/__init__.py

@@ -1,87 +1,103 @@
-# -*- coding: utf-8 -*-
-"""ABlog for Sphinx"""
+"""
+ABlog for Sphinx.
+"""
 
-from __future__ import absolute_import, division, print_function
 import os
 
-from .blog import Blog, CONFIG
-from .post import (PostDirective, PostListDirective, UpdateDirective,
-                   UpdateNode, process_posts, process_postlist, purge_posts,
-                   generate_archive_pages, generate_atom_feeds,
-                   missing_reference)
+from .blog import CONFIG, Blog
+from .post import (
+    PostDirective,
+    PostListDirective,
+    UpdateDirective,
+    UpdateNode,
+    generate_archive_pages,
+    generate_atom_feeds,
+    missing_reference,
+    process_postlist,
+    process_posts,
+    purge_posts,
+)
+from .version import version as __version__
 
-__version__ = '0.9.5'
-
-__all__ = ['setup']
+__all__ = ["setup"]
 
 
 def anchor(post):
-    """Return anchor string for posts that arepage sections."""
+    """
+    Return anchor string for posts that arepage sections.
+    """
 
     if post.section:
-        return '#' + post.section
+        return "#" + post.section
     else:
-        return ''
+        return ""
 
 
 def builder_support(builder):
-    """Return True when builder is supported. Supported builders output in
-    html format, but exclude `PickleHTMLBuilder` and `JSONHTMLBuilder`,
-    which run into issues when serializing blog objects."""
+    """
+    Return True when builder is supported.
 
-    if hasattr(builder, 'builder'):
+    Supported builders output in html format, but exclude
+    `PickleHTMLBuilder` and `JSONHTMLBuilder`, which run into issues
+    when serializing blog objects.
+    """
+
+    if hasattr(builder, "builder"):
         builder = builder.builder
 
-    not_supported = set(['json', 'pickle'])
-    return builder.format == 'html' and not builder.name in not_supported
+    not_supported = {"json", "pickle"}
+    return builder.format == "html" and not builder.name in not_supported
 
 
 def html_page_context(app, pagename, templatename, context, doctree):
 
     if builder_support(app):
-        context['ablog'] = blog = Blog(app)
-        context['anchor'] = anchor
+        context["ablog"] = blog = Blog(app)
+        context["anchor"] = anchor
         # following is already available for archive pages
-        if blog.blog_baseurl and 'feed_path' not in context:
-            context['feed_path'] = blog.blog_path
-            context['feed_title'] = blog.blog_title
+        if blog.blog_baseurl and "feed_path" not in context:
+            context["feed_path"] = blog.blog_path
+            context["feed_title"] = blog.blog_title
 
 
 def setup(app):
-    """Setup ABlog extension."""
+    """
+    Setup ABlog extension.
+    """
 
     for args in CONFIG:
         app.add_config_value(*args)
 
-    app.add_directive('post', PostDirective)
-    app.add_directive('postlist', PostListDirective)
+    app.add_directive("post", PostDirective)
+    app.add_directive("postlist", PostListDirective)
 
-    app.connect('doctree-read', process_posts)
+    app.connect("doctree-read", process_posts)
 
-    app.connect('env-purge-doc', purge_posts)
-    app.connect('doctree-resolved', process_postlist)
-    app.connect('missing-reference', missing_reference)
-    app.connect('html-collect-pages', generate_archive_pages)
-    app.connect('html-collect-pages', generate_atom_feeds)
-    app.connect('html-page-context', html_page_context)
+    app.connect("env-purge-doc", purge_posts)
+    app.connect("doctree-resolved", process_postlist)
+    app.connect("missing-reference", missing_reference)
+    app.connect("html-collect-pages", generate_archive_pages)
+    app.connect("html-collect-pages", generate_atom_feeds)
+    app.connect("html-page-context", html_page_context)
 
-    app.add_directive('update', UpdateDirective)
-    app.add_node(UpdateNode,
-                 html=(lambda s, n: s.visit_admonition(n),
-                       lambda s, n: s.depart_admonition(n)),
-                 latex=(lambda s, n: s.visit_admonition(n),
-                        lambda s, n: s.depart_admonition(n)),
-                 )
+    app.add_directive("update", UpdateDirective)
+    app.add_node(
+        UpdateNode,
+        html=(lambda s, n: s.visit_admonition(n), lambda s, n: s.depart_admonition(n)),
+        latex=(lambda s, n: s.visit_admonition(n), lambda s, n: s.depart_admonition(n)),
+    )
 
     pkgdir = os.path.abspath(os.path.dirname(__file__))
-    locale_dir = os.path.join(pkgdir, 'locale')
+    locale_dir = os.path.join(pkgdir, "locale")
    app.config.locale_dirs.append(locale_dir)
 
-    return {'version': __version__}  # identifies the version of our extension
+    return {"version": __version__}  # identifies the version of our extension
 
 
 def get_html_templates_path():
-    """Return path to ABlog templates folder."""
+    """
+    Return path to ABlog templates folder.
+    """
 
     pkgdir = os.path.abspath(os.path.dirname(__file__))
-    return os.path.join(pkgdir, 'templates')
+    return os.path.join(pkgdir, "templates")
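For readers skimming the refactor, a tiny illustration of the anchor() helper reformatted above; the fake post object is made up for the example and only needs a .section attribute:

    from ablog import anchor

    class FakePost:          # stand-in for a real Post; only .section matters here
        section = "first-post"

    print(anchor(FakePost()))        # -> "#first-post"
    FakePost.section = ""
    print(repr(anchor(FakePost())))  # -> ''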
ablog/blog.py | 383

@@ -1,26 +1,27 @@
 # -*- coding: utf-8 -*-
-"""Classes for handling posts and archives."""
+"""
+Classes for handling posts and archives.
+"""
 
-from __future__ import absolute_import, division, print_function
 import os
 import re
 import sys
 
 import datetime as dtmod
-try:
-    from urlparse import urljoin
-except ImportError:
-    from urllib.parse import urljoin
 from datetime import datetime
 from unicodedata import normalize
 
 from docutils import nodes
 from docutils.io import StringOutput
 from docutils.utils import new_document
 
 from sphinx import addnodes
 from sphinx.util.osutil import relative_uri
 
+try:
+    from urlparse import urljoin
+except ImportError:
+    from urllib.parse import urljoin
 
 if sys.version_info >= (3, 0):
     text_type = str
     re_flag = 0
@@ -31,64 +32,61 @@ else:
     text_type = unicode
     re_flag = re.UNICODE
 
-__all__ = ['Blog', 'Post', 'Collection']
+__all__ = ["Blog", "Post", "Collection"]
 
 
 def slugify(string):
-    """Slugify *s*."""
+    """
+    Slugify *s*.
+    """
 
     string = text_type(string)
-    string = normalize('NFKD', string)
+    string = normalize("NFKD", string)
 
     if re_flag is None:
-        string = re.sub(r'[^\w\s-]', '', string).strip().lower()
-        return re.sub(r'[-\s]+', '-', string)
+        string = re.sub(r"[^\w\s-]", "", string).strip().lower()
+        return re.sub(r"[-\s]+", "-", string)
     else:
-        string = re.sub(r'[^\w\s-]', '', string, flags=re_flag).strip().lower()
-        return re.sub(r'[-\s]+', '-', string, flags=re_flag)
+        string = re.sub(r"[^\w\s-]", "", string, flags=re_flag).strip().lower()
+        return re.sub(r"[-\s]+", "-", string, flags=re_flag)
 
 
 def os_path_join(path, *paths):
 
-    return os.path.join(path, *paths).replace(os.path.sep, '/')
+    return os.path.join(path, *paths).replace(os.path.sep, "/")
 
 
 DEBUG = True
 CONFIG = [
     # name, default, rebuild
-    ('blog_path', 'blog', True),
-    ('blog_title', 'Blog', True),
-    ('blog_baseurl', None, True),
-    ('blog_archive_titles', None, False),
-
-    ('blog_feed_archives', False, True),
-    ('blog_feed_fulltext', False, True),
-    ('blog_feed_subtitle', None, True),
-    ('blog_feed_titles', None, False),
-    ('blog_feed_length', None, None),
-
-    ('blog_authors', {}, True),
-    ('blog_default_author', None, True),
-    ('blog_locations', {}, True),
-    ('blog_default_location', None, True),
-    ('blog_languages', {}, True),
-    ('blog_default_language', None, True),
-
-    ('fontawesome_link_cdn', None, True),
-    ('fontawesome_included', False, True),
-    ('fontawesome_css_file', '', True),
-
-    ('post_date_format', '%d %B %Y', True),
-    ('post_date_format_short', '%d %B', True),
-    ('post_auto_image', 0, True),
-    ('post_auto_excerpt', 1, True),
-    ('post_auto_orphan', True, True),
-    ('post_redirect_refresh', 5, True),
-    ('post_always_section', False, True),
-
-    ('disqus_shortname', None, True),
-    ('disqus_drafts', False, True),
-    ('disqus_pages', False, True),
+    ("blog_path", "blog", True),
+    ("blog_title", "Blog", True),
+    ("blog_baseurl", None, True),
+    ("blog_archive_titles", None, False),
+    ("blog_feed_archives", False, True),
+    ("blog_feed_fulltext", False, True),
+    ("blog_feed_subtitle", None, True),
+    ("blog_feed_titles", None, False),
+    ("blog_feed_length", None, None),
+    ("blog_authors", {}, True),
+    ("blog_default_author", None, True),
+    ("blog_locations", {}, True),
+    ("blog_default_location", None, True),
+    ("blog_languages", {}, True),
+    ("blog_default_language", None, True),
+    ("fontawesome_link_cdn", None, True),
+    ("fontawesome_included", False, True),
+    ("fontawesome_css_file", "", True),
+    ("post_date_format", "%d %B %Y", True),
+    ("post_date_format_short", "%d %B", True),
+    ("post_auto_image", 0, True),
+    ("post_auto_excerpt", 1, True),
+    ("post_auto_orphan", True, True),
+    ("post_redirect_refresh", 5, True),
+    ("post_always_section", False, True),
+    ("disqus_shortname", None, True),
+    ("disqus_drafts", False, True),
+    ("disqus_pages", False, True),
 ]
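As a quick sanity check of the slugify() helper reformatted in the hunk above (behaviour inferred from the regexes shown; requires ablog importable):

    from ablog.blog import slugify

    # Non-word characters are stripped, whitespace/hyphen runs collapse to "-".
    print(slugify("Hello, World!"))        # -> "hello-world"
    print(slugify("  ABlog   2.0 beta "))  # -> "ablog-20-beta"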
@@ -100,7 +98,7 @@ FUTURE = datetime(9999, 12, 31)
 def revise_pending_xrefs(doctree, docname):
 
     for node in doctree.traverse(addnodes.pending_xref):
-        node['refdoc'] = docname
+        node["refdoc"] = docname
 
 
 try:
@@ -110,9 +108,12 @@ except ImportError:
 
 
 def link_posts(posts):
-    """Link posts after sorting them post by published date."""
+    """
+    Link posts after sorting them post by published date.
+    """
 
     from operator import attrgetter
 
     posts = filter(attrgetter("order"), posts)
     posts = sorted(posts)
     posts[0].prev = posts[-1].next = None
@@ -124,7 +125,9 @@ def link_posts(posts):
 
 class Blog(Container):
 
-    """Handle blog operations."""
+    """
+    Handle blog operations.
+    """
 
     # using a shared state
     _dict = {}
@@ -136,7 +139,9 @@ class Blog(Container):
         self._init(app)
 
     def _init(self, app):
-        """Instantiate Blog."""
+        """
+        Instantiate Blog.
+        """
 
         self.app = app
         self.config = {}
@@ -147,75 +152,75 @@ class Blog(Container):
         for opt in CONFIG:
             self.config[opt[0]] = getattr(app.config, opt[0])
 
-        opt = self.config['blog_default_author']
+        opt = self.config["blog_default_author"]
         if opt is not None and not isinstance(opt, list):
-            self.config['blog_default_author'] = [opt]
+            self.config["blog_default_author"] = [opt]
 
-        opt = self.config['blog_default_location']
+        opt = self.config["blog_default_location"]
         if opt is not None and not isinstance(opt, list):
-            self.config['blog_default_location'] = [opt]
+            self.config["blog_default_location"] = [opt]
 
-        opt = self.config['blog_default_language']
+        opt = self.config["blog_default_language"]
         if opt is not None and not isinstance(opt, list):
-            self.config['blog_default_language'] = [opt]
+            self.config["blog_default_language"] = [opt]
 
         # blog catalog contains all posts
-        self.blog = Catalog(self, 'blog', 'blog', None)
+        self.blog = Catalog(self, "blog", "blog", None)
 
         # contains post collections by year
-        self.archive = Catalog(self, 'archive', 'archive', None, reverse=True)
-        self.archive.docname += '/archive'
-        refs['blog-archives'] = (self.archive.docname, 'Archives')
+        self.archive = Catalog(self, "archive", "archive", None, reverse=True)
+        self.archive.docname += "/archive"
+        refs["blog-archives"] = (self.archive.docname, "Archives")
 
         self.catalogs = cat = {}  # catalogs of user set labels
-        self.tags = cat['tags'] = Catalog(self, 'tags', 'tag', 'tag')
-        refs['blog-tags'] = (self.tags.docname, 'Tags')
+        self.tags = cat["tags"] = Catalog(self, "tags", "tag", "tag")
+        refs["blog-tags"] = (self.tags.docname, "Tags")
 
-        self.author = cat['author'] = Catalog(self, 'author',
-                                              'author', 'author')
-        refs['blog-authors'] = (self.author.docname, 'Authors')
+        self.author = cat["author"] = Catalog(self, "author", "author", "author")
+        refs["blog-authors"] = (self.author.docname, "Authors")
 
-        self.location = cat['location'] = Catalog(self, 'location',
-                                                   'location', 'location')
-        refs['blog-locations'] = (self.location.docname, 'Locations')
+        self.location = cat["location"] = Catalog(self, "location", "location", "location")
+        refs["blog-locations"] = (self.location.docname, "Locations")
 
-        self.language = cat['language'] = Catalog(self, 'language',
-                                                   'language', 'language')
-        refs['blog-languages'] = (self.language.docname, 'Languages')
+        self.language = cat["language"] = Catalog(self, "language", "language", "language")
+        refs["blog-languages"] = (self.language.docname, "Languages")
 
-        self.category = cat['category'] = Catalog(self, 'category',
-                                                  'category', 'category')
-        refs['blog-categories'] = (self.category.docname, 'Categories')
+        self.category = cat["category"] = Catalog(self, "category", "category", "category")
+        refs["blog-categories"] = (self.category.docname, "Categories")
 
-        for catname in ['author', 'location', 'language']:
+        for catname in ["author", "location", "language"]:
             catalog = self.catalogs[catname]
-            items = self.config['blog_' + catname + 's'].items()
+            items = self.config["blog_" + catname + "s"].items()
             for label, (name, link) in items:
                 catalog[label] = Collection(catalog, label, name, link)
 
-        self.posts = self.blog['post'] = Collection(self.blog, 'post',
-                                                    'Posts', path=self.blog_path)
-        self.drafts = self.blog['draft'] = Collection(self.blog, 'draft',
-                                                      'Drafts', path=os_path_join(self.blog_path, 'drafts'))
+        self.posts = self.blog["post"] = Collection(self.blog, "post", "Posts", path=self.blog_path)
+        self.drafts = self.blog["draft"] = Collection(
+            self.blog, "draft", "Drafts", path=os_path_join(self.blog_path, "drafts")
+        )
 
         # add references to posts and drafts
         # e.g. :ref:`blog-posts`
-        refs['blog-posts'] = (os_path_join(self.config['blog_path'], 'index'), 'Posts')
-        refs['blog-drafts'] = (os_path_join(self.config['blog_path'], 'drafts', 'index'), 'Drafts')
-        refs['blog-feed'] = (os_path_join(self.config['blog_path'], 'atom.xml'), self.blog_title + ' Feed')
+        refs["blog-posts"] = (os_path_join(self.config["blog_path"], "index"), "Posts")
+        refs["blog-drafts"] = (os_path_join(self.config["blog_path"], "drafts", "index"), "Drafts")
+        refs["blog-feed"] = (
+            os_path_join(self.config["blog_path"], "atom.xml"),
+            self.blog_title + " Feed",
+        )
 
         # set some internal configuration options
-        self.config['fontawesome'] = (self.config['fontawesome_included'] or
-                                      self.config['fontawesome_link_cdn'] or
-                                      self.config['fontawesome_css_file'])
+        self.config["fontawesome"] = (
+            self.config["fontawesome_included"]
+            or self.config["fontawesome_link_cdn"]
+            or self.config["fontawesome_css_file"]
+        )
 
     def __getattr__(self, name):
 
         try:
             attr = self.config[name]
         except KeyError:
-            raise AttributeError('ABlog has no configuration option {}'
-                                 .format(repr(name)))
+            raise AttributeError("ABlog has no configuration option {}".format(repr(name)))
         return attr
 
     def __getitem__(self, key):
@@ -236,12 +241,16 @@ class Blog(Container):
 
     @property
     def feed_path(self):
-        """RSS feed page name."""
+        """
+        RSS feed page name.
+        """
 
-        return os_path_join(self.blog_path, 'atom.xml')
+        return os_path_join(self.blog_path, "atom.xml")
 
     def register(self, docname, info):
-        """Register post *docname*."""
+        """
+        Register post *docname*.
+        """
 
         post = Post(self, docname, info)
         if post.date and post.date < TOMORROW:
@@ -252,7 +261,9 @@ class Blog(Container):
             catalog.add(post)
 
     def recent(self, num, docname=None, **labels):
-        """Yield *num* recent posts, excluding the one with `docname`."""
+        """
+        Yield *num* recent posts, excluding the one with `docname`.
+        """
 
         if num is None:
             num = len(self)
@@ -265,95 +276,107 @@ class Blog(Container):
             yield post
 
     def page_id(self, pagename):
-        """Return pagename, trimming :file:`index` from end when found.
-        Return value is used as disqus page identifier."""
+        """
+        Return pagename, trimming :file:`index` from end when found.
 
-        if self.config['blog_baseurl']:
-            if pagename.endswith('index'):
+        Return value is used as disqus page identifier.
+        """
+
+        if self.config["blog_baseurl"]:
+            if pagename.endswith("index"):
                 pagename = pagename[:-5]
-            pagename = pagename.strip('/')
-            return '/' + pagename + ('/' if pagename else '')
+            pagename = pagename.strip("/")
+            return "/" + pagename + ("/" if pagename else "")
 
     def page_url(self, pagename):
-        """Return page URL when :confval:`blog_baseurl` is set, otherwise
-        ``None``. When found, :file:`index.html` is trimmed from the end
-        of the URL."""
+        """
+        Return page URL when :confval:`blog_baseurl` is set, otherwise
+        ``None``.
 
-        if self.config['blog_baseurl']:
-            url = urljoin(self.config['blog_baseurl'], pagename)
-            if url.endswith('index'):
+        When found, :file:`index.html` is trimmed from the end of the
+        URL.
+        """
+
+        if self.config["blog_baseurl"]:
+            url = urljoin(self.config["blog_baseurl"], pagename)
+            if url.endswith("index"):
                 url = url[:-5]
             return url
 
 
 def html_builder_write_doc(self, docname, doctree):
-    """Part of :meth:`sphinx.builders.html.StandaloneHTMLBuilder.write_doc`
-    method used to convert *doctree* to HTML."""
+    """
+    Part of :meth:`sphinx.builders.html.StandaloneHTMLBuilder.write_doc` method
+    used to convert *doctree* to HTML.
+    """
 
-    destination = StringOutput(encoding='utf-8')
+    destination = StringOutput(encoding="utf-8")
     doctree.settings = self.docsettings
 
     self.secnumbers = {}
-    self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
-    self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads')
+    self.imgpath = relative_uri(self.get_target_uri(docname), "_images")
+    self.dlpath = relative_uri(self.get_target_uri(docname), "_downloads")
     self.current_docname = docname
     self.docwriter.write(doctree, destination)
     self.docwriter.assemble_parts()
-    return self.docwriter.parts['fragment']
+    return self.docwriter.parts["fragment"]
 
 
-class BlogPageMixin(object):
+class BlogPageMixin:
 
     def __str__(self):
         return self.title
 
     def __repr__(self):
-        return str(self) + ' <' + text_type(self.docname) + '>'
+        return str(self) + " <" + text_type(self.docname) + ">"
 
     @property
     def blog(self):
-        """Reference to :class:`~ablog.blog.Blog` object."""
+        """
+        Reference to :class:`~ablog.blog.Blog` object.
+        """
+
         return self._blog
 
     @property
     def title(self):
-        return getattr(self, 'name', getattr(self, '_title'))
+        return getattr(self, "name", getattr(self, "_title"))
 
 
 class Post(BlogPageMixin):
 
-    """Handle post metadata."""
+    """
+    Handle post metadata.
+    """
 
     def __init__(self, blog, docname, info):
 
         self._blog = blog
         self.docname = docname
-        self.section = info['section']
-        self.order = info['order']
-        self.date = date = info['date']
-        self.update = info['update']
-        self.nocomments = info['nocomments']
+        self.section = info["section"]
+        self.order = info["order"]
+        self.date = date = info["date"]
+        self.update = info["update"]
+        self.nocomments = info["nocomments"]
         self.published = date and date < TOMORROW
         self.draft = not self.published
-        self._title = info['title']
-        self.excerpt = info['excerpt']
-        self.doctree = info['doctree']
+        self._title = info["title"]
+        self.excerpt = info["excerpt"]
+        self.doctree = info["doctree"]
         self._next = self._prev = -1
         self._computed_date = date or FUTURE
 
-        #self.language = info.get('language')
+        # self.language = info.get('language')
 
         # archives
         # self.blog = []
         if self.published:
-            self.tags = info.get('tags')
-            self.author = info.get('author')
-            self.category = info.get('category')
-            self.location = info.get('location')
-            self.language = info.get('language')
+            self.tags = info.get("tags")
+            self.author = info.get("author")
+            self.category = info.get("category")
+            self.location = info.get("location")
+            self.language = info.get("language")
 
             if not self.author and blog.blog_default_author:
                 self.author = blog.blog_default_author
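The page_url() docstring above describes joining blog_baseurl with a page name and trimming a trailing "index"; the snippet below replays just that logic with made-up inputs:

    from urllib.parse import urljoin

    # Mirrors the body of Blog.page_url(): join baseurl and pagename, drop "index".
    url = urljoin("https://blog.example.org/", "blog/2019/my-post/index")
    if url.endswith("index"):
        url = url[:-5]
    print(url)  # -> https://blog.example.org/blog/2019/my-post/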
@@ -366,14 +389,14 @@ class Post(BlogPageMixin):
             self.archive.add(self)
 
         else:
-            self.tags = info.get('tags')
-            self.author = info.get('author')
-            self.category = info.get('category')
-            self.location = info.get('location')
-            self.language = info.get('language')
+            self.tags = info.get("tags")
+            self.author = info.get("author")
+            self.category = info.get("category")
+            self.location = info.get("location")
+            self.language = info.get("language")
             self.archive = []
 
-        self.redirect = info.get('redirect')
+        self.redirect = info.get("redirect")
 
         self.options = info
@@ -381,12 +404,16 @@ class Post(BlogPageMixin):
         return (self._computed_date, self.title) < (other._computed_date, other.title)
 
     def to_html(self, pagename, fulltext=False, drop_h1=True):
-        """Return excerpt or *fulltext* as HTML after resolving references
-        with respect to *pagename*. By default, first `<h1>` tag is dropped
-        from the output. More than one can be dropped by setting *drop_h1*
-        to the desired number of tags to be dropped."""
+        """
+        Return excerpt or *fulltext* as HTML after resolving references with
+        respect to *pagename*.
 
-        doctree = new_document('')
+        By default, first `<h1>` tag is dropped from the output. More
+        than one can be dropped by setting *drop_h1* to the desired
+        number of tags to be dropped.
+        """
+
+        doctree = new_document("")
         if fulltext:
             deepcopy = self.doctree.deepcopy()
             if isinstance(deepcopy, nodes.document):
@@ -401,20 +428,21 @@ class Post(BlogPageMixin):
         revise_pending_xrefs(doctree, pagename)
         app.env.resolve_references(doctree, pagename, app.builder)
 
-        add_permalinks, app.builder.add_permalinks = (
-            app.builder.add_permalinks, False)
+        add_permalinks, app.builder.add_permalinks = (app.builder.add_permalinks, False)
 
         html = html_builder_write_doc(app.builder, pagename, doctree)
 
         app.builder.add_permalinks = add_permalinks
 
         if drop_h1:
-            html = re.sub('<h1>(.*?)</h1>', '', html, count=abs(int(drop_h1)))
+            html = re.sub("<h1>(.*?)</h1>", "", html, count=abs(int(drop_h1)))
         return html
 
     @property
     def next(self):
-        """Next published post in chronological order."""
+        """
+        Next published post in chronological order.
+        """
 
         if self._next == -1:
             link_posts(self._blog.posts)
@@ -422,13 +450,17 @@ class Post(BlogPageMixin):
 
     @next.setter
     def next(self, post):
-        """Set next published post in chronological order."""
+        """
+        Set next published post in chronological order.
+        """
 
         self._next = post
 
     @property
     def prev(self):
-        """Previous published post in chronological order."""
+        """
+        Previous published post in chronological order.
+        """
 
         if self._prev == -1:
             link_posts(self._blog.posts)
@@ -436,14 +468,18 @@ class Post(BlogPageMixin):
 
     @prev.setter
     def prev(self, post):
-        """Set previous published post in chronological order."""
+        """
+        Set previous published post in chronological order.
+        """
 
         self._prev = post
 
 
 class Catalog(BlogPageMixin):
 
-    """Handles collections of posts."""
+    """
+    Handles collections of posts.
+    """
 
     def __init__(self, blog, name, xref, path, reverse=False):
 
@@ -492,8 +528,10 @@ class Catalog(BlogPageMixin):
             yield self.collections[key]
 
     def add(self, post):
-        """Add post to appropriate collection(s) and replace collections
-        labels with collection objects."""
+        """
+        Add post to appropriate collection(s) and replace collections labels
+        with collection objects.
+        """
 
         colls = []
         for label in getattr(post, self.name, []):
@@ -504,19 +542,21 @@ class Catalog(BlogPageMixin):
         setattr(post, self.name, colls)
 
     def _minmax(self):
-        """Return minimum and maximum sizes of collections."""
+        """
+        Return minimum and maximum sizes of collections.
+        """
 
-        if (self._coll_lens is None or
-                len(self._coll_lens) != len(self.collections)):
-            self._coll_lens = [len(coll) for coll in self.collections.values()
-                               if len(coll)]
+        if self._coll_lens is None or len(self._coll_lens) != len(self.collections):
+            self._coll_lens = [len(coll) for coll in self.collections.values() if len(coll)]
             self._min_max = min(self._coll_lens), max(self._coll_lens)
         return self._min_max
 
 
 class Collection(BlogPageMixin):
 
-    """Posts sharing a label, i.e. tag, category, author, or location."""
+    """
+    Posts sharing a label, i.e. tag, category, author, or location.
+    """
 
     def __init__(self, catalog, label, name=None, href=None, path=None, page=0):
 
@@ -529,7 +569,7 @@ class Collection(BlogPageMixin):
         self._posts = {}
         self._posts_iter = None
         self._path = path
-        self.xref = self.catalog.xref + '-' + slugify(label)
+        self.xref = self.catalog.xref + "-" + slugify(label)
         self._slug = None
         self._html = None
 
@@ -558,8 +598,7 @@ class Collection(BlogPageMixin):
             posts.sort(reverse=True)
             self._posts_iter = posts
 
-        for post in self._posts_iter:
-            yield post
+        yield from self._posts_iter
 
     def __getitem__(self, key):
 
@@ -576,28 +615,34 @@ class Collection(BlogPageMixin):
         return self._catalog
 
     def add(self, post):
-        """Add post to the collection."""
+        """
+        Add post to the collection.
+        """
 
         post_name = post.docname
         if post.section:
-            post_name += '#' + post.section
+            post_name += "#" + post.section
         self._posts[post_name] = post
 
     def relsize(self, maxsize=5, minsize=1):
-        """Relative size used in tag clouds."""
+        """
+        Relative size used in tag clouds.
+        """
 
         min_, max_ = self.catalog._minmax()
 
         diff = maxsize - minsize
         if len(self.catalog) == 1 or min_ == max_:
-            return int(round(diff / 2. + minsize))
+            return int(round(diff / 2.0 + minsize))
 
-        size = int(1. * (len(self) - min_) / (max_ - min_) * diff + minsize)
+        size = int(1.0 * (len(self) - min_) / (max_ - min_) * diff + minsize)
         return size
 
     @property
     def docname(self):
-        """Collection page document name."""
+        """
+        Collection page document name.
+        """
 
         if self._path is None:
             self._path = os_path_join(self.catalog.path, slugify(self.name))
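A worked example of the relsize() formula above, using invented collection sizes to show how a tag-cloud weight between minsize and maxsize falls out:

    # Hypothetical numbers: smallest collection has 1 post, largest 10, this one 5.
    min_, max_, n_posts = 1, 10, 5
    minsize, maxsize = 1, 5
    diff = maxsize - minsize
    size = int(1.0 * (n_posts - min_) / (max_ - min_) * diff + minsize)
    print(size)  # -> 2, i.e. a smallish entry in the tag cloud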
|
|
@ -1,70 +1,79 @@
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
import os
|
import os
|
||||||
import io
|
|
||||||
import sys
|
import sys
|
||||||
import glob
|
import glob
|
||||||
import ablog
|
|
||||||
import shutil
|
import shutil
|
||||||
import argparse
|
import argparse
|
||||||
from distutils.version import LooseVersion
|
|
||||||
|
|
||||||
from sphinx import __version__
|
import ablog
|
||||||
|
|
||||||
BUILDDIR = '_website'
|
from .start import ablog_start
|
||||||
DOCTREES = '.doctrees'
|
|
||||||
SPHINX_LT_17 = LooseVersion(__version__) < LooseVersion('1.7')
|
|
||||||
|
|
||||||
__all__ = ['ablog_build', 'ablog_clean',
|
BUILDDIR = "_website"
|
||||||
'ablog_serve', 'ablog_deploy', 'ablog_main']
|
DOCTREES = ".doctrees"
|
||||||
|
|
||||||
|
__all__ = ["ablog_build", "ablog_clean", "ablog_serve", "ablog_deploy", "ablog_main"]
|
||||||
|
|
||||||
|
|
 def find_confdir(sourcedir=None):
-    """Return path to current directory or its parent that contains conf.py"""
+    """
+    Return path to current directory or its parent that contains conf.py.
+    """
     from os.path import isfile, join, abspath

-    confdir = (sourcedir or os.getcwd())
+    confdir = sourcedir or os.getcwd()

-    def parent(d): return abspath(join(d, '..'))
+    def parent(d):
+        return abspath(join(d, ".."))

-    while not isfile(join(confdir, 'conf.py')) and confdir != parent(confdir):
+    while not isfile(join(confdir, "conf.py")) and confdir != parent(confdir):
         confdir = parent(confdir)

-    conf = join(confdir, 'conf.py')
+    conf = join(confdir, "conf.py")

-    if isfile(conf) and 'ablog' in open(conf).read():
+    if isfile(conf) and "ablog" in open(conf).read():
         return confdir
     else:
-        sys.exit("Current directory and its parents doesn't "
-                 "contain configuration file (conf.py).")
+        sys.exit(
+            "Current directory and its parents doesn't " "contain configuration file (conf.py)."
+        )


 def read_conf(confdir):
-    """Return conf.py file as a module."""
+    """
+    Return conf.py file as a module.
+    """
     sys.path.insert(0, confdir)
-    conf = __import__('conf')
+    conf = __import__("conf")
     sys.path.pop(0)
     return conf
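For orientation, a short sketch of how these two helpers feed the commands defined below; it simply mirrors what ablog_build does and adds no new behaviour:

    confdir = find_confdir()   # walk up from the current directory until a conf.py is found
    conf = read_conf(confdir)  # import that conf.py as a module
    website = os.path.join(confdir, getattr(conf, "ablog_website", BUILDDIR))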
 parser = argparse.ArgumentParser(
     description="ABlog for blogging with Sphinx",
-    epilog="See 'ablog <command> -h' for more information on a specific "
-           "command.")
+    epilog="See 'ablog <command> -h' for more information on a specific " "command.",
+)

-parser.add_argument('-v', '--version',
-                    help="print ABlog version and exit",
-                    action='version', version=ablog.__version__)
+parser.add_argument(
+    "-v",
+    "--version",
+    help="print ABlog version and exit",
+    action="version",
+    version=ablog.__version__,
+)

-commands = ablog_commands = parser.add_subparsers(title='commands')
+commands = ablog_commands = parser.add_subparsers(title="commands")
||||||
|
|
||||||
|
|
||||||
def cmd(func=None, **kwargs):
|
def cmd(func=None, **kwargs):
|
||||||
|
|
||||||
if func is None:
|
if func is None:
|
||||||
|
|
||||||
def cmd_inner(func):
|
def cmd_inner(func):
|
||||||
return cmd(func, **kwargs)
|
return cmd(func, **kwargs)
|
||||||
|
|
||||||
return cmd_inner
|
return cmd_inner
|
||||||
else:
|
else:
|
||||||
command = commands.add_parser(**kwargs)
|
command = commands.add_parser(**kwargs)
|
||||||
|
@ -81,8 +90,10 @@ def arg(*args, **kwargs):
|
||||||
else:
|
else:
|
||||||
func = None
|
func = None
|
||||||
if func is None:
|
if func is None:
|
||||||
|
|
||||||
def arg_inner(func):
|
def arg_inner(func):
|
||||||
return arg(func, *args, **kwargs)
|
return arg(func, *args, **kwargs)
|
||||||
|
|
||||||
return arg_inner
|
return arg_inner
|
||||||
else:
|
else:
|
||||||
func.command.add_argument(*args, **kwargs)
|
func.command.add_argument(*args, **kwargs)
|
||||||
|
@ -91,146 +102,197 @@ def arg(*args, **kwargs):
|
||||||
|
|
||||||
def arg_website(func):
|
def arg_website(func):
|
||||||
|
|
||||||
arg(func, '-w', dest='website', type=str,
|
arg(
|
||||||
|
func,
|
||||||
|
"-w",
|
||||||
|
dest="website",
|
||||||
|
type=str,
|
||||||
help="path for website, default is %s when `ablog_website` "
|
help="path for website, default is %s when `ablog_website` "
|
||||||
"is not set in conf.py" % BUILDDIR)
|
"is not set in conf.py" % BUILDDIR,
|
||||||
|
)
|
||||||
return func
|
return func
|
||||||
|
|
||||||
|
|
||||||
def arg_doctrees(func):
|
def arg_doctrees(func):
|
||||||
|
|
||||||
arg(func, '-d', dest='doctrees', type=str,
|
arg(
|
||||||
|
func,
|
||||||
|
"-d",
|
||||||
|
dest="doctrees",
|
||||||
|
type=str,
|
||||||
help="path for the cached environment and doctree files, "
|
help="path for the cached environment and doctree files, "
|
||||||
"default %s when `ablog_doctrees` is not set in conf.py" %
|
"default %s when `ablog_doctrees` is not set in conf.py" % DOCTREES,
|
||||||
DOCTREES)
|
)
|
||||||
return func
|
return func
|
||||||
|
|
||||||
|
|
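The cmd and arg helpers above register subcommands on the shared argparse parser: cmd adds a subparser for the decorated function, and arg attaches arguments to it through func.command. A toy sketch of the intended pattern; the hello command is hypothetical and not part of ABlog:

    @arg("-n", dest="name", type=str, help="who to greet")
    @cmd(name="hello", help="print a greeting")
    def ablog_hello(name=None, **kwargs):
        # @cmd (applied first) creates the "hello" subparser; @arg then adds -n to it
        print("Hello, {}!".format(name or "world"))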
||||||
from .start import ablog_start
|
cmd(
|
||||||
cmd(ablog_start, name='start', help='start a new blog project',
|
ablog_start,
|
||||||
|
name="start",
|
||||||
|
help="start a new blog project",
|
||||||
description="Start a new blog project by answering a few questions. "
|
description="Start a new blog project by answering a few questions. "
|
||||||
"You will end up with a configuration file and sample pages.")
|
"You will end up with a configuration file and sample pages.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@arg('-P', dest='runpdb',
|
@arg("-P", dest="runpdb", action="store_true", default=False, help="run pdb on exception")
|
||||||
action='store_true', default=False,
|
@arg(
|
||||||
help="run pdb on exception")
|
"-T",
|
||||||
@arg('-T', dest='traceback',
|
dest="traceback",
|
||||||
action='store_true', default=False,
|
action="store_true",
|
||||||
help="show full traceback on exception")
|
default=False,
|
||||||
@arg('-W', dest='werror',
|
help="show full traceback on exception",
|
||||||
action='store_true', default=False,
|
)
|
||||||
help='turn warnings into errors')
|
@arg("-W", dest="werror", action="store_true", default=False, help="turn warnings into errors")
|
||||||
@arg('-N', dest='no_colors',
|
@arg("-N", dest="no_colors", action="store_true", default=False, help="do not emit colored output")
|
||||||
action='store_true', default=False,
|
@arg(
|
||||||
help='do not emit colored output')
|
"-Q",
|
||||||
@arg('-Q', dest='extra_quiet',
|
dest="extra_quiet",
|
||||||
action='store_true', default=False,
|
action="store_true",
|
||||||
help='no output at all, not even warnings')
|
default=False,
|
||||||
@arg('-q', dest='quiet',
|
help="no output at all, not even warnings",
|
||||||
action='store_true', default=False,
|
)
|
||||||
help='no output on stdout, just warnings on stderr')
|
@arg(
|
||||||
@arg('-v', dest='verbosity',
|
"-q",
|
||||||
action='count', default=0,
|
dest="quiet",
|
||||||
help='increase verbosity (can be repeated)')
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="no output on stdout, just warnings on stderr",
|
||||||
|
)
|
||||||
|
@arg("-v", dest="verbosity", action="count", default=0, help="increase verbosity (can be repeated)")
|
||||||
@arg_doctrees
|
@arg_doctrees
|
||||||
@arg_website
|
@arg_website
|
||||||
@arg('-s', dest='sourcedir', type=str,
|
@arg(
|
||||||
help="root path for source files, "
|
"-s",
|
||||||
"default is path to the folder that contains conf.py")
|
dest="sourcedir",
|
||||||
@arg('-b', dest='builder', type=str,
|
type=str,
|
||||||
help="builder to use, default `ablog_builder` or dirhtml")
|
help="root path for source files, " "default is path to the folder that contains conf.py",
|
||||||
@arg('-a', dest='allfiles', action='store_true', default=False,
|
)
|
||||||
help="write all files; default is to only write new and changed files")
|
@arg("-b", dest="builder", type=str, help="builder to use, default `ablog_builder` or dirhtml")
|
||||||
@cmd(name='build', help='build your blog project',
|
@arg(
|
||||||
description="Path options can be set in conf.py. "
|
"-a",
|
||||||
"Default values of paths are relative to conf.py.")
|
dest="allfiles",
|
||||||
def ablog_build(builder=None, sourcedir=None, website=None, doctrees=None,
|
action="store_true",
|
||||||
traceback=False, runpdb=False, allfiles=False, werror=False, verbosity=0,
|
default=False,
|
||||||
quiet=False, extra_quiet=False, no_colors=False, **kwargs):
|
help="write all files; default is to only write new and changed files",
|
||||||
|
)
|
||||||
|
@cmd(
|
||||||
|
name="build",
|
||||||
|
help="build your blog project",
|
||||||
|
description="Path options can be set in conf.py. "
|
||||||
|
"Default values of paths are relative to conf.py.",
|
||||||
|
)
|
||||||
|
def ablog_build(
|
||||||
|
builder=None,
|
||||||
|
sourcedir=None,
|
||||||
|
website=None,
|
||||||
|
doctrees=None,
|
||||||
|
traceback=False,
|
||||||
|
runpdb=False,
|
||||||
|
allfiles=False,
|
||||||
|
werror=False,
|
||||||
|
verbosity=0,
|
||||||
|
quiet=False,
|
||||||
|
extra_quiet=False,
|
||||||
|
no_colors=False,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
confdir = find_confdir(sourcedir)
|
confdir = find_confdir(sourcedir)
|
||||||
conf = read_conf(confdir)
|
conf = read_conf(confdir)
|
||||||
website = (website or
|
website = website or os.path.join(confdir, getattr(conf, "ablog_website", BUILDDIR))
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_website', BUILDDIR)))
|
doctrees = doctrees or os.path.join(confdir, getattr(conf, "ablog_doctrees", DOCTREES))
|
||||||
doctrees = (doctrees or
|
sourcedir = sourcedir or confdir
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_doctrees', DOCTREES)))
|
|
||||||
sourcedir = (sourcedir or confdir)
|
|
||||||
argv = sys.argv[:1]
|
argv = sys.argv[:1]
|
||||||
argv.extend(['-b', builder or getattr(conf, 'ablog_builder', 'dirhtml')])
|
argv.extend(["-b", builder or getattr(conf, "ablog_builder", "dirhtml")])
|
||||||
argv.extend(['-d', doctrees])
|
argv.extend(["-d", doctrees])
|
||||||
if traceback:
|
if traceback:
|
||||||
argv.extend(['-T'])
|
argv.extend(["-T"])
|
||||||
if runpdb:
|
if runpdb:
|
||||||
argv.extend(['-P'])
|
argv.extend(["-P"])
|
||||||
if allfiles:
|
if allfiles:
|
||||||
argv.extend(['-a'])
|
argv.extend(["-a"])
|
||||||
if werror:
|
if werror:
|
||||||
argv.extend(['-W'])
|
argv.extend(["-W"])
|
||||||
if verbosity > 0:
|
if verbosity > 0:
|
||||||
argv.extend(['-v'] * verbosity)
|
argv.extend(["-v"] * verbosity)
|
||||||
if quiet:
|
if quiet:
|
||||||
argv.extend(['-q'])
|
argv.extend(["-q"])
|
||||||
if extra_quiet:
|
if extra_quiet:
|
||||||
argv.extend(['-Q'])
|
argv.extend(["-Q"])
|
||||||
if no_colors:
|
if no_colors:
|
||||||
argv.extend(['-N'])
|
argv.extend(["-N"])
|
     argv.extend([sourcedir, website])
-    if SPHINX_LT_17:
-        from sphinx import main
-        sys.exit(main(argv))
-    else:
-        from sphinx.cmd.build import main
-        # As of Sphinx 1.7, the first argument is now no longer ignored
-        sys.exit(main(argv[1:]))
+    from sphinx.cmd.build import main
+
+    sys.exit(main(argv[1:]))
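With the Sphinx < 1.7 shim gone, the build always goes through sphinx.cmd.build.main, which takes the argument list without the program name. A minimal sketch of the equivalent direct call; the source and output paths below are placeholders:

    from sphinx.cmd.build import main

    # roughly what `ablog build` assembles with default settings
    exit_code = main(["-b", "dirhtml", "-d", ".doctrees", "source", "_website"])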
||||||
|
|
||||||
|
|
||||||
@arg('-D', dest='deep', action='store_true', default=False,
|
@arg(
|
||||||
help="deep clean, remove cached environment and doctree files")
|
"-D",
|
||||||
|
dest="deep",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="deep clean, remove cached environment and doctree files",
|
||||||
|
)
|
||||||
@arg_doctrees
|
@arg_doctrees
|
||||||
@arg_website
|
@arg_website
|
||||||
@cmd(name='clean', help='clean your blog build files',
|
@cmd(
|
||||||
description="Path options can be set in conf.py. "
|
name="clean",
|
||||||
"Default values of paths are relative to conf.py.")
|
help="clean your blog build files",
|
||||||
|
description="Path options can be set in conf.py. "
|
||||||
|
"Default values of paths are relative to conf.py.",
|
||||||
|
)
|
||||||
def ablog_clean(website=None, doctrees=None, deep=False, **kwargs):
|
def ablog_clean(website=None, doctrees=None, deep=False, **kwargs):
|
||||||
|
|
||||||
confdir = find_confdir()
|
confdir = find_confdir()
|
||||||
conf = read_conf(confdir)
|
conf = read_conf(confdir)
|
||||||
|
|
||||||
website = (website or
|
website = website or os.path.join(confdir, getattr(conf, "ablog_website", BUILDDIR))
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_website', BUILDDIR)))
|
|
||||||
|
|
||||||
doctrees = (doctrees or
|
doctrees = doctrees or os.path.join(confdir, getattr(conf, "ablog_doctrees", DOCTREES))
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_doctrees', DOCTREES)))
|
|
||||||
|
|
||||||
nothing = True
|
nothing = True
|
||||||
if glob.glob(os.path.join(website, '*')):
|
if glob.glob(os.path.join(website, "*")):
|
||||||
shutil.rmtree(website)
|
shutil.rmtree(website)
|
||||||
print('Removed {}.'.format(os.path.relpath(website)))
|
print("Removed {}.".format(os.path.relpath(website)))
|
||||||
nothing = False
|
nothing = False
|
||||||
|
|
||||||
if deep and glob.glob(os.path.join(doctrees, '*')):
|
if deep and glob.glob(os.path.join(doctrees, "*")):
|
||||||
shutil.rmtree(doctrees)
|
shutil.rmtree(doctrees)
|
||||||
print('Removed {}.'.format(os.path.relpath(doctrees)))
|
print("Removed {}.".format(os.path.relpath(doctrees)))
|
||||||
nothing = False
|
nothing = False
|
||||||
|
|
||||||
if nothing:
|
if nothing:
|
||||||
print('Nothing to clean.')
|
print("Nothing to clean.")
|
||||||
|
|
||||||
|
|
||||||
@arg('--patterns', dest='patterns', default='*.rst;*.txt',
|
@arg("--patterns", dest="patterns", default="*.rst;*.txt", help="patterns for triggering rebuilds")
|
||||||
help="patterns for triggering rebuilds")
|
@arg(
|
||||||
@arg('-r', dest='rebuild', action='store_true', default=False,
|
"-r",
|
||||||
help="rebuild when a file matching patterns change or get added")
|
dest="rebuild",
|
||||||
@arg('-n', dest='view', action='store_false', default=True,
|
action="store_true",
|
||||||
help="do not open website in a new browser tab")
|
default=False,
|
||||||
@arg('-p', dest='port', type=int, default=8000,
|
help="rebuild when a file matching patterns change or get added",
|
||||||
help='port number for HTTP server; default is 8000')
|
)
|
||||||
|
@arg(
|
||||||
|
"-n",
|
||||||
|
dest="view",
|
||||||
|
action="store_false",
|
||||||
|
default=True,
|
||||||
|
help="do not open website in a new browser tab",
|
||||||
|
)
|
||||||
|
@arg("-p", dest="port", type=int, default=8000, help="port number for HTTP server; default is 8000")
|
||||||
@arg_website
|
@arg_website
|
||||||
@cmd(name='serve', help='serve and view your project',
|
@cmd(
|
||||||
description="Serve options can be set in conf.py. "
|
name="serve",
|
||||||
"Default values of paths are relative to conf.py.")
|
help="serve and view your project",
|
||||||
def ablog_serve(website=None, port=8000, view=True, rebuild=False,
|
description="Serve options can be set in conf.py. "
|
||||||
patterns='*.rst;*.txt', **kwargs):
|
"Default values of paths are relative to conf.py.",
|
||||||
|
)
|
||||||
|
def ablog_serve(
|
||||||
|
website=None, port=8000, view=True, rebuild=False, patterns="*.rst;*.txt", **kwargs
|
||||||
|
):
|
||||||
|
|
||||||
confdir = find_confdir()
|
confdir = find_confdir()
|
||||||
conf = read_conf(confdir)
|
conf = read_conf(confdir)
|
||||||
|
@ -252,35 +314,36 @@ def ablog_serve(website=None, port=8000, view=True, rebuild=False,
|
||||||
httpd = socketserver.TCPServer(("", port), Handler)
|
httpd = socketserver.TCPServer(("", port), Handler)
|
||||||
|
|
||||||
ip, port = httpd.socket.getsockname()
|
ip, port = httpd.socket.getsockname()
|
||||||
print("Serving HTTP on {}:{}.".format(ip, port))
|
print(f"Serving HTTP on {ip}:{port}.")
|
||||||
print("Quit the server with Control-C.")
|
print("Quit the server with Control-C.")
|
||||||
|
|
||||||
website = (website or
|
website = website or os.path.join(confdir, getattr(conf, "ablog_website", "_website"))
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_website', '_website')))
|
|
||||||
|
|
||||||
os.chdir(website)
|
os.chdir(website)
|
||||||
|
|
||||||
if rebuild:
|
if rebuild:
|
||||||
|
|
||||||
#from watchdog.watchmedo import observe_with
|
# from watchdog.watchmedo import observe_with
|
||||||
from watchdog.observers import Observer
|
from watchdog.observers import Observer
|
||||||
from watchdog.tricks import ShellCommandTrick
|
from watchdog.tricks import ShellCommandTrick
|
||||||
patterns = patterns.split(';')
|
|
||||||
ignore_patterns = [os.path.join(website, '*')]
|
patterns = patterns.split(";")
|
||||||
handler = ShellCommandTrick(shell_command='ablog build -s ' + confdir,
|
ignore_patterns = [os.path.join(website, "*")]
|
||||||
patterns=patterns,
|
handler = ShellCommandTrick(
|
||||||
ignore_patterns=ignore_patterns,
|
shell_command="ablog build -s " + confdir,
|
||||||
ignore_directories=False,
|
patterns=patterns,
|
||||||
wait_for_process=True,
|
ignore_patterns=ignore_patterns,
|
||||||
drop_during_process=False)
|
ignore_directories=False,
|
||||||
|
wait_for_process=True,
|
||||||
|
drop_during_process=False,
|
||||||
|
)
|
||||||
|
|
||||||
observer = Observer(timeout=1)
|
observer = Observer(timeout=1)
|
||||||
observer.schedule(handler, confdir, recursive=True)
|
observer.schedule(handler, confdir, recursive=True)
|
||||||
observer.start()
|
observer.start()
|
||||||
try:
|
try:
|
||||||
if view:
|
if view:
|
||||||
(webbrowser.open_new_tab('http://127.0.0.1:{}'.format(port)) and
|
(webbrowser.open_new_tab(f"http://127.0.0.1:{port}") and httpd.serve_forever())
|
||||||
httpd.serve_forever())
|
|
||||||
else:
|
else:
|
||||||
httpd.serve_forever()
|
httpd.serve_forever()
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
|
@ -289,20 +352,17 @@ def ablog_serve(website=None, port=8000, view=True, rebuild=False,
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if view:
|
if view:
|
||||||
(webbrowser.open_new_tab('http://127.0.0.1:{}'.format(port)) and
|
(webbrowser.open_new_tab(f"http://127.0.0.1:{port}") and httpd.serve_forever())
|
||||||
httpd.serve_forever())
|
|
||||||
else:
|
else:
|
||||||
httpd.serve_forever()
|
httpd.serve_forever()
|
||||||
|
|
||||||
|
|
||||||
@arg('-t', dest='title', type=str,
|
@arg("-t", dest="title", type=str, help="post title; default is formed from filename")
|
||||||
help='post title; default is formed from filename')
|
@arg(dest="filename", type=str, help="filename, e.g. my-nth-post (.rst appended)")
|
||||||
@arg(dest='filename', type=str,
|
@cmd(name="post", help="create a blank post")
|
||||||
help='filename, e.g. my-nth-post (.rst appended)')
|
|
||||||
@cmd(name='post', help='create a blank post',)
|
|
||||||
def ablog_post(filename, title=None, **kwargs):
|
def ablog_post(filename, title=None, **kwargs):
|
||||||
|
|
||||||
POST_TEMPLATE = u'''
|
POST_TEMPLATE = """
|
||||||
%(title)s
|
%(title)s
|
||||||
%(equal)s
|
%(equal)s
|
||||||
|
|
||||||
|
@ -310,85 +370,111 @@ def ablog_post(filename, title=None, **kwargs):
|
||||||
:tags:
|
:tags:
|
||||||
:category:
|
:category:
|
||||||
|
|
||||||
'''
|
"""
|
||||||
from datetime import date
|
from datetime import date
|
||||||
from os import path
|
from os import path
|
||||||
|
|
||||||
# Generate basic post params.
|
# Generate basic post params.
|
||||||
today = date.today()
|
today = date.today()
|
||||||
if not filename.lower().endswith('.rst'):
|
if not filename.lower().endswith(".rst"):
|
||||||
filename += '.rst'
|
filename += ".rst"
|
||||||
|
|
||||||
today = today.strftime("%b %d, %Y")
|
today = today.strftime("%b %d, %Y")
|
||||||
if not title:
|
if not title:
|
||||||
title = filename[:-4].replace('-', ' ').title()
|
title = filename[:-4].replace("-", " ").title()
|
||||||
|
|
||||||
pars = {'date': today,
|
pars = {"date": today, "title": title, "equal": "=" * len(title)}
|
||||||
'title': title,
|
|
||||||
'equal': '=' * len(title)
|
|
||||||
}
|
|
||||||
|
|
||||||
if path.isfile(filename):
|
if path.isfile(filename):
|
||||||
pass
|
pass
|
||||||
# read the file, and add post directive
|
# read the file, and add post directive
|
||||||
# and save it
|
# and save it
|
||||||
else:
|
else:
|
||||||
with io.open(filename, 'w', encoding='utf-8') as out:
|
with open(filename, "w", encoding="utf-8") as out:
|
||||||
post_text = POST_TEMPLATE % pars
|
post_text = POST_TEMPLATE % pars
|
||||||
out.write(post_text)
|
out.write(post_text)
|
||||||
|
|
||||||
print('Blog post created: %s' % filename)
|
print("Blog post created: %s" % filename)
|
||||||
|
|
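As a rough illustration of what this command writes, the same %-formatting applied to a trimmed stand-in for POST_TEMPLATE; the title and date below are made up:

    TEMPLATE = "\n%(title)s\n%(equal)s\n"  # trimmed stand-in for POST_TEMPLATE
    pars = {"date": "Jan 01, 2020", "title": "My First Post", "equal": "=" * len("My First Post")}
    print(TEMPLATE % pars)  # prints the title with a matching "=" underline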
||||||
|
|
||||||
@arg('--github-token', dest='github_token', type=str,
|
@arg(
|
||||||
help="environment variable name storing GitHub access token")
|
"--github-token",
|
||||||
@arg('--push-quietly', dest='push_quietly', action='store_true', default=False,
|
dest="github_token",
|
||||||
help="be more quiet when pushing changes")
|
type=str,
|
||||||
@arg('-f', dest='push_force', action='store_true', default=False,
|
help="environment variable name storing GitHub access token",
|
||||||
help="owerwrite last commit, i.e. `commit --amend; push -f`")
|
)
|
||||||
@arg('-m', dest='message', type=str, help="commit message")
|
@arg(
|
||||||
@arg('-g', dest='github_pages', type=str,
|
"--push-quietly",
|
||||||
help="GitHub username for deploying to GitHub pages")
|
dest="push_quietly",
|
||||||
@arg('-p', dest='repodir', type=str,
|
action="store_true",
|
||||||
help="path to the location of repository to be deployed, e.g. "
|
default=False,
|
||||||
"`../username.github.io`, default is folder containing `conf.py`")
|
help="be more quiet when pushing changes",
|
||||||
|
)
|
||||||
|
@arg(
|
||||||
|
"-f",
|
||||||
|
dest="push_force",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="owerwrite last commit, i.e. `commit --amend; push -f`",
|
||||||
|
)
|
||||||
|
@arg("-m", dest="message", type=str, help="commit message")
|
||||||
|
@arg("-g", dest="github_pages", type=str, help="GitHub username for deploying to GitHub pages")
|
||||||
|
@arg(
|
||||||
|
"-p",
|
||||||
|
dest="repodir",
|
||||||
|
type=str,
|
||||||
|
help="path to the location of repository to be deployed, e.g. "
|
||||||
|
"`../username.github.io`, default is folder containing `conf.py`",
|
||||||
|
)
|
||||||
@arg_website
|
@arg_website
|
||||||
@cmd(name='deploy', help='deploy your website build files',
|
@cmd(
|
||||||
description="Path options can be set in conf.py. "
|
name="deploy",
|
||||||
"Default values of paths are relative to conf.py.")
|
help="deploy your website build files",
|
||||||
def ablog_deploy(website, message=None, github_pages=None,
|
description="Path options can be set in conf.py. "
|
||||||
push_quietly=False, push_force=False, github_token=None, repodir=None,
|
"Default values of paths are relative to conf.py.",
|
||||||
**kwargs):
|
)
|
||||||
|
def ablog_deploy(
|
||||||
|
website,
|
||||||
|
message=None,
|
||||||
|
github_pages=None,
|
||||||
|
push_quietly=False,
|
||||||
|
push_force=False,
|
||||||
|
github_token=None,
|
||||||
|
repodir=None,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
|
||||||
confdir = find_confdir()
|
confdir = find_confdir()
|
||||||
conf = read_conf(confdir)
|
conf = read_conf(confdir)
|
||||||
|
|
||||||
github_pages = (github_pages or getattr(conf, 'github_pages', None))
|
github_pages = github_pages or getattr(conf, "github_pages", None)
|
||||||
|
|
||||||
website = (website or
|
website = website or os.path.join(confdir, getattr(conf, "ablog_builddir", "_website"))
|
||||||
os.path.join(confdir, getattr(conf, 'ablog_builddir', '_website')))
|
|
||||||
|
|
||||||
tomove = glob.glob(os.path.join(website, '*'))
|
tomove = glob.glob(os.path.join(website, "*"))
|
||||||
if not tomove:
|
if not tomove:
|
||||||
print('Nothing to deploy, build first.')
|
print("Nothing to deploy, build first.")
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from invoke import run
|
from invoke import run
|
||||||
except ImportError:
|
except ImportError:
|
||||||
raise ImportError("invoke is required by deploy command, "
|
raise ImportError("invoke is required by deploy command, " "run `pip install invoke`")
|
||||||
"run `pip install invoke`")
|
|
||||||
|
|
||||||
if github_pages:
|
if github_pages:
|
||||||
|
|
||||||
if repodir is None:
|
if repodir is None:
|
||||||
repodir = os.path.join(confdir, "{0}.github.io".format(github_pages))
|
repodir = os.path.join(confdir, f"{github_pages}.github.io")
|
||||||
if os.path.isdir(repodir):
|
if os.path.isdir(repodir):
|
||||||
os.chdir(repodir)
|
os.chdir(repodir)
|
||||||
run("git pull", echo=True)
|
run("git pull", echo=True)
|
||||||
else:
|
else:
|
||||||
run("git clone https://github.com/{0}/{0}.github.io.git {1}"
|
run(
|
||||||
.format(github_pages, repodir), echo=True)
|
"git clone https://github.com/{0}/{0}.github.io.git {1}".format(
|
||||||
|
github_pages, repodir
|
||||||
|
),
|
||||||
|
echo=True,
|
||||||
|
)
|
||||||
|
|
||||||
git_add = []
|
git_add = []
|
||||||
for tm in tomove:
|
for tm in tomove:
|
||||||
|
@ -406,41 +492,43 @@ def ablog_deploy(website, message=None, github_pages=None,
|
||||||
os.renames(fn, fnnew)
|
os.renames(fn, fnnew)
|
||||||
|
|
||||||
git_add.append(fnnew)
|
git_add.append(fnnew)
|
||||||
print('Moved {} files to {}.github.io'
|
print("Moved {} files to {}.github.io".format(len(git_add), github_pages))
|
||||||
.format(len(git_add), github_pages))
|
|
||||||
|
|
||||||
os.chdir(repodir)
|
os.chdir(repodir)
|
||||||
|
|
||||||
run("git add -f " + " ".join(['"{}"'.format(os.path.relpath(p))
|
run(
|
||||||
for p in git_add]), echo=True)
|
"git add -f " + " ".join(['"{}"'.format(os.path.relpath(p)) for p in git_add]),
|
||||||
if not os.path.isfile('.nojekyll'):
|
echo=True,
|
||||||
open('.nojekyll', 'w')
|
)
|
||||||
|
if not os.path.isfile(".nojekyll"):
|
||||||
|
open(".nojekyll", "w")
|
||||||
run("git add -f .nojekyll")
|
run("git add -f .nojekyll")
|
||||||
|
|
||||||
commit = 'git commit -m "{}"'.format(message or 'Updates.')
|
commit = 'git commit -m "{}"'.format(message or "Updates.")
|
||||||
if push_force:
|
if push_force:
|
||||||
commit += ' --amend'
|
commit += " --amend"
|
||||||
run(commit, echo=True)
|
run(commit, echo=True)
|
||||||
|
|
||||||
if github_token:
|
if github_token:
|
||||||
with open(os.path.join(repodir, '.git/credentials'), 'w') as out:
|
with open(os.path.join(repodir, ".git/credentials"), "w") as out:
|
||||||
out.write('https://{}:@github.com'
|
out.write("https://{}:@github.com".format(os.environ[github_token]))
|
||||||
.format(os.environ[github_token]))
|
|
||||||
run('git config credential.helper "store --file=.git/credentials"')
|
run('git config credential.helper "store --file=.git/credentials"')
|
||||||
push = 'git push'
|
push = "git push"
|
||||||
if push_quietly:
|
if push_quietly:
|
||||||
push += ' -q'
|
push += " -q"
|
||||||
if push_force:
|
if push_force:
|
||||||
push += ' -f'
|
push += " -f"
|
||||||
push += ' origin master'
|
push += " origin master"
|
||||||
run(push, echo=True)
|
run(push, echo=True)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
print('No place to deploy.')
|
print("No place to deploy.")
|
||||||
|
|
||||||
|
|
||||||
def ablog_main():
|
def ablog_main():
|
||||||
"Ablog Main"
|
"""
|
||||||
|
Ablog Main.
|
||||||
|
"""
|
||||||
if len(sys.argv) == 1:
|
if len(sys.argv) == 1:
|
||||||
parser.print_help()
|
parser.print_help()
|
||||||
else:
|
else:
|
||||||
|
|
556 ablog/post.py
@@ -1,75 +1,96 @@
 # -*- coding: utf-8 -*-
-"""post and postlist directives."""
+"""
+post and postlist directives.
+"""

-from __future__ import absolute_import, division, print_function
-
 import io
 import os
 import sys
 from string import Formatter
 from datetime import datetime

+from docutils import nodes
+from docutils.parsers.rst import Directive, directives
+from docutils.parsers.rst.directives.admonitions import BaseAdmonition
+from sphinx.locale import _
+from sphinx.util.nodes import set_source_info
+
+import ablog
+
+from .blog import Blog, os_path_join, revise_pending_xrefs, slugify
+
 try:
     from dateutil.parser import parse as date_parser
 except ImportError:
     date_parser = None

-from docutils import nodes
-from sphinx.locale import _
-from sphinx.util.nodes import set_source_info
-from docutils.parsers.rst import directives, Directive
-from docutils.parsers.rst.directives.admonitions import BaseAdmonition
-
-import ablog
-from .blog import Blog, slugify, os_path_join, revise_pending_xrefs
-
-if sys.version_info >= (3, 0):
-    text_type = str
-else:
-    text_type = unicode
+text_type = str

-__all__ = ['PostNode', 'PostList', 'UpdateNode', 'PostDirective',
-           'UpdateDirective', 'PostListDirective', 'purge_posts',
-           'process_posts', 'process_postlist', 'generate_archive_pages',
-           'generate_atom_feeds', 'register_posts']
+__all__ = [
+    "PostNode",
+    "PostList",
+    "UpdateNode",
+    "PostDirective",
+    "UpdateDirective",
+    "PostListDirective",
+    "purge_posts",
+    "process_posts",
+    "process_postlist",
+    "generate_archive_pages",
+    "generate_atom_feeds",
+    "register_posts",
+]
class PostNode(nodes.Element):
|
class PostNode(nodes.Element):
|
||||||
"""Represent ``post`` directive content and options in document tree."""
|
"""
|
||||||
|
Represent ``post`` directive content and options in document tree.
|
||||||
|
"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class PostList(nodes.General, nodes.Element):
|
class PostList(nodes.General, nodes.Element):
|
||||||
"""Represent ``postlist`` directive converted to a list of links."""
|
"""
|
||||||
|
Represent ``postlist`` directive converted to a list of links.
|
||||||
|
"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class UpdateNode(nodes.admonition):
|
class UpdateNode(nodes.admonition):
|
||||||
"""Represent ``update`` directive."""
|
"""
|
||||||
|
Represent ``update`` directive.
|
||||||
|
"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class PostDirective(Directive):
|
class PostDirective(Directive):
|
||||||
"""Handle ``post`` directives."""
|
"""
|
||||||
|
Handle ``post`` directives.
|
||||||
|
"""
|
||||||
|
|
||||||
def _split(a): return [s.strip() for s in (a or '').split(',') if s.strip()]
|
def _split(a):
|
||||||
|
return [s.strip() for s in (a or "").split(",") if s.strip()]
|
||||||
|
|
||||||
has_content = True
|
has_content = True
|
||||||
required_arguments = 0
|
required_arguments = 0
|
||||||
optional_arguments = 1
|
optional_arguments = 1
|
||||||
final_argument_whitespace = True
|
final_argument_whitespace = True
|
||||||
option_spec = {
|
option_spec = {
|
||||||
'tags': _split,
|
"tags": _split,
|
||||||
'author': _split,
|
"author": _split,
|
||||||
'category': _split,
|
"category": _split,
|
||||||
'location': _split,
|
"location": _split,
|
||||||
'language': _split,
|
"language": _split,
|
||||||
'redirect': _split,
|
"redirect": _split,
|
||||||
'title': lambda a: a.strip(),
|
"title": lambda a: a.strip(),
|
||||||
'image': int,
|
"image": int,
|
||||||
'excerpt': int,
|
"excerpt": int,
|
||||||
'exclude': directives.flag,
|
"exclude": directives.flag,
|
||||||
'nocomments': directives.flag,
|
"nocomments": directives.flag,
|
||||||
}
|
}
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
|
@ -77,21 +98,20 @@ class PostDirective(Directive):
|
||||||
node = PostNode()
|
node = PostNode()
|
||||||
node.document = self.state.document
|
node.document = self.state.document
|
||||||
set_source_info(self, node)
|
set_source_info(self, node)
|
||||||
self.state.nested_parse(self.content, self.content_offset,
|
self.state.nested_parse(self.content, self.content_offset, node, match_titles=1)
|
||||||
node, match_titles=1)
|
|
||||||
|
|
||||||
node['date'] = self.arguments[0] if self.arguments else None
|
node["date"] = self.arguments[0] if self.arguments else None
|
||||||
node['tags'] = self.options.get('tags', [])
|
node["tags"] = self.options.get("tags", [])
|
||||||
node['author'] = self.options.get('author', [])
|
node["author"] = self.options.get("author", [])
|
||||||
node['category'] = self.options.get('category', [])
|
node["category"] = self.options.get("category", [])
|
||||||
node['location'] = self.options.get('location', [])
|
node["location"] = self.options.get("location", [])
|
||||||
node['language'] = self.options.get('language', [])
|
node["language"] = self.options.get("language", [])
|
||||||
node['redirect'] = self.options.get('redirect', [])
|
node["redirect"] = self.options.get("redirect", [])
|
||||||
node['title'] = self.options.get('title', None)
|
node["title"] = self.options.get("title", None)
|
||||||
node['image'] = self.options.get('image', None)
|
node["image"] = self.options.get("image", None)
|
||||||
node['excerpt'] = self.options.get('excerpt', None)
|
node["excerpt"] = self.options.get("excerpt", None)
|
||||||
node['exclude'] = 'exclude' in self.options
|
node["exclude"] = "exclude" in self.options
|
||||||
node['nocomments'] = 'nocomments' in self.options
|
node["nocomments"] = "nocomments" in self.options
|
||||||
return [node]
|
return [node]
|
||||||
|
|
||||||
|
|
||||||
|
@ -100,30 +120,34 @@ class UpdateDirective(BaseAdmonition):
|
||||||
node_class = UpdateNode
|
node_class = UpdateNode
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
ad = super(UpdateDirective, self).run()
|
ad = super().run()
|
||||||
ad[0]['date'] = self.arguments[0] if self.arguments else ''
|
ad[0]["date"] = self.arguments[0] if self.arguments else ""
|
||||||
return ad
|
return ad
|
||||||
|
|
||||||
|
|
||||||
class PostListDirective(Directive):
|
class PostListDirective(Directive):
|
||||||
"""Handle ``postlist`` directives."""
|
"""
|
||||||
|
Handle ``postlist`` directives.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _split(a):
|
||||||
|
return {s.strip() for s in a.split(",")}
|
||||||
|
|
||||||
def _split(a): return set(s.strip() for s in a.split(','))
|
|
||||||
has_content = False
|
has_content = False
|
||||||
required_arguments = 0
|
required_arguments = 0
|
||||||
optional_arguments = 1
|
optional_arguments = 1
|
||||||
final_argument_whitespace = False
|
final_argument_whitespace = False
|
||||||
option_spec = {
|
option_spec = {
|
||||||
'tags': _split,
|
"tags": _split,
|
||||||
'author': _split,
|
"author": _split,
|
||||||
'category': _split,
|
"category": _split,
|
||||||
'location': _split,
|
"location": _split,
|
||||||
'language': _split,
|
"language": _split,
|
||||||
'format': lambda a: a.strip(),
|
"format": lambda a: a.strip(),
|
||||||
'date': lambda a: a.strip(),
|
"date": lambda a: a.strip(),
|
||||||
'sort': directives.flag,
|
"sort": directives.flag,
|
||||||
'excerpts': directives.flag,
|
"excerpts": directives.flag,
|
||||||
'list-style': lambda a: a.strip(),
|
"list-style": lambda a: a.strip(),
|
||||||
}
|
}
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
|
@ -131,37 +155,40 @@ class PostListDirective(Directive):
|
||||||
node = PostList()
|
node = PostList()
|
||||||
node.document = self.state.document
|
node.document = self.state.document
|
||||||
set_source_info(self, node)
|
set_source_info(self, node)
|
||||||
self.state.nested_parse(self.content, self.content_offset,
|
self.state.nested_parse(self.content, self.content_offset, node, match_titles=1)
|
||||||
node, match_titles=1)
|
|
||||||
|
|
||||||
node['length'] = int(self.arguments[0]) if self.arguments else None
|
node["length"] = int(self.arguments[0]) if self.arguments else None
|
||||||
node['tags'] = self.options.get('tags', [])
|
node["tags"] = self.options.get("tags", [])
|
||||||
node['author'] = self.options.get('author', [])
|
node["author"] = self.options.get("author", [])
|
||||||
node['category'] = self.options.get('category', [])
|
node["category"] = self.options.get("category", [])
|
||||||
node['location'] = self.options.get('location', [])
|
node["location"] = self.options.get("location", [])
|
||||||
node['language'] = self.options.get('language', [])
|
node["language"] = self.options.get("language", [])
|
||||||
node['format'] = self.options.get('format', '{date} - {title}')
|
node["format"] = self.options.get("format", "{date} - {title}")
|
||||||
node['date'] = self.options.get('date', None)
|
node["date"] = self.options.get("date", None)
|
||||||
node['sort'] = 'sort' in self.options
|
node["sort"] = "sort" in self.options
|
||||||
node['excerpts'] = 'excerpts' in self.options
|
node["excerpts"] = "excerpts" in self.options
|
||||||
node['image'] = 'image' in self.options
|
node["image"] = "image" in self.options
|
||||||
node['list-style'] = self.options.get('list-style', 'none')
|
node["list-style"] = self.options.get("list-style", "none")
|
||||||
return [node]
|
return [node]
|
||||||
|
|
||||||
|
|
||||||
def purge_posts(app, env, docname):
|
def purge_posts(app, env, docname):
|
||||||
"""Remove post and reference to it from the standard domain when its
|
"""
|
||||||
document is removed or changed."""
|
Remove post and reference to it from the standard domain when its document
|
||||||
|
is removed or changed.
|
||||||
|
"""
|
||||||
|
|
||||||
if hasattr(env, 'ablog_posts'):
|
if hasattr(env, "ablog_posts"):
|
||||||
env.ablog_posts.pop(docname, None)
|
env.ablog_posts.pop(docname, None)
|
||||||
|
|
||||||
filename = os.path.split(docname)[1]
|
filename = os.path.split(docname)[1]
|
||||||
env.domains['std'].data['labels'].pop(filename, None)
|
env.domains["std"].data["labels"].pop(filename, None)
|
||||||
|
|
||||||
|
|
||||||
def _get_section_title(section):
|
def _get_section_title(section):
|
||||||
"""Return section title as text."""
|
"""
|
||||||
|
Return section title as text.
|
||||||
|
"""
|
||||||
|
|
||||||
for title in section.traverse(nodes.title):
|
for title in section.traverse(nodes.title):
|
||||||
return title.astext()
|
return title.astext()
|
||||||
|
@ -171,48 +198,51 @@ def _get_section_title(section):
|
||||||
|
|
||||||
|
|
||||||
def _get_update_dates(section, docname, post_date_format):
|
def _get_update_dates(section, docname, post_date_format):
|
||||||
"""Return list of dates of updates found section."""
|
"""
|
||||||
|
Return list of dates of updates found section.
|
||||||
|
"""
|
||||||
|
|
||||||
update_nodes = list(section.traverse(UpdateNode))
|
update_nodes = list(section.traverse(UpdateNode))
|
||||||
update_dates = []
|
update_dates = []
|
||||||
for update_node in update_nodes:
|
for update_node in update_nodes:
|
||||||
try:
|
try:
|
||||||
update = datetime.strptime(update_node['date'], post_date_format)
|
update = datetime.strptime(update_node["date"], post_date_format)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
if date_parser:
|
if date_parser:
|
||||||
try:
|
try:
|
||||||
update = date_parser(update_node['date'])
|
update = date_parser(update_node["date"])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise ValueError('invalid post date in: ' + docname)
|
raise ValueError("invalid post date in: " + docname)
|
||||||
else:
|
else:
|
||||||
raise ValueError('invalid post date (%s) in ' % (date) +
|
raise ValueError(
|
||||||
docname +
|
"invalid post date (%s) in " % (date)
|
||||||
". Expected format: %s" % post_date_format)
|
+ docname
|
||||||
|
+ ". Expected format: %s" % post_date_format
|
||||||
|
)
|
||||||
# Insert a new title element which contains the `Updated on {date}` logic.
|
# Insert a new title element which contains the `Updated on {date}` logic.
|
||||||
substitute = nodes.title(u'',
|
substitute = nodes.title("", "Updated on " + update.strftime(post_date_format))
|
||||||
'Updated on '
|
|
||||||
+ update.strftime(post_date_format)
|
|
||||||
)
|
|
||||||
update_node.insert(0, substitute)
|
update_node.insert(0, substitute)
|
||||||
update_node['classes'] = ['note', 'update']
|
update_node["classes"] = ["note", "update"]
|
||||||
|
|
||||||
update_dates.append(update)
|
update_dates.append(update)
|
||||||
return update_dates
|
return update_dates
|
||||||
|
|
||||||
|
|
||||||
def process_posts(app, doctree):
|
def process_posts(app, doctree):
|
||||||
"""Process posts and map posted document names to post details in the
|
"""
|
||||||
environment."""
|
Process posts and map posted document names to post details in the
|
||||||
|
environment.
|
||||||
|
"""
|
||||||
|
|
||||||
env = app.builder.env
|
env = app.builder.env
|
||||||
if not hasattr(env, 'ablog_posts'):
|
if not hasattr(env, "ablog_posts"):
|
||||||
env.ablog_posts = {}
|
env.ablog_posts = {}
|
||||||
|
|
||||||
post_nodes = list(doctree.traverse(PostNode))
|
post_nodes = list(doctree.traverse(PostNode))
|
||||||
if not post_nodes:
|
if not post_nodes:
|
||||||
return
|
return
|
||||||
post_date_format = app.config['post_date_format']
|
post_date_format = app.config["post_date_format"]
|
||||||
should_auto_orphan = app.config['post_auto_orphan']
|
should_auto_orphan = app.config["post_auto_orphan"]
|
||||||
docname = env.docname
|
docname = env.docname
|
||||||
|
|
||||||
if should_auto_orphan:
|
if should_auto_orphan:
|
||||||
|
@ -220,15 +250,15 @@ def process_posts(app, doctree):
|
||||||
# "document isn't included in any toctree" warning is not issued
|
# "document isn't included in any toctree" warning is not issued
|
||||||
# We do not simply assign to should_auto_orphan because if auto-orphan
|
# We do not simply assign to should_auto_orphan because if auto-orphan
|
||||||
# is false, we still want to respect the per-post :rst:dir`orphan` setting
|
# is false, we still want to respect the per-post :rst:dir`orphan` setting
|
||||||
app.env.metadata[docname]['orphan'] = True
|
app.env.metadata[docname]["orphan"] = True
|
||||||
|
|
||||||
blog = Blog(app)
|
blog = Blog(app)
|
||||||
auto_excerpt = blog.post_auto_excerpt
|
auto_excerpt = blog.post_auto_excerpt
|
||||||
multi_post = len(post_nodes) > 1 or blog.post_always_section
|
multi_post = len(post_nodes) > 1 or blog.post_always_section
|
||||||
|
|
||||||
for order, node in enumerate(post_nodes, start=1):
|
for order, node in enumerate(post_nodes, start=1):
|
||||||
if node['excerpt'] is None:
|
if node["excerpt"] is None:
|
||||||
node['excerpt'] = auto_excerpt
|
node["excerpt"] = auto_excerpt
|
||||||
|
|
||||||
if multi_post:
|
if multi_post:
|
||||||
# section title, and first few paragraphs of the section of post
|
# section title, and first few paragraphs of the section of post
|
||||||
|
@ -247,34 +277,34 @@ def process_posts(app, doctree):
|
||||||
|
|
||||||
# Making sure that post has a title because all post titles
|
# Making sure that post has a title because all post titles
|
||||||
# are needed when resolving post lists in documents
|
# are needed when resolving post lists in documents
|
||||||
title = node['title'] or _get_section_title(section)
|
title = node["title"] or _get_section_title(section)
|
||||||
|
|
||||||
# creating a summary here, before references are resolved
|
# creating a summary here, before references are resolved
|
||||||
excerpt = []
|
excerpt = []
|
||||||
if node.children:
|
if node.children:
|
||||||
if node['exclude']:
|
if node["exclude"]:
|
||||||
node.replace_self([])
|
node.replace_self([])
|
||||||
else:
|
else:
|
||||||
node.replace_self(node.children)
|
node.replace_self(node.children)
|
||||||
for child in node.children:
|
for child in node.children:
|
||||||
excerpt.append(child.deepcopy())
|
excerpt.append(child.deepcopy())
|
||||||
elif node['excerpt']:
|
elif node["excerpt"]:
|
||||||
count = 0
|
count = 0
|
||||||
for nod in section.traverse(nodes.paragraph):
|
for nod in section.traverse(nodes.paragraph):
|
||||||
excerpt.append(nod.deepcopy())
|
excerpt.append(nod.deepcopy())
|
||||||
count += 1
|
count += 1
|
||||||
if count >= (node['excerpt'] or 0):
|
if count >= (node["excerpt"] or 0):
|
||||||
break
|
break
|
||||||
node.replace_self([])
|
node.replace_self([])
|
||||||
else:
|
else:
|
||||||
node.replace_self([])
|
node.replace_self([])
|
||||||
nimg = node['image'] or blog.post_auto_image
|
nimg = node["image"] or blog.post_auto_image
|
||||||
if nimg:
|
if nimg:
|
||||||
for img, nod in enumerate(section.traverse(nodes.image), start=1):
|
for img, nod in enumerate(section.traverse(nodes.image), start=1):
|
||||||
if img == nimg:
|
if img == nimg:
|
||||||
excerpt.append(nod.deepcopy())
|
excerpt.append(nod.deepcopy())
|
||||||
break
|
break
|
||||||
date = node['date']
|
date = node["date"]
|
||||||
if date:
|
if date:
|
||||||
try:
|
try:
|
||||||
date = datetime.strptime(date, post_date_format)
|
date = datetime.strptime(date, post_date_format)
|
||||||
|
@ -283,11 +313,13 @@ def process_posts(app, doctree):
|
||||||
try:
|
try:
|
||||||
date = date_parser(date)
|
date = date_parser(date)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise ValueError('invalid post date in: ' + docname)
|
raise ValueError("invalid post date in: " + docname)
|
||||||
else:
|
else:
|
||||||
raise ValueError('invalid post date (%s) in ' % (date) +
|
raise ValueError(
|
||||||
docname +
|
"invalid post date (%s) in " % (date)
|
||||||
". Expected format: %s" % post_date_format)
|
+ docname
|
||||||
|
+ ". Expected format: %s" % post_date_format
|
||||||
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
date = None
|
date = None
|
||||||
|
@ -296,20 +328,20 @@ def process_posts(app, doctree):
|
||||||
# a potential problem here is that there may be files/folders with the
|
# a potential problem here is that there may be files/folders with the
|
||||||
# same name, so issuing a warning when that's the case may be a good idea
|
# same name, so issuing a warning when that's the case may be a good idea
|
||||||
folder, label = os.path.split(docname)
|
folder, label = os.path.split(docname)
|
||||||
if label == 'index':
|
if label == "index":
|
||||||
folder, label = os.path.split(folder)
|
folder, label = os.path.split(folder)
|
||||||
if not label:
|
if not label:
|
||||||
label = slugify(title)
|
label = slugify(title)
|
||||||
|
|
||||||
section_name = ''
|
section_name = ""
|
||||||
if multi_post and section.parent is not doctree:
|
if multi_post and section.parent is not doctree:
|
||||||
section_name = section.attributes['ids'][0]
|
section_name = section.attributes["ids"][0]
|
||||||
label += '-' + section_name
|
label += "-" + section_name
|
||||||
else:
|
else:
|
||||||
# create a reference for the post
|
# create a reference for the post
|
||||||
# if it is posting the document
|
# if it is posting the document
|
||||||
# ! this does not work for sections
|
# ! this does not work for sections
|
||||||
app.env.domains['std'].data['labels'][label] = (docname, label, title)
|
app.env.domains["std"].data["labels"][label] = (docname, label, title)
|
||||||
|
|
||||||
if section.parent is doctree:
|
if section.parent is doctree:
|
||||||
section_copy = section[0].deepcopy()
|
section_copy = section[0].deepcopy()
|
||||||
|
@ -318,29 +350,29 @@ def process_posts(app, doctree):
|
||||||
|
|
||||||
# multiple posting may result having post nodes
|
# multiple posting may result having post nodes
|
||||||
for nn in section_copy.traverse(PostNode):
|
for nn in section_copy.traverse(PostNode):
|
||||||
if nn['exclude']:
|
if nn["exclude"]:
|
||||||
nn.replace_self([])
|
nn.replace_self([])
|
||||||
else:
|
else:
|
||||||
nn.replace_self(node.children)
|
nn.replace_self(node.children)
|
||||||
|
|
||||||
postinfo = {
|
postinfo = {
|
||||||
'docname': docname,
|
"docname": docname,
|
||||||
'section': section_name,
|
"section": section_name,
|
||||||
'order': order,
|
"order": order,
|
||||||
'date': date,
|
"date": date,
|
||||||
'update': max(update_dates + [date]),
|
"update": max(update_dates + [date]),
|
||||||
'title': title,
|
"title": title,
|
||||||
'excerpt': excerpt,
|
"excerpt": excerpt,
|
||||||
'tags': node['tags'],
|
"tags": node["tags"],
|
||||||
'author': node['author'],
|
"author": node["author"],
|
||||||
'category': node['category'],
|
"category": node["category"],
|
||||||
'location': node['location'],
|
"location": node["location"],
|
||||||
'language': node['language'],
|
"language": node["language"],
|
||||||
'redirect': node['redirect'],
|
"redirect": node["redirect"],
|
||||||
'nocomments': node['nocomments'],
|
"nocomments": node["nocomments"],
|
||||||
'image': node['image'],
|
"image": node["image"],
|
||||||
'exclude': node['exclude'],
|
"exclude": node["exclude"],
|
||||||
'doctree': section_copy
|
"doctree": section_copy,
|
||||||
}
|
}
|
||||||
|
|
||||||
if docname not in env.ablog_posts:
|
if docname not in env.ablog_posts:
|
||||||
|
@ -349,25 +381,28 @@ def process_posts(app, doctree):
|
||||||
|
|
||||||
# instantiate catalogs and collections here
|
# instantiate catalogs and collections here
|
||||||
# so that references are created and no warnings are issued
|
# so that references are created and no warnings are issued
|
||||||
if app.builder.format == 'html':
|
if app.builder.format == "html":
|
||||||
stdlabel = env.domains['std'].data['labels']
|
stdlabel = env.domains["std"].data["labels"]
|
||||||
else:
|
else:
|
||||||
stdlabel = env.intersphinx_inventory.setdefault('std:label', {})
|
stdlabel = env.intersphinx_inventory.setdefault("std:label", {})
|
||||||
baseurl = getattr(env.config, 'blog_baseurl').rstrip('/') + '/'
|
baseurl = getattr(env.config, "blog_baseurl").rstrip("/") + "/"
|
||||||
project, version = env.config.project, text_type(env.config.version)
|
project, version = env.config.project, text_type(env.config.version)
|
||||||
|
|
||||||
for key in ['tags', 'author', 'category', 'location', 'language']:
|
for key in ["tags", "author", "category", "location", "language"]:
|
||||||
catalog = blog.catalogs[key]
|
catalog = blog.catalogs[key]
|
||||||
for label in postinfo[key]:
|
for label in postinfo[key]:
|
||||||
coll = catalog[label]
|
coll = catalog[label]
|
||||||
|
|
||||||
if postinfo['date']:
|
if postinfo["date"]:
|
||||||
coll = blog.archive[postinfo['date'].year]
|
coll = blog.archive[postinfo["date"].year]
|
||||||
|
|
||||||
|
|
||||||
def process_postlist(app, doctree, docname):
|
def process_postlist(app, doctree, docname):
|
||||||
"""Replace `PostList` nodes with lists of posts. Also, register all posts
|
"""
|
||||||
if they have not been registered yet."""
|
Replace `PostList` nodes with lists of posts.
|
||||||
|
|
||||||
|
Also, register all posts if they have not been registered yet.
|
||||||
|
"""
|
||||||
|
|
||||||
blog = Blog(app)
|
blog = Blog(app)
|
||||||
if not blog:
|
if not blog:
|
||||||
|
@ -375,7 +410,7 @@ def process_postlist(app, doctree, docname):
|
||||||
|
|
||||||
for node in doctree.traverse(PostList):
|
for node in doctree.traverse(PostList):
|
||||||
colls = []
|
colls = []
|
||||||
for cat in ['tags', 'author', 'category', 'location', 'language']:
|
for cat in ["tags", "author", "category", "location", "language"]:
|
||||||
for coll in node[cat]:
|
for coll in node[cat]:
|
||||||
if coll in blog.catalogs[cat].collections:
|
if coll in blog.catalogs[cat].collections:
|
||||||
colls.append(blog.catalogs[cat].collections[coll])
|
colls.append(blog.catalogs[cat].collections[coll])
|
||||||
|
@ -386,27 +421,24 @@ def process_postlist(app, doctree, docname):
|
||||||
posts = posts & set(coll)
|
posts = posts & set(coll)
|
||||||
posts = list(posts)
|
posts = list(posts)
|
||||||
posts.sort(reverse=True)
|
posts.sort(reverse=True)
|
||||||
posts = posts[:node.attributes['length']]
|
posts = posts[: node.attributes["length"]]
|
||||||
else:
|
else:
|
||||||
posts = list(blog.recent(node.attributes['length'], docname,
|
posts = list(blog.recent(node.attributes["length"], docname, **node.attributes))
|
||||||
**node.attributes))
|
|
||||||
|
|
||||||
if node.attributes['sort']:
|
if node.attributes["sort"]:
|
||||||
posts.sort() # in reverse chronological order, so no reverse=True
|
posts.sort() # in reverse chronological order, so no reverse=True
|
||||||
|
|
||||||
fmts = list(Formatter().parse(node.attributes['format']))
|
fmts = list(Formatter().parse(node.attributes["format"]))
|
||||||
not_in = set(['date', 'title', 'author', 'location', 'language',
|
not_in = {"date", "title", "author", "location", "language", "category", "tags", None}
|
||||||
'category', 'tags', None])
|
|
||||||
for text, key, __, __ in fmts:
|
for text, key, __, __ in fmts:
|
||||||
if key not in not_in:
|
if key not in not_in:
|
||||||
raise KeyError('{} is not recognized in postlist format'
|
raise KeyError(f"{key} is not recognized in postlist format")
|
||||||
.format(key))
|
|
||||||
|
|
||||||
excerpts = node.attributes['excerpts']
|
excerpts = node.attributes["excerpts"]
|
||||||
date_format = node.attributes['date'] or _(blog.post_date_format_short)
|
date_format = node.attributes["date"] or _(blog.post_date_format_short)
|
||||||
bl = nodes.bullet_list()
|
bl = nodes.bullet_list()
|
||||||
bl.attributes['classes'].append('postlist-style-' + node['list-style'])
|
bl.attributes["classes"].append("postlist-style-" + node["list-style"])
|
||||||
bl.attributes['classes'].append('postlist')
|
bl.attributes["classes"].append("postlist")
|
||||||
for post in posts:
|
for post in posts:
|
||||||
bli = nodes.list_item()
|
bli = nodes.list_item()
|
||||||
bl.append(bli)
|
bl.append(bli)
|
||||||
|
@ -418,30 +450,30 @@ def process_postlist(app, doctree, docname):
|
||||||
par.append(nodes.Text(text))
|
par.append(nodes.Text(text))
|
||||||
if key is None:
|
if key is None:
|
||||||
continue
|
continue
|
||||||
if key == 'date':
|
if key == "date":
|
||||||
par.append(nodes.Text(post.date.strftime(date_format)))
|
par.append(nodes.Text(post.date.strftime(date_format)))
|
||||||
else:
|
else:
|
||||||
if key == 'title':
|
if key == "title":
|
||||||
items = [post]
|
items = [post]
|
||||||
else:
|
else:
|
||||||
items = getattr(post, key)
|
items = getattr(post, key)
|
||||||
|
|
||||||
for i, item in enumerate(items, start=1):
|
for i, item in enumerate(items, start=1):
|
||||||
if key == 'title':
|
if key == "title":
|
||||||
ref = nodes.reference()
|
ref = nodes.reference()
|
||||||
ref['refuri'] = app.builder.get_relative_uri(docname, item.docname)
|
ref["refuri"] = app.builder.get_relative_uri(docname, item.docname)
|
||||||
ref['ids'] = []
|
ref["ids"] = []
|
||||||
ref['backrefs'] = []
|
ref["backrefs"] = []
|
||||||
ref['dupnames'] = []
|
ref["dupnames"] = []
|
||||||
ref['classes'] = []
|
ref["classes"] = []
|
||||||
ref['names'] = []
|
ref["names"] = []
|
||||||
ref['internal'] = True
|
ref["internal"] = True
|
||||||
ref.append(nodes.Text(text_type(item)))
|
ref.append(nodes.Text(text_type(item)))
|
||||||
else:
|
else:
|
||||||
ref = _missing_reference(app, item.xref, docname)
|
ref = _missing_reference(app, item.xref, docname)
|
||||||
par.append(ref)
|
par.append(ref)
|
||||||
if i < len(items):
|
if i < len(items):
|
||||||
par.append(nodes.Text(', '))
|
par.append(nodes.Text(", "))
|
||||||
if excerpts and post.excerpt:
|
if excerpts and post.excerpt:
|
||||||
for enode in post.excerpt:
|
for enode in post.excerpt:
|
||||||
enode = enode.deepcopy()
|
enode = enode.deepcopy()
|
||||||
|
@ -455,9 +487,8 @@ def process_postlist(app, doctree, docname):
|
||||||
|
|
||||||
def missing_reference(app, env, node, contnode):
|
def missing_reference(app, env, node, contnode):
|
||||||
|
|
||||||
target = node['reftarget']
|
target = node["reftarget"]
|
||||||
return _missing_reference(app, target, node.get('refdoc'),
|
return _missing_reference(app, target, node.get("refdoc"), contnode, node.get("refexplicit"))
|
||||||
contnode, node.get('refexplicit'))
|
|
||||||
|
|
||||||
|
|
||||||
def _missing_reference(app, target, refdoc, contnode=None, refexplicit=False):
|
def _missing_reference(app, target, refdoc, contnode=None, refexplicit=False):
|
||||||
|
@ -466,15 +497,14 @@ def _missing_reference(app, target, refdoc, contnode=None, refexplicit=False):
|
||||||
if target in blog.references:
|
if target in blog.references:
|
||||||
docname, dispname = blog.references[target]
|
docname, dispname = blog.references[target]
|
||||||
|
|
||||||
if 'html' in app.builder.name:
|
if "html" in app.builder.name:
|
||||||
internal = True
|
internal = True
|
||||||
uri = app.builder.get_relative_uri(refdoc, docname)
|
uri = app.builder.get_relative_uri(refdoc, docname)
|
||||||
else:
|
else:
|
||||||
internal = False
|
internal = False
|
||||||
uri = blog.blog_baseurl + '/' + docname
|
uri = blog.blog_baseurl + "/" + docname
|
||||||
|
|
||||||
newnode = nodes.reference('', '', internal=internal, refuri=uri,
|
newnode = nodes.reference("", "", internal=internal, refuri=uri, reftitle=dispname)
|
||||||
reftitle=dispname)
|
|
||||||
if refexplicit:
|
if refexplicit:
|
||||||
newnode.append(contnode)
|
newnode.append(contnode)
|
||||||
else:
|
else:
|
||||||
|
@ -486,8 +516,10 @@ def _missing_reference(app, target, refdoc, contnode=None, refexplicit=False):
|
||||||
|
|
||||||
|
|
||||||
def generate_archive_pages(app):
|
def generate_archive_pages(app):
|
||||||
"""Generate archive pages for all posts, categories, tags, authors, and
|
"""
|
||||||
drafts."""
|
Generate archive pages for all posts, categories, tags, authors, and
|
||||||
|
drafts.
|
||||||
|
"""
|
||||||
|
|
||||||
if not ablog.builder_support(app):
|
if not ablog.builder_support(app):
|
||||||
return
|
return
|
||||||
|
@ -495,80 +527,77 @@ def generate_archive_pages(app):
|
||||||
blog = Blog(app)
|
blog = Blog(app)
|
||||||
for post in blog.posts:
|
for post in blog.posts:
|
||||||
for redirect in post.redirect:
|
for redirect in post.redirect:
|
||||||
yield (redirect, {'redirect': post.docname, 'post': post},
|
yield (redirect, {"redirect": post.docname, "post": post}, "redirect.html")
|
||||||
'redirect.html')
|
|
||||||
|
|
||||||
found_docs = app.env.found_docs
|
found_docs = app.env.found_docs
|
||||||
atom_feed = bool(blog.blog_baseurl)
|
atom_feed = bool(blog.blog_baseurl)
|
||||||
feed_archives = blog.blog_feed_archives
|
feed_archives = blog.blog_feed_archives
|
||||||
blog_path = blog.blog_path
|
blog_path = blog.blog_path
|
||||||
for title, header, catalog in [
|
for title, header, catalog in [
|
||||||
(_('Authors'), _('Posts by'), blog.author),
|
(_("Authors"), _("Posts by"), blog.author),
|
||||||
(_('Locations'), _('Posts from'), blog.location),
|
(_("Locations"), _("Posts from"), blog.location),
|
||||||
(_('Languages'), _('Posts in'), blog.language),
|
(_("Languages"), _("Posts in"), blog.language),
|
||||||
(_('Categories'), _('Posts in'), blog.category),
|
(_("Categories"), _("Posts in"), blog.category),
|
||||||
(_('All posts'), _('Posted in'), blog.archive),
|
(_("All posts"), _("Posted in"), blog.archive),
|
||||||
(_('Tags'), _('Posts tagged'), blog.tags), ]:
|
(_("Tags"), _("Posts tagged"), blog.tags),
|
||||||
|
]:
|
||||||
|
|
||||||
if not catalog:
|
if not catalog:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
context = {
|
context = {
|
||||||
'parents': [],
|
"parents": [],
|
||||||
'title': title,
|
"title": title,
|
||||||
'header': header,
|
"header": header,
|
||||||
'catalog': catalog,
|
"catalog": catalog,
|
||||||
'summary': True,
|
"summary": True,
|
||||||
}
|
}
|
||||||
if catalog.docname not in found_docs:
|
if catalog.docname not in found_docs:
|
||||||
yield (catalog.docname, context, 'catalog.html')
|
yield (catalog.docname, context, "catalog.html")
|
||||||
|
|
||||||
for collection in catalog:
|
for collection in catalog:
|
||||||
|
|
||||||
if not collection:
|
if not collection:
|
||||||
continue
|
continue
|
||||||
context = {
|
context = {
|
||||||
'parents': [],
|
"parents": [],
|
||||||
'title': u'{0} {1}'.format(header, collection),
|
"title": f"{header} {collection}",
|
||||||
'header': header,
|
"header": header,
|
||||||
'collection': collection,
|
"collection": collection,
|
||||||
'summary': True,
|
"summary": True,
|
||||||
'feed_path': collection.path if feed_archives else blog_path,
|
"feed_path": collection.path if feed_archives else blog_path,
|
||||||
'archive_feed': atom_feed and feed_archives
|
"archive_feed": atom_feed and feed_archives,
|
||||||
}
|
}
|
||||||
context['feed_title'] = context['title']
|
context["feed_title"] = context["title"]
|
||||||
if collection.docname not in found_docs:
|
if collection.docname not in found_docs:
|
||||||
yield (collection.docname, context, 'collection.html')
|
yield (collection.docname, context, "collection.html")
|
||||||
|
|
||||||
#ppp = 5
|
# ppp = 5
|
||||||
# for page, i in enumerate(range(0, len(blog.posts), ppp)):
|
# for page, i in enumerate(range(0, len(blog.posts), ppp)):
|
||||||
if 1:
|
if 1:
|
||||||
context = {
|
context = {
|
||||||
'parents': [],
|
"parents": [],
|
||||||
'title': _('All Posts'),
|
"title": _("All Posts"),
|
||||||
'header': _('All'),
|
"header": _("All"),
|
||||||
'collection': blog.posts,
|
"collection": blog.posts,
|
||||||
'summary': True,
|
"summary": True,
|
||||||
'atom_feed': atom_feed,
|
"atom_feed": atom_feed,
|
||||||
'feed_path': blog.blog_path,
|
"feed_path": blog.blog_path,
|
||||||
}
|
}
|
||||||
docname = blog.posts.docname
|
docname = blog.posts.docname
|
||||||
# if page:
|
# if page:
|
||||||
# docname += '/' + str(page)
|
# docname += '/' + str(page)
|
||||||
yield (docname, context, 'collection.html')
|
yield (docname, context, "collection.html")
|
||||||
|
|
||||||
context = {
|
context = {"parents": [], "title": _("Drafts"), "collection": blog.drafts, "summary": True}
|
||||||
'parents': [],
|
yield (blog.drafts.docname, context, "collection.html")
|
||||||
'title': _('Drafts'),
|
|
||||||
'collection': blog.drafts,
|
|
||||||
'summary': True,
|
|
||||||
}
|
|
||||||
yield (blog.drafts.docname, context, 'collection.html')
|
|
||||||
|
|
||||||
|
|
||||||
def generate_atom_feeds(app):
|
def generate_atom_feeds(app):
|
||||||
"""Generate archive pages for all posts, categories, tags, authors, and
|
"""
|
||||||
drafts."""
|
Generate archive pages for all posts, categories, tags, authors, and
|
||||||
|
drafts.
|
||||||
|
"""
|
||||||
|
|
||||||
if not ablog.builder_support(app):
|
if not ablog.builder_support(app):
|
||||||
return
|
return
|
||||||
|
@ -585,23 +614,28 @@ def generate_atom_feeds(app):
|
||||||
app.warn("werkzeug is not found, continue without atom feeds support.")
|
app.warn("werkzeug is not found, continue without atom feeds support.")
|
||||||
return
|
return
|
||||||
|
|
||||||
feed_path = os.path.join(app.builder.outdir, blog.blog_path, 'atom.xml')
|
feed_path = os.path.join(app.builder.outdir, blog.blog_path, "atom.xml")
|
||||||
|
|
||||||
feeds = [(blog.posts,
|
feeds = [
|
||||||
blog.blog_path,
|
(
|
||||||
feed_path,
|
blog.posts,
|
||||||
blog.blog_title,
|
blog.blog_path,
|
||||||
os_path_join(url, blog.blog_path, 'atom.xml'))]
|
feed_path,
|
||||||
|
blog.blog_title,
|
||||||
|
os_path_join(url, blog.blog_path, "atom.xml"),
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
if blog.blog_feed_archives:
|
if blog.blog_feed_archives:
|
||||||
|
|
||||||
for header, catalog in [
|
for header, catalog in [
|
||||||
(_('Posts by'), blog.author),
|
(_("Posts by"), blog.author),
|
||||||
(_('Posts from'), blog.location),
|
(_("Posts from"), blog.location),
|
||||||
(_('Posts in'), blog.language),
|
(_("Posts in"), blog.language),
|
||||||
(_('Posts in'), blog.category),
|
(_("Posts in"), blog.category),
|
||||||
(_('Posted in'), blog.archive),
|
(_("Posted in"), blog.archive),
|
||||||
(_('Posts tagged'), blog.tags), ]:
|
(_("Posts tagged"), blog.tags),
|
||||||
|
]:
|
||||||
|
|
||||||
for coll in catalog:
|
for coll in catalog:
|
||||||
# skip collections containing only drafts
|
# skip collections containing only drafts
|
||||||
|
@ -611,12 +645,15 @@ def generate_atom_feeds(app):
|
||||||
if not os.path.isdir(folder):
|
if not os.path.isdir(folder):
|
||||||
os.makedirs(folder)
|
os.makedirs(folder)
|
||||||
|
|
||||||
feeds.append((coll,
|
feeds.append(
|
||||||
coll.path,
|
(
|
||||||
os.path.join(folder, 'atom.xml'),
|
coll,
|
||||||
blog.blog_title + u' - ' + header +
|
coll.path,
|
||||||
u' ' + text_type(coll),
|
os.path.join(folder, "atom.xml"),
|
||||||
os_path_join(url, coll.path, 'atom.xml')))
|
blog.blog_title + " - " + header + " " + text_type(coll),
|
||||||
|
os_path_join(url, coll.path, "atom.xml"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# Config options
|
# Config options
|
||||||
feed_length = blog.blog_feed_length
|
feed_length = blog.blog_feed_length
|
||||||
|
@ -624,42 +661,45 @@ def generate_atom_feeds(app):
|
||||||
|
|
||||||
for feed_posts, pagename, feed_path, feed_title, feed_url in feeds:
|
for feed_posts, pagename, feed_path, feed_title, feed_url in feeds:
|
||||||
|
|
||||||
feed = AtomFeed(feed_title,
|
feed = AtomFeed(
|
||||||
title_type='text',
|
feed_title,
|
||||||
url=url,
|
title_type="text",
|
||||||
feed_url=feed_url,
|
url=url,
|
||||||
subtitle=blog.blog_feed_subtitle,
|
feed_url=feed_url,
|
||||||
generator=('ABlog', 'http://ablog.readthedocs.org',
|
subtitle=blog.blog_feed_subtitle,
|
||||||
ablog.__version__))
|
generator=("ABlog", "https://ablog.readthedocs.org", ablog.__version__),
|
||||||
|
)
|
||||||
for i, post in enumerate(feed_posts):
|
for i, post in enumerate(feed_posts):
|
||||||
if feed_length and i == feed_length:
|
if feed_length and i == feed_length:
|
||||||
break
|
break
|
||||||
post_url = os_path_join(
|
post_url = os_path_join(url, app.builder.get_target_uri(post.docname))
|
||||||
url, app.builder.get_target_uri(post.docname))
|
|
||||||
if post.section:
|
if post.section:
|
||||||
post_url += '#' + post.section
|
post_url += "#" + post.section
|
||||||
|
|
||||||
if blog.blog_feed_titles:
|
if blog.blog_feed_titles:
|
||||||
content = None
|
content = None
|
||||||
else:
|
else:
|
||||||
content = post.to_html(pagename, fulltext=feed_fulltext)
|
content = post.to_html(pagename, fulltext=feed_fulltext)
|
||||||
feed.add(post.title,
|
feed.add(
|
||||||
content=content,
|
post.title,
|
||||||
title_type='text',
|
content=content,
|
||||||
content_type='html',
|
title_type="text",
|
||||||
author=', '.join(a.name for a in post.author),
|
content_type="html",
|
||||||
url=post_url,
|
author=", ".join(a.name for a in post.author),
|
||||||
id=post_url,
|
url=post_url,
|
||||||
updated=post.update, published=post.date)
|
id=post_url,
|
||||||
|
updated=post.update,
|
||||||
|
published=post.date,
|
||||||
|
)
|
||||||
|
|
||||||
parent_dir = os.path.dirname(feed_path)
|
parent_dir = os.path.dirname(feed_path)
|
||||||
if not os.path.isdir(parent_dir):
|
if not os.path.isdir(parent_dir):
|
||||||
os.makedirs(parent_dir)
|
os.makedirs(parent_dir)
|
||||||
|
|
||||||
with io.open(feed_path, 'w', encoding='utf-8') as out:
|
with open(feed_path, "w", encoding="utf-8") as out:
|
||||||
feed_str = feed.to_string()
|
feed_str = feed.to_string()
|
||||||
try:
|
try:
|
||||||
out.write(feed_str.encode('utf-8'))
|
out.write(feed_str.encode("utf-8"))
|
||||||
except TypeError:
|
except TypeError:
|
||||||
out.write(feed_str)
|
out.write(feed_str)
|
||||||
|
|
||||||
|
@ -670,9 +710,11 @@ def generate_atom_feeds(app):
|
||||||
|
|
||||||
|
|
||||||
def register_posts(app):
|
def register_posts(app):
|
||||||
"""Register posts found in the Sphinx build environment."""
|
"""
|
||||||
|
Register posts found in the Sphinx build environment.
|
||||||
|
"""
|
||||||
|
|
||||||
blog = Blog(app)
|
blog = Blog(app)
|
||||||
for docname, posts in getattr(app.env, 'ablog_posts', {}).items():
|
for docname, posts in getattr(app.env, "ablog_posts", {}).items():
|
||||||
for postinfo in posts:
|
for postinfo in posts:
|
||||||
blog.register(docname, postinfo)
|
blog.register(docname, postinfo)
|
||||||
|
|
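The `_missing_reference` and feed changes above only reflow the calls and switch quote style; the docutils node that gets built is unchanged. A minimal sketch of that construction, assuming docutils is installed and using placeholder values in place of what the Sphinx builder would supply:

    from docutils import nodes

    # Placeholders standing in for values _missing_reference computes at build time:
    internal = True                      # "html" in app.builder.name
    uri = "blog/first-post"              # app.builder.get_relative_uri(refdoc, docname)
    dispname = "First Post"              # display name stored in blog.references

    # Same call as in the reformatted code above, just outside of Sphinx.
    newnode = nodes.reference("", "", internal=internal, refuri=uri, reftitle=dispname)
    newnode.append(nodes.Text(dispname))
    print(newnode.astext())              # -> First Post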
305
ablog/start.py
305
ablog/start.py
|
@ -1,46 +1,25 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from distutils.version import LooseVersion
|
|
||||||
from os import path
|
|
||||||
from io import open
|
from io import open
|
||||||
from docutils.utils import column_width
|
from os import path
|
||||||
from textwrap import wrap
|
from textwrap import wrap
|
||||||
|
|
||||||
from sphinx import __version__
|
from docutils.utils import column_width
|
||||||
|
from pkg_resources import DistributionNotFound, get_distribution
|
||||||
|
from sphinx.cmd.quickstart import do_prompt, ensuredir, is_path
|
||||||
from sphinx.util import texescape
|
from sphinx.util import texescape
|
||||||
from sphinx.util.console import bold, nocolor, color_terminal
|
from sphinx.util.console import bold, color_terminal, nocolor
|
||||||
from sphinx.util.osutil import make_filename
|
from sphinx.util.osutil import make_filename
|
||||||
|
|
||||||
SPHINX_LT_17 = LooseVersion(__version__) < LooseVersion('1.7')
|
from .version import version as __version__
|
||||||
|
|
||||||
if SPHINX_LT_17:
|
w = lambda t, ls=80: "\n".join(wrap(t, ls))
|
||||||
from sphinx.quickstart import do_prompt, is_path, ensuredir
|
|
||||||
else:
|
|
||||||
from sphinx.cmd.quickstart import do_prompt, is_path, ensuredir
|
|
||||||
|
|
||||||
from ablog import __version__
|
__all__ = ["generate", "ask_user", "ablog_start"]
|
||||||
|
|
||||||
if sys.version_info >= (3, 0):
|
ABLOG_CONF = "#!/usr/bin/env python3\n"
|
||||||
text_type = str
|
ABLOG_CONF += """# -*- coding: utf-8 -*-
|
||||||
else:
|
|
||||||
text_type = unicode
|
|
||||||
|
|
||||||
w = lambda t, ls=80: '\n'.join(wrap(t, ls))
|
|
||||||
|
|
||||||
__all__ = ['generate', 'ask_user', 'ablog_start']
|
|
||||||
|
|
||||||
ABLOG_CONF = u''
|
|
||||||
|
|
||||||
# prevents that the file is checked for being written in Python 2.x syntax
|
|
||||||
if sys.version_info >= (3, 0):
|
|
||||||
ABLOG_CONF = u'#!/usr/bin/env python3\n'
|
|
||||||
|
|
||||||
|
|
||||||
ABLOG_CONF += u'''# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# %(project)s build configuration file, created by
|
# %(project)s build configuration file, created by
|
||||||
# `ablog start` on %(now)s.
|
# `ablog start` on %(now)s.
|
||||||
|
@ -94,7 +73,7 @@ blog_authors = {
|
||||||
# keys should be used in ``post`` directive to refer to the locations.
|
# keys should be used in ``post`` directive to refer to the locations.
|
||||||
# Default is ``{}``.
|
# Default is ``{}``.
|
||||||
#blog_locations = {
|
#blog_locations = {
|
||||||
# 'Earth': ('The Blue Planet', 'http://en.wikipedia.org/wiki/Earth'),
|
# 'Earth': ('The Blue Planet', 'https://en.wikipedia.org/wiki/Earth'),
|
||||||
#}
|
#}
|
||||||
|
|
||||||
|
|
||||||
|
@ -383,9 +362,9 @@ html_static_path = ['%(dot)sstatic']
|
||||||
htmlhelp_basename = '%(project_fn)sdoc'
|
htmlhelp_basename = '%(project_fn)sdoc'
|
||||||
|
|
||||||
|
|
||||||
'''
|
"""
|
||||||
|
|
||||||
ABLOG_INDEX = u'''
|
ABLOG_INDEX = """
|
||||||
.. %(project)s index file, created by `ablog start` on %(now)s.
|
.. %(project)s index file, created by `ablog start` on %(now)s.
|
||||||
You can adapt this file completely to your liking, but it should at least
|
You can adapt this file completely to your liking, but it should at least
|
||||||
contain the root `toctree` directive.
|
contain the root `toctree` directive.
|
||||||
|
@ -414,9 +393,9 @@ Here is a list of most recent posts:
|
||||||
|
|
||||||
about.rst
|
about.rst
|
||||||
|
|
||||||
'''
|
"""
|
||||||
|
|
||||||
ABLOG_ABOUT = u'''
|
ABLOG_ABOUT = """
|
||||||
.. _about:
|
.. _about:
|
||||||
|
|
||||||
About %(author)s
|
About %(author)s
|
||||||
|
@ -424,9 +403,9 @@ About %(author)s
|
||||||
|
|
||||||
The world wants to know more about you.
|
The world wants to know more about you.
|
||||||
|
|
||||||
'''
|
"""
|
||||||
|
|
||||||
ABLOG_POST = u'''
|
ABLOG_POST = """
|
||||||
.. %(project)s post example, created by `ablog start` on %(post_date)s.
|
.. %(project)s post example, created by `ablog start` on %(post_date)s.
|
||||||
|
|
||||||
.. post:: %(post_date)s
|
.. post:: %(post_date)s
|
||||||
|
@ -439,64 +418,74 @@ First Post
|
||||||
World, hello again! This very first paragraph of the post will be used
|
World, hello again! This very first paragraph of the post will be used
|
||||||
as excerpt in archives and feeds. Find out how to control how much is shown
|
as excerpt in archives and feeds. Find out how to control how much is shown
|
||||||
in `Post Excerpts and Images
|
in `Post Excerpts and Images
|
||||||
<http://ablog.readthedocs.org/manual/post-excerpts-and-images/>`_. Remember
|
<https://ablog.readthedocs.org/manual/post-excerpts-and-images/>`_. Remember
|
||||||
that you can refer to posts by file name, e.g. ``:ref:`first-post``` results
|
that you can refer to posts by file name, e.g. ``:ref:`first-post``` results
|
||||||
in :ref:`first-post`. Find out more at `Cross-Referencing Blog Pages
|
in :ref:`first-post`. Find out more at `Cross-Referencing Blog Pages
|
||||||
<http://ablog.readthedocs.org/manual/cross-referencing-blog-pages/>`_.
|
<https://ablog.readthedocs.org/manual/cross-referencing-blog-pages/>`_.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
|
|
||||||
CONF_DEFAULTS = {
|
CONF_DEFAULTS = {
|
||||||
'sep': False,
|
"sep": False,
|
||||||
'dot': '_',
|
"dot": "_",
|
||||||
'language': None,
|
"language": None,
|
||||||
'suffix': '.rst',
|
"suffix": ".rst",
|
||||||
'master': 'index',
|
"master": "index",
|
||||||
'makefile': False,
|
"makefile": False,
|
||||||
'batchfile': False,
|
"batchfile": False,
|
||||||
'epub': False,
|
"epub": False,
|
||||||
'ext_todo': False,
|
"ext_todo": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def generate(d, overwrite=True, silent=False):
|
def generate(d, overwrite=True, silent=False):
|
||||||
'''Borrowed from Sphinx 1.3b3'''
|
"""
|
||||||
|
Borrowed from Sphinx 1.3b3.
|
||||||
|
"""
|
||||||
|
|
||||||
"""Generate project based on values in *d*."""
|
"""Generate project based on values in *d*."""
|
||||||
|
|
||||||
texescape.init()
|
texescape.init()
|
||||||
|
|
||||||
if 'mastertoctree' not in d:
|
if "mastertoctree" not in d:
|
||||||
d['mastertoctree'] = ''
|
d["mastertoctree"] = ""
|
||||||
if 'mastertocmaxdepth' not in d:
|
if "mastertocmaxdepth" not in d:
|
||||||
d['mastertocmaxdepth'] = 2
|
d["mastertocmaxdepth"] = 2
|
||||||
|
|
||||||
d['project_fn'] = make_filename(d['project'])
|
d["project_fn"] = make_filename(d["project"])
|
||||||
d['project_manpage'] = d['project_fn'].lower()
|
d["project_manpage"] = d["project_fn"].lower()
|
||||||
d['now'] = time.asctime()
|
d["now"] = time.asctime()
|
||||||
d['project_underline'] = column_width(d['project']) * '='
|
d["project_underline"] = column_width(d["project"]) * "="
|
||||||
|
|
||||||
d['copyright'] = time.strftime('%Y') + ', ' + d['author']
|
d["copyright"] = time.strftime("%Y") + ", " + d["author"]
|
||||||
d['author_texescaped'] = text_type(d['author']
|
d["author_texescaped"] = str(d["author"]).translate(texescape.tex_escape_map)
|
||||||
).translate(texescape.tex_escape_map)
|
d["project_doc"] = d["project"] + " Documentation"
|
||||||
d['project_doc'] = d['project'] + ' Documentation'
|
d["project_doc_texescaped"] = str(d["project"] + " Documentation").translate(
|
||||||
d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation'
|
texescape.tex_escape_map
|
||||||
).translate(texescape.tex_escape_map)
|
)
|
||||||
|
|
||||||
# escape backslashes and single quotes in strings that are put into
|
# escape backslashes and single quotes in strings that are put into
|
||||||
# a Python string literal
|
# a Python string literal
|
||||||
for key in ('project', 'project_doc', 'project_doc_texescaped',
|
for key in (
|
||||||
'author', 'author_texescaped', 'copyright',
|
"project",
|
||||||
'version', 'release', 'master'):
|
"project_doc",
|
||||||
d[key + '_str'] = d[key].replace('\\', '\\\\').replace("'", "\\'")
|
"project_doc_texescaped",
|
||||||
|
"author",
|
||||||
|
"author_texescaped",
|
||||||
|
"copyright",
|
||||||
|
"version",
|
||||||
|
"release",
|
||||||
|
"master",
|
||||||
|
):
|
||||||
|
d[key + "_str"] = d[key].replace("\\", "\\\\").replace("'", "\\'")
|
||||||
|
|
||||||
if not path.isdir(d['path']):
|
if not path.isdir(d["path"]):
|
||||||
ensuredir(d['path'])
|
ensuredir(d["path"])
|
||||||
|
|
||||||
srcdir = d['sep'] and path.join(d['path'], 'source') or d['path']
|
srcdir = d["sep"] and path.join(d["path"], "source") or d["path"]
|
||||||
|
|
||||||
ensuredir(srcdir)
|
ensuredir(srcdir)
|
||||||
d['exclude_patterns'] = ''
|
d["exclude_patterns"] = ""
|
||||||
# TODO: Work if we want this.
|
# TODO: Work if we want this.
|
||||||
# if d['sep']:
|
# if d['sep']:
|
||||||
# builddir = path.join(d['path'], 'build')
|
# builddir = path.join(d['path'], 'build')
|
||||||
|
@ -505,41 +494,42 @@ def generate(d, overwrite=True, silent=False):
|
||||||
# builddir = path.join(srcdir, d['dot'] + 'build')
|
# builddir = path.join(srcdir, d['dot'] + 'build')
|
||||||
# d['exclude_patterns'] = repr(d['dot'] + 'build')
|
# d['exclude_patterns'] = repr(d['dot'] + 'build')
|
||||||
# ensuredir(builddir)
|
# ensuredir(builddir)
|
||||||
ensuredir(path.join(srcdir, d['dot'] + 'templates'))
|
ensuredir(path.join(srcdir, d["dot"] + "templates"))
|
||||||
ensuredir(path.join(srcdir, d['dot'] + 'static'))
|
ensuredir(path.join(srcdir, d["dot"] + "static"))
|
||||||
|
|
||||||
def write_file(fpath, content, newline=None):
|
def write_file(fpath, content, newline=None):
|
||||||
if overwrite or not path.isfile(fpath):
|
if overwrite or not path.isfile(fpath):
|
||||||
print('Creating file %s.' % fpath)
|
print("Creating file %s." % fpath)
|
||||||
f = open(fpath, 'wt', encoding='utf-8', newline=newline)
|
f = open(fpath, "wt", encoding="utf-8", newline=newline)
|
||||||
try:
|
try:
|
||||||
f.write(content)
|
f.write(content)
|
||||||
finally:
|
finally:
|
||||||
f.close()
|
f.close()
|
||||||
else:
|
else:
|
||||||
print('File %s already exists, skipping.' % fpath)
|
print("File %s already exists, skipping." % fpath)
|
||||||
|
|
||||||
conf_text = ABLOG_CONF % d
|
conf_text = ABLOG_CONF % d
|
||||||
write_file(path.join(srcdir, 'conf.py'), conf_text)
|
write_file(path.join(srcdir, "conf.py"), conf_text)
|
||||||
|
|
||||||
masterfile = path.join(srcdir, d['master'] + d['suffix'])
|
masterfile = path.join(srcdir, d["master"] + d["suffix"])
|
||||||
write_file(masterfile, ABLOG_INDEX % d)
|
write_file(masterfile, ABLOG_INDEX % d)
|
||||||
|
|
||||||
about = path.join(srcdir, 'about' + d['suffix'])
|
about = path.join(srcdir, "about" + d["suffix"])
|
||||||
write_file(about, ABLOG_ABOUT % d)
|
write_file(about, ABLOG_ABOUT % d)
|
||||||
|
|
||||||
d['post_date'] = datetime.datetime.today().strftime('%b %d, %Y')
|
d["post_date"] = datetime.datetime.today().strftime("%b %d, %Y")
|
||||||
firstpost = path.join(srcdir, 'first-post' + d['suffix'])
|
firstpost = path.join(srcdir, "first-post" + d["suffix"])
|
||||||
write_file(firstpost, ABLOG_POST % d)
|
write_file(firstpost, ABLOG_POST % d)
|
||||||
|
|
||||||
if silent:
|
if silent:
|
||||||
return
|
return
|
||||||
|
|
||||||
print(bold('Finished: An initial directory structure has been created.'))
|
print(bold("Finished: An initial directory structure has been created."))
|
||||||
|
|
||||||
|
|
||||||
def ask_user(d):
|
def ask_user(d):
|
||||||
"""Borrowed from Sphinx 1.3b3
|
"""
|
||||||
|
Borrowed from Sphinx 1.3b3.
|
||||||
|
|
||||||
Ask the user for quickstart values missing from *d*.
|
Ask the user for quickstart values missing from *d*.
|
||||||
|
|
||||||
|
@ -554,84 +544,89 @@ def ask_user(d):
|
||||||
|
|
||||||
d.update(CONF_DEFAULTS)
|
d.update(CONF_DEFAULTS)
|
||||||
|
|
||||||
print(bold('Welcome to the ABlog %s quick start utility.') % __version__)
|
print(bold("Welcome to the ABlog %s quick start utility.") % __version__)
|
||||||
print('')
|
print("")
|
||||||
print(w('Please enter values for the following settings (just press Enter '
|
print(
|
||||||
'to accept a default value, if one is given in brackets).'))
|
w(
|
||||||
|
"Please enter values for the following settings (just press Enter "
|
||||||
|
"to accept a default value, if one is given in brackets)."
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
print('')
|
print("")
|
||||||
if 'path' in d:
|
if "path" in d:
|
||||||
print(bold('Selected root path: %s' % d['path']))
|
print(bold("Selected root path: %s" % d["path"]))
|
||||||
else:
|
else:
|
||||||
print('Enter the root path for your blog project.')
|
print("Enter the root path for your blog project.")
|
||||||
if SPHINX_LT_17:
|
d["path"] = do_prompt("Root path for your project", ".", is_path)
|
||||||
do_prompt(d, 'path', 'Root path for your project', '.', is_path)
|
|
||||||
else:
|
|
||||||
d['path'] = do_prompt('Root path for your project', '.', is_path)
|
|
||||||
|
|
||||||
while path.isfile(path.join(d['path'], 'conf.py')) or \
|
while path.isfile(path.join(d["path"], "conf.py")) or path.isfile(
|
||||||
path.isfile(path.join(d['path'], 'source', 'conf.py')):
|
path.join(d["path"], "source", "conf.py")
|
||||||
print('')
|
):
|
||||||
print(bold(w('Error: an existing conf.py has been found in the '
|
print("")
|
||||||
'selected root path.')))
|
print(bold(w("Error: an existing conf.py has been found in the " "selected root path.")))
|
||||||
print('ablog start will not overwrite existing Sphinx projects.')
|
print("ablog start will not overwrite existing Sphinx projects.")
|
||||||
print('')
|
print("")
|
||||||
if SPHINX_LT_17:
|
d["path"] = do_prompt("Please enter a new root path (or just Enter to exit)", "", is_path)
|
||||||
do_prompt(d, 'path','Please enter a new root path (or just Enter to exit)', '', is_path)
|
if not d["path"]:
|
||||||
else:
|
|
||||||
d['path'] = do_prompt('Please enter a new root path (or just Enter to exit)', '', is_path)
|
|
||||||
if not d['path']:
|
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if 'project' not in d:
|
if "project" not in d:
|
||||||
print('')
|
print("")
|
||||||
print(w('Project name will occur in several places in the website, '
|
print(
|
||||||
'including blog archive pages and atom feeds. Later, you can '
|
w(
|
||||||
'set separate names for different parts of the website in '
|
"Project name will occur in several places in the website, "
|
||||||
'configuration file.'))
|
"including blog archive pages and atom feeds. Later, you can "
|
||||||
if SPHINX_LT_17:
|
"set separate names for different parts of the website in "
|
||||||
do_prompt(d, 'project', 'Project name')
|
"configuration file."
|
||||||
else:
|
)
|
||||||
d['project'] = do_prompt('Project name')
|
)
|
||||||
|
d["project"] = do_prompt("Project name")
|
||||||
|
|
||||||
if 'author' not in d:
|
if "author" not in d:
|
||||||
print(w('Think of author as the copyright holder of the content. '
|
print(
|
||||||
'If your blog has multiple authors, you might want to enter '
|
w(
|
||||||
'a team name here. Later, you can specify individual authors '
|
"This of author as the copyright holder of the content. "
|
||||||
'using `blog_authors` configuration option.'))
|
"If your blog has multiple authors, you might want to enter "
|
||||||
if SPHINX_LT_17:
|
"a team name here. Later, you can specify individual authors "
|
||||||
do_prompt(d, 'author', 'Author name(s)')
|
"using `blog_authors` configuration option."
|
||||||
else:
|
)
|
||||||
d['author'] = do_prompt('Author name(s)')
|
)
|
||||||
|
d["author"] = do_prompt("Author name(s)")
|
||||||
|
|
||||||
d['release'] = d['version'] = ''
|
d["release"] = d["version"] = ""
|
||||||
|
|
||||||
while path.isfile(path.join(d['path'], d['master'] + d['suffix'])) or \
|
while path.isfile(path.join(d["path"], d["master"] + d["suffix"])) or path.isfile(
|
||||||
path.isfile(path.join(d['path'], 'source', d['master'] + d['suffix'])):
|
path.join(d["path"], "source", d["master"] + d["suffix"])
|
||||||
print('')
|
):
|
||||||
print(bold(w('Error: the master file %s has already been found in the '
|
print("")
|
||||||
'selected root path.' % (d['master'] + d['suffix']))))
|
print(
|
||||||
print('ablog-start will not overwrite the existing file.')
|
bold(
|
||||||
print('')
|
w(
|
||||||
if SPHINX_LT_17:
|
"Error: the master file %s has already been found in the "
|
||||||
do_prompt(d, 'master', w('Please enter a new file name, or rename the '
|
"selected root path." % (d["master"] + d["suffix"])
|
||||||
'existing file and press Enter'), d['master'])
|
)
|
||||||
else:
|
)
|
||||||
d['master'] = do_prompt(w('Please enter a new file name, or rename the '
|
)
|
||||||
'existing file and press Enter'), d['master'])
|
print("ablog-start will not overwrite the existing file.")
|
||||||
|
print("")
|
||||||
|
d["master"] = do_prompt(
|
||||||
|
w("Please enter a new file name, or rename the " "existing file and press Enter"),
|
||||||
|
d["master"],
|
||||||
|
)
|
||||||
|
|
||||||
if 'blog_baseurl' not in d:
|
if "blog_baseurl" not in d:
|
||||||
print('')
|
print("")
|
||||||
print(w('Please enter the base URL for your project. Blog feeds will '
|
print(
|
||||||
'be generated relative to this URL. If you don\'t have one yet, '
|
w(
|
||||||
'you can set it in configuration file later.'))
|
"Please enter the base URL for your project. Blog feeds will "
|
||||||
if SPHINX_LT_17:
|
"be generated relative to this URL. If you don't have one yet, "
|
||||||
# APR: Not sure how do_prompt() worked prior to Sphinx 1.7; likely to be `lambda x: x` here too
|
"you can set it in configuration file later."
|
||||||
do_prompt(d, 'blog_baseurl', 'Base URL for your project', None, lambda x: True)
|
)
|
||||||
else:
|
)
|
||||||
d['blog_baseurl'] = do_prompt('Base URL for your project', None, lambda x: x)
|
d["blog_baseurl"] = do_prompt("Base URL for your project", None, lambda x: x)
|
||||||
|
|
||||||
print('')
|
print("")
|
||||||
|
|
||||||
|
|
||||||
def ablog_start(**kwargs):
|
def ablog_start(**kwargs):
|
||||||
|
@ -643,8 +638,8 @@ def ablog_start(**kwargs):
|
||||||
try:
|
try:
|
||||||
ask_user(d)
|
ask_user(d)
|
||||||
except (KeyboardInterrupt, EOFError):
|
except (KeyboardInterrupt, EOFError):
|
||||||
print('')
|
print("")
|
||||||
print('[Interrupted.]')
|
print("[Interrupted.]")
|
||||||
return
|
return
|
||||||
|
|
||||||
generate(d)
|
generate(d)
|
||||||
|
|
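One small helper worth noting in the rewritten start.py above is the `w` lambda, which re-wraps prompt text to a given width. A tiny self-contained sketch of how it behaves (the sample sentence and width are made up for illustration):

    from textwrap import wrap

    # Same one-liner as in start.py: wrap the text and join the lines back together.
    w = lambda t, ls=80: "\n".join(wrap(t, ls))

    # Prints the sentence re-wrapped onto lines of at most 40 characters.
    print(w("Please enter values for the following settings, pressing Enter to accept a default value.", 40))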
|
@ -46,8 +46,8 @@
|
||||||
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
|
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
|
||||||
})();
|
})();
|
||||||
</script>
|
</script>
|
||||||
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
|
<noscript>Please enable JavaScript to view the <a href="https://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
|
||||||
<a href="http://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>
|
<a href="https://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
|
|
7
ablog/version.py
Normal file
7
ablog/version.py
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
# This file is for compatibility with astropy_helpers
|
||||||
|
from pkg_resources import DistributionNotFound, get_distribution
|
||||||
|
|
||||||
|
try:
|
||||||
|
version = get_distribution("ablog").version
|
||||||
|
except DistributionNotFound:
|
||||||
|
version = "unknown.dev"
|
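This new shim is what the rest of the package now imports for its version string (see the `from .version import version as __version__` line in the start.py hunk above). A sketch of the fallback behaviour, assuming pkg_resources is available:

    from pkg_resources import DistributionNotFound, get_distribution

    # Mirrors ablog/version.py: use the installed distribution's metadata if present,
    # otherwise fall back to a placeholder so the import never fails.
    try:
        version = get_distribution("ablog").version
    except DistributionNotFound:
        version = "unknown.dev"

    print(version)  # e.g. "0.9.6" from an installed ablog, "unknown.dev" otherwise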
36
appveyor.yml
36
appveyor.yml
|
@ -1,23 +1,9 @@
|
||||||
# AppVeyor.com is a Continuous Integration service to build and run tests under Windows
|
# AppVeyor.com is a Continuous Integration service to build and run tests under Windows
|
||||||
|
|
||||||
environment:
|
environment:
|
||||||
|
matrix:
|
||||||
global:
|
- PY_MAJOR_VER: 3
|
||||||
PYTHON: "C:\\conda"
|
PYTHON_ARCH: "x86_64"
|
||||||
MINICONDA_VERSION: "latest"
|
|
||||||
CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci-helpers\\appveyor\\windows_sdk.cmd"
|
|
||||||
PYTHON_ARCH: "64" # needs to be set for CMD_IN_ENV to succeed. If a mix
|
|
||||||
# of 32 bit and 64 bit builds are needed, move this
|
|
||||||
# to the matrix section.
|
|
||||||
CONDA_CHANNELS: "conda-forge"
|
|
||||||
CONDA_DEPENDENCIES: "sphinx werkzeug alabaster invoke graphviz nbsphinx"
|
|
||||||
PIP_DEPENDENCIES: "sphinx-automodapi"
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
- PYTHON_VERSION: "2.7"
|
|
||||||
- PYTHON_VERSION: "3.5"
|
|
||||||
- PYTHON_VERSION: "3.6"
|
|
||||||
- PYTHON_VERSION: "3.7"
|
|
||||||
|
|
||||||
build: false
|
build: false
|
||||||
|
|
||||||
|
@ -36,12 +22,16 @@ install:
|
||||||
cinst graphviz --no-progress
|
cinst graphviz --no-progress
|
||||||
}
|
}
|
||||||
- ps: $env:Path += ";C:\Program Files\Pandoc\"
|
- ps: $env:Path += ";C:\Program Files\Pandoc\"
|
||||||
- git clone git://github.com/astropy/ci-helpers.git
|
- ps: Start-FileDownload "https://repo.continuum.io/miniconda/Miniconda$env:PY_MAJOR_VER-latest-Windows-$env:PYTHON_ARCH.exe" C:\Miniconda.exe; echo "Finished downloading miniconda"
|
||||||
- powershell ci-helpers/appveyor/install-miniconda.ps1
|
- cmd: C:\Miniconda.exe /S /D=C:\Py
|
||||||
- SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%
|
- cmd: SET PATH=C:\Py;C:\Py\Scripts;C:\Py\Library\bin;%PATH%
|
||||||
- activate test
|
- cmd: conda config --set always_yes yes
|
||||||
- pip install -e .
|
- cmd: conda update conda --quiet
|
||||||
|
- cmd: conda config --add channels conda-forge
|
||||||
|
- cmd: conda config --set channel_priority strict
|
||||||
|
- cmd: conda install sphinx werkzeug alabaster invoke graphviz nbsphinx --quiet
|
||||||
|
- cmd: python -m pip install --upgrade pip
|
||||||
|
- cmd: pip install -e .[all]
|
||||||
|
|
||||||
test_script:
|
test_script:
|
||||||
- python setup.py build_sphinx
|
- python setup.py build_sphinx
|
||||||
|
|
19
docs/Makefile
Normal file
19
docs/Makefile
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line.
|
||||||
|
SPHINXOPTS =
|
||||||
|
SPHINXBUILD = sphinx-build
|
||||||
|
SOURCEDIR = .
|
||||||
|
BUILDDIR = _build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
16
docs/_static/a.svg
vendored
16
docs/_static/a.svg
vendored
|
@ -1,12 +1,12 @@
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
<svg
|
<svg
|
||||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
xmlns:dc="https://purl.org/dc/elements/1.1/"
|
||||||
xmlns:cc="http://creativecommons.org/ns#"
|
xmlns:cc="https://creativecommons.org/ns#"
|
||||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
xmlns:rdf="https://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||||
xmlns:svg="http://www.w3.org/2000/svg"
|
xmlns:svg="https://www.w3.org/2000/svg"
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="https://www.w3.org/2000/svg"
|
||||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
xmlns:sodipodi="https://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
xmlns:inkscape="https://www.inkscape.org/namespaces/inkscape"
|
||||||
height="850"
|
height="850"
|
||||||
width="850"
|
width="850"
|
||||||
id="svg2"
|
id="svg2"
|
||||||
|
@ -20,7 +20,7 @@
|
||||||
rdf:about="">
|
rdf:about="">
|
||||||
<dc:format>image/svg+xml</dc:format>
|
<dc:format>image/svg+xml</dc:format>
|
||||||
<dc:type
|
<dc:type
|
||||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
rdf:resource="https://purl.org/dc/dcmitype/StillImage" />
|
||||||
<dc:title></dc:title>
|
<dc:title></dc:title>
|
||||||
</cc:Work>
|
</cc:Work>
|
||||||
</rdf:RDF>
|
</rdf:RDF>
|
||||||
|
|
16
docs/_static/ablog.svg
vendored
16
docs/_static/ablog.svg
vendored
|
@ -1,12 +1,12 @@
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
<svg
|
<svg
|
||||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
xmlns:dc="https://purl.org/dc/elements/1.1/"
|
||||||
xmlns:cc="http://creativecommons.org/ns#"
|
xmlns:cc="https://creativecommons.org/ns#"
|
||||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
xmlns:rdf="https://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||||
xmlns:svg="http://www.w3.org/2000/svg"
|
xmlns:svg="https://www.w3.org/2000/svg"
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="https://www.w3.org/2000/svg"
|
||||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
xmlns:sodipodi="https://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
xmlns:inkscape="https://www.inkscape.org/namespaces/inkscape"
|
||||||
height="1200"
|
height="1200"
|
||||||
width="3600"
|
width="3600"
|
||||||
id="svg4026"
|
id="svg4026"
|
||||||
|
@ -23,7 +23,7 @@
|
||||||
rdf:about="">
|
rdf:about="">
|
||||||
<dc:format>image/svg+xml</dc:format>
|
<dc:format>image/svg+xml</dc:format>
|
||||||
<dc:type
|
<dc:type
|
||||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
rdf:resource="https://purl.org/dc/dcmitype/StillImage" />
|
||||||
<dc:title></dc:title>
|
<dc:title></dc:title>
|
||||||
</cc:Work>
|
</cc:Work>
|
||||||
</rdf:RDF>
|
</rdf:RDF>
|
||||||
|
|
4
docs/_static/css/font-awesome.css
vendored
4
docs/_static/css/font-awesome.css
vendored
|
@ -1,6 +1,6 @@
|
||||||
/*!
|
/*!
|
||||||
* Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome
|
* Font Awesome 4.2.0 by @davegandy - https://fontawesome.io - @fontawesome
|
||||||
* License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
|
* License - https://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
|
||||||
*/
|
*/
|
||||||
/* FONT PATH
|
/* FONT PATH
|
||||||
* -------------------------- */
|
* -------------------------- */
|
||||||
|
|
6
docs/_static/fonts/fontawesome-webfont.svg
vendored
6
docs/_static/fonts/fontawesome-webfont.svg
vendored
|
@ -1,6 +1,6 @@
|
||||||
<?xml version="1.0" standalone="no"?>
|
<?xml version="1.0" standalone="no"?>
|
||||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
|
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "https://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
|
||||||
<svg xmlns="http://www.w3.org/2000/svg">
|
<svg xmlns="https://www.w3.org/2000/svg">
|
||||||
<metadata></metadata>
|
<metadata></metadata>
|
||||||
<defs>
|
<defs>
|
||||||
<font id="fontawesomeregular" horiz-adv-x="1536" >
|
<font id="fontawesomeregular" horiz-adv-x="1536" >
|
||||||
|
@ -517,4 +517,4 @@
|
||||||
<glyph unicode="" horiz-adv-x="1792" />
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
<glyph unicode="" horiz-adv-x="1792" />
|
<glyph unicode="" horiz-adv-x="1792" />
|
||||||
</font>
|
</font>
|
||||||
</defs></svg>
|
</defs></svg>
|
||||||
|
|
162
docs/conf.py
162
docs/conf.py
|
@ -1,75 +1,85 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
|
||||||
import alabaster
|
import alabaster
|
||||||
|
from pkg_resources import get_distribution
|
||||||
|
from sphinx import addnodes
|
||||||
|
|
||||||
import ablog
|
import ablog
|
||||||
|
|
||||||
ablog_builder = 'dirhtml'
|
ablog_builder = "dirhtml"
|
||||||
ablog_website = '_website'
|
ablog_website = "_website"
|
||||||
|
|
||||||
extensions = [
|
extensions = [
|
||||||
'sphinx.ext.autodoc',
|
"sphinx.ext.autodoc",
|
||||||
'sphinx.ext.doctest',
|
"sphinx.ext.doctest",
|
||||||
'sphinx.ext.intersphinx',
|
"sphinx.ext.intersphinx",
|
||||||
'sphinx.ext.todo',
|
"sphinx.ext.todo",
|
||||||
'sphinx.ext.ifconfig',
|
"sphinx.ext.ifconfig",
|
||||||
'sphinx.ext.extlinks',
|
"sphinx.ext.extlinks",
|
||||||
'sphinx_automodapi.automodapi',
|
"sphinx_automodapi.automodapi",
|
||||||
'alabaster',
|
"alabaster",
|
||||||
'nbsphinx',
|
"nbsphinx",
|
||||||
'ablog'
|
"ablog",
|
||||||
]
|
]
|
||||||
|
|
||||||
#language = 'de'
|
# language = 'de'
|
||||||
#language = 'tr'
|
# language = 'tr'
|
||||||
# PROJECT
|
# PROJECT
|
||||||
|
|
||||||
version = release = ablog.__version__
|
versionmod = get_distribution('ablog')
|
||||||
project = u'ABlog'
|
|
||||||
copyright = u'2014-2018, ABlog Team'
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
master_doc = 'index'
|
# |version| and |release|, also used in various other places throughout the
|
||||||
source_suffix = '.rst'
|
# built documents.
|
||||||
exclude_patterns = ['_build']
|
# The short X.Y version.
|
||||||
|
version = '.'.join(versionmod.version.split('.')[:3])
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = versionmod.version.split('+')[0]
|
||||||
|
# Is this version a development release
|
||||||
|
is_development = '.dev' in release
|
||||||
|
|
||||||
|
project = "ABlog"
|
||||||
|
copyright = "2014-2019, ABlog Team"
|
||||||
|
master_doc = "index"
|
||||||
|
source_suffix = ".rst"
|
||||||
|
exclude_patterns = ["_build"]
|
||||||
|
|
||||||
|
|
||||||
# HTML OUTPUT
|
# HTML OUTPUT
|
||||||
|
|
||||||
html_title = "ABlog"
|
html_title = "ABlog"
|
||||||
html_static_path = ['_static']
|
html_static_path = ["_static"]
|
||||||
html_use_index = True
|
html_use_index = True
|
||||||
html_domain_indices = False
|
html_domain_indices = False
|
||||||
html_show_sourcelink = True
|
html_show_sourcelink = True
|
||||||
html_favicon = '_static/ablog.ico'
|
html_favicon = "_static/ablog.ico"
|
||||||
|
|
||||||
# ABLOG
|
# ABLOG
|
||||||
|
|
||||||
templates_path = [ablog.get_html_templates_path()]
|
templates_path = [ablog.get_html_templates_path()]
|
||||||
|
|
||||||
blog_title = 'ABlog'
|
blog_title = "ABlog"
|
||||||
blog_baseurl = 'http://ablog.readthedocs.org'
|
blog_baseurl = "https://ablog.readthedocs.org"
|
||||||
blog_locations = {
|
blog_locations = {
|
||||||
'Pittsburgh': ('Pittsburgh, PA', 'http://en.wikipedia.org/wiki/Pittsburgh'),
|
"Pittsburgh": ("Pittsburgh, PA", "https://en.wikipedia.org/wiki/Pittsburgh"),
|
||||||
'SF': ('San Francisco, CA', 'http://en.wikipedia.org/wiki/San_Francisco'),
|
"SF": ("San Francisco, CA", "https://en.wikipedia.org/wiki/San_Francisco"),
|
||||||
'Denizli': ('Denizli, Turkey', 'http://en.wikipedia.org/wiki/Denizli'),
|
"Denizli": ("Denizli, Turkey", "https://en.wikipedia.org/wiki/Denizli"),
|
||||||
}
|
}
|
||||||
blog_languages = {
|
blog_languages = {"en": ("English", None)}
|
||||||
'en': ('English', None),
|
blog_default_language = "en"
|
||||||
}
|
|
||||||
blog_default_language = 'en'
|
|
||||||
blog_authors = {
|
blog_authors = {
|
||||||
'Ahmet': ('Ahmet Bakan', 'http://ahmetbakan.com'),
|
"Ahmet": ("Ahmet Bakan", "https://ahmetbakan.com"),
|
||||||
'Luc': ('Luc Saffre', 'http://saffre-rumma.net/luc/'),
|
"Luc": ("Luc Saffre", "https://saffre-rumma.net/luc/"),
|
||||||
'Mehmet': (u'Mehmet Gerçeker', 'https://github.com/mehmetg'),
|
"Mehmet": ("Mehmet Gerçeker", "https://github.com/mehmetg"),
|
||||||
}
|
}
|
||||||
blog_feed_archives = True
|
blog_feed_archives = True
|
||||||
blog_feed_fulltext = True
|
blog_feed_fulltext = True
|
||||||
blog_feed_length = None
|
blog_feed_length = None
|
||||||
disqus_shortname = 'ablogforsphinx'
|
disqus_shortname = "ablogforsphinx"
|
||||||
disqus_pages = True
|
disqus_pages = True
|
||||||
fontawesome_css_file = 'css/font-awesome.css'
|
fontawesome_css_file = "css/font-awesome.css"
|
||||||
|
|
||||||
# blog_feed_titles = False
|
# blog_feed_titles = False
|
||||||
# blog_archive_titles = False
|
# blog_archive_titles = False
|
||||||
|
@ -77,53 +87,55 @@ fontawesome_css_file = 'css/font-awesome.css'
|
||||||
|
|
||||||
# THEME
|
# THEME
|
||||||
|
|
||||||
html_style = 'alabaster.css'
|
html_style = "alabaster.css"
|
||||||
html_theme = 'alabaster'
|
html_theme = "alabaster"
|
||||||
html_sidebars = {
|
html_sidebars = {
|
||||||
'**': ['about.html',
|
"**": [
|
||||||
'postcard.html', 'recentposts.html',
|
"about.html",
|
||||||
'tagcloud.html', 'categories.html',
|
"postcard.html",
|
||||||
'archives.html',
|
"recentposts.html",
|
||||||
'searchbox.html']
|
"tagcloud.html",
|
||||||
|
"categories.html",
|
||||||
|
"archives.html",
|
||||||
|
"searchbox.html",
|
||||||
|
]
|
||||||
}
|
}
|
||||||
html_theme_path = [alabaster.get_path()]
|
html_theme_path = [alabaster.get_path()]
|
||||||
html_theme_options = {
|
html_theme_options = {
|
||||||
'travis_button': True,
|
"travis_button": True,
|
||||||
'github_user': 'sunpy',
|
"github_user": "sunpy",
|
||||||
'github_repo': 'ablog',
|
"github_repo": "ablog",
|
||||||
'description': 'ABlog for blogging with Sphinx',
|
"description": "ABlog for blogging with Sphinx",
|
||||||
'logo': 'ablog.png',
|
"logo": "ablog.png",
|
||||||
}
|
}
|
||||||
|
|
||||||
# SPHINX
|
# SPHINX
|
||||||
|
|
||||||
intersphinx_mapping = {
|
intersphinx_mapping = {
|
||||||
'python': ('http://docs.python.org/', None),
|
"python": ("https://docs.python.org/", None),
|
||||||
'sphinx': ('http://sphinx-doc.org/', None)
|
"sphinx": ("http://www.sphinx-doc.org/en/latest/", None),
|
||||||
}
|
}
|
||||||
|
|
||||||
extlinks = {
|
extlinks = {
|
||||||
'wiki': ('http://en.wikipedia.org/wiki/%s', ''),
|
"wiki": ("https://en.wikipedia.org/wiki/%s", ""),
|
||||||
'issue': ('https://github.com/sunpy/ablog/issues/%s', 'issue '),
|
"issue": ("https://github.com/sunpy/ablog/issues/%s", "issue "),
|
||||||
'pull': ('https://github.com/sunpy/ablog/pull/%s', 'pull request '),
|
"pull": ("https://github.com/sunpy/ablog/pull/%s", "pull request "),
|
||||||
}
|
}
|
||||||
|
|
||||||
exclude_patterns = ['docs/manual/.ipynb_checkpoints/*']
|
exclude_patterns = ["docs/manual/.ipynb_checkpoints/*"]
|
||||||
|
|
||||||
rst_epilog = '''
|
rst_epilog = """
|
||||||
.. _Sphinx: http://sphinx-doc.org/
|
.. _Sphinx: http://sphinx-doc.org/
|
||||||
.. _Python: http://python.org
|
.. _Python: https://python.org
|
||||||
.. _Disqus: http://disqus.com/
|
.. _Disqus: https://disqus.com/
|
||||||
.. _GitHub: https://github.com/sunpy/ablog
|
.. _GitHub: https://github.com/sunpy/ablog
|
||||||
.. _PyPI: https://pypi.python.org/pypi/ablog
|
.. _PyPI: https://pypi.python.org/pypi/ablog
|
||||||
.. _Read The Docs: https://readthedocs.org/
|
.. _Read The Docs: https://readthedocs.org/
|
||||||
.. _Alabaster: https://github.com/bitprophet/alabaster
|
.. _Alabaster: https://github.com/bitprophet/alabaster
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import re
|
|
||||||
from sphinx import addnodes
|
|
||||||
|
|
||||||
|
|
||||||
event_sig_re = re.compile(r'([a-zA-Z-]+)\s*\((.*)\)')
|
event_sig_re = re.compile(r"([a-zA-Z-]+)\s*\((.*)\)")
|
||||||
|
|
||||||
|
|
||||||
def parse_event(env, sig, signode):
|
def parse_event(env, sig, signode):
|
||||||
|
@ -134,7 +146,7 @@ def parse_event(env, sig, signode):
|
||||||
name, args = m.groups()
|
name, args = m.groups()
|
||||||
signode += addnodes.desc_name(name, name)
|
signode += addnodes.desc_name(name, name)
|
||||||
plist = addnodes.desc_parameterlist()
|
plist = addnodes.desc_parameterlist()
|
||||||
for arg in args.split(','):
|
for arg in args.split(","):
|
||||||
arg = arg.strip()
|
arg = arg.strip()
|
||||||
plist += addnodes.desc_parameter(arg, arg)
|
plist += addnodes.desc_parameter(arg, arg)
|
||||||
signode += plist
|
signode += plist
|
||||||
|
@ -144,11 +156,13 @@ def parse_event(env, sig, signode):
|
||||||
def setup(app):
|
def setup(app):
|
||||||
from sphinx.ext.autodoc import cut_lines
|
from sphinx.ext.autodoc import cut_lines
|
||||||
from sphinx.util.docfields import GroupedField
|
from sphinx.util.docfields import GroupedField
|
||||||
app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))
|
|
||||||
app.add_object_type('confval', 'confval',
|
app.connect("autodoc-process-docstring", cut_lines(4, what=["module"]))
|
||||||
objname='configuration value',
|
app.add_object_type(
|
||||||
indextemplate='pair: %s; configuration value')
|
"confval",
|
||||||
fdesc = GroupedField('parameter', label='Parameters',
|
"confval",
|
||||||
names=['param'], can_collapse=True)
|
objname="configuration value",
|
||||||
app.add_object_type('event', 'event', 'pair: %s; event', parse_event,
|
indextemplate="pair: %s; configuration value",
|
||||||
doc_field_types=[fdesc])
|
)
|
||||||
|
fdesc = GroupedField("parameter", label="Parameters", names=["param"], can_collapse=True)
|
||||||
|
app.add_object_type("event", "event", "pair: %s; event", parse_event, doc_field_types=[fdesc])
|
||||||
|
|
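The version handling added to docs/conf.py above derives the short and full version strings from the distribution metadata. A standalone sketch of that string handling, using a made-up version string in place of `get_distribution('ablog').version`:

    # Hypothetical value; a real build reads get_distribution("ablog").version instead.
    dist_version = "0.9.6.dev12+g2e3dad5"

    # conf.py calls this the "short X.Y version"; it keeps the first three components.
    version = ".".join(dist_version.split(".")[:3])  # -> "0.9.6"
    release = dist_version.split("+")[0]             # full release without the local part -> "0.9.6.dev12"
    is_development = ".dev" in release               # True for this example

    print(version, release, is_development)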
|
@ -11,12 +11,12 @@ website project into a full-fledged blog with:
|
||||||
* `Font-Awesome integration`_
|
* `Font-Awesome integration`_
|
||||||
* `Easy GitHub Pages deploys`_
|
* `Easy GitHub Pages deploys`_
|
||||||
|
|
||||||
.. _Atom feeds: http://ablog.readthedocs.org/blog/atom.xml
|
.. _Atom feeds: https://ablog.readthedocs.org/blog/atom.xml
|
||||||
.. _Archive pages: http://ablog.readthedocs.org/blog/
|
.. _Archive pages: https://ablog.readthedocs.org/blog/
|
||||||
.. _Blog sidebars: http://ablog.readthedocs.org/manual/ablog-configuration-options/#sidebars
|
.. _Blog sidebars: https://ablog.readthedocs.org/manual/ablog-configuration-options/#sidebars
|
||||||
.. _Disqus integration: http://ablog.readthedocs.org/manual/ablog-configuration-options/#disqus-integration
|
.. _Disqus integration: https://ablog.readthedocs.org/manual/ablog-configuration-options/#disqus-integration
|
||||||
.. _Font-Awesome integration: http://ablog.readthedocs.org/manual/ablog-configuration-options/#fa
|
.. _Font-Awesome integration: https://ablog.readthedocs.org/manual/ablog-configuration-options/#fa
|
||||||
.. _Easy GitHub Pages deploys: http://ablog.readthedocs.org/manual/deploy-to-github-pages/
|
.. _Easy GitHub Pages deploys: https://ablog.readthedocs.org/manual/deploy-to-github-pages/
|
||||||
|
|
||||||
.. _installation:
|
.. _installation:
|
||||||
|
|
||||||
|
@ -33,9 +33,9 @@ making it look good, generating feeds, running deploy commands, and parsing
|
||||||
dates.
|
dates.
|
||||||
|
|
||||||
.. _pip: https://pip.pypa.io
|
.. _pip: https://pip.pypa.io
|
||||||
.. _Werkzeug: http://werkzeug.pocoo.org/
|
.. _Werkzeug: https://werkzeug.pocoo.org/
|
||||||
.. _Alabaster: https://github.com/bitprophet/alabaster
|
.. _Alabaster: https://github.com/bitprophet/alabaster
|
||||||
.. _Invoke: http://www.pyinvoke.org/
|
.. _Invoke: https://www.pyinvoke.org/
|
||||||
.. _dateutil: https://pypi.python.org/pypi/python-dateutil
|
.. _dateutil: https://pypi.python.org/pypi/python-dateutil
|
||||||
|
|
||||||
Getting Started
|
Getting Started
|
||||||
|
@ -62,7 +62,7 @@ If you already have a project, enable blogging by making following changes in ``
|
||||||
# 2b. if `templates_path` is defined
|
# 2b. if `templates_path` is defined
|
||||||
templates_path.append(ablog.get_html_templates_path())
|
templates_path.append(ablog.get_html_templates_path())
|
||||||
|
|
||||||
.. _ABlog Quick Start: http://ablog.readthedocs.org/manual/ablog-quick-start
|
.. _ABlog Quick Start: https://ablog.readthedocs.org/manual/ablog-quick-start
|
||||||
|
|
||||||
|
|
||||||
How it works
|
How it works
|
||||||
|
@ -109,10 +109,10 @@ can find more about configuring and using ABlog:
|
||||||
.. only:: html
|
.. only:: html
|
||||||
|
|
||||||
.. image:: https://secure.travis-ci.org/sunpy/ablog.png?branch=devel
|
.. image:: https://secure.travis-ci.org/sunpy/ablog.png?branch=devel
|
||||||
:target: http://travis-ci.org/#!/sunpy/ablog
|
:target: https://travis-ci.org/#!/sunpy/ablog
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/ablog/badge/?version=latest
|
.. image:: https://readthedocs.org/projects/ablog/badge/?version=latest
|
||||||
:target: http://ablog.readthedocs.org/
|
:target: https://ablog.readthedocs.org/
|
||||||
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
|
|
35
docs/make.bat
Normal file
35
docs/make.bat
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
@ECHO OFF
|
||||||
|
|
||||||
|
pushd %~dp0
|
||||||
|
|
||||||
|
REM Command file for Sphinx documentation
|
||||||
|
|
||||||
|
if "%SPHINXBUILD%" == "" (
|
||||||
|
set SPHINXBUILD=sphinx-build
|
||||||
|
)
|
||||||
|
set SOURCEDIR=.
|
||||||
|
set BUILDDIR=_build
|
||||||
|
|
||||||
|
if "%1" == "" goto help
|
||||||
|
|
||||||
|
%SPHINXBUILD% >NUL 2>NUL
|
||||||
|
if errorlevel 9009 (
|
||||||
|
echo.
|
||||||
|
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||||
|
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||||
|
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||||
|
echo.may add the Sphinx directory to PATH.
|
||||||
|
echo.
|
||||||
|
echo.If you don't have Sphinx installed, grab it from
|
||||||
|
echo.https://sphinx-doc.org/
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||||
|
goto end
|
||||||
|
|
||||||
|
:help
|
||||||
|
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
||||||
|
|
||||||
|
:end
|
||||||
|
popd
|
|
@ -49,7 +49,7 @@ Authors, languages, & locations
|
||||||
blog_authors = {
|
blog_authors = {
|
||||||
'Ahmet': ('Ahmet Bakan', 'http://ahmetbakan.com'),
|
'Ahmet': ('Ahmet Bakan', 'http://ahmetbakan.com'),
|
||||||
'Durden': ('Tyler Durden',
|
'Durden': ('Tyler Durden',
|
||||||
'http://en.wikipedia.org/wiki/Tyler_Durden'),
|
'https://en.wikipedia.org/wiki/Tyler_Durden'),
|
||||||
}
|
}
|
||||||
|
|
||||||
.. confval:: blog_languages
|
.. confval:: blog_languages
|
||||||
|
@ -177,7 +177,7 @@ Font awesome
|
||||||
ABlog templates will make use of `Font Awesome`_ icons if one of the following
|
ABlog templates will make use of `Font Awesome`_ icons if one of the following
|
||||||
is set:
|
is set:
|
||||||
|
|
||||||
.. _Font Awesome: http://fontawesome.io/
|
.. _Font Awesome: https://fontawesome.io/
|
||||||
|
|
||||||
|
|
||||||
.. confval:: fontawesome_link_cdn
|
.. confval:: fontawesome_link_cdn
|
||||||
|
@ -185,14 +185,14 @@ is set:
|
||||||
URL to `Font Awesome`_ :file:`.css` hosted at `Bootstrap CDN`_ or anywhere
|
URL to `Font Awesome`_ :file:`.css` hosted at `Bootstrap CDN`_ or anywhere
|
||||||
else. Default: ``None``
|
else. Default: ``None``
|
||||||
|
|
||||||
.. _Bootstrap CDN: http://www.bootstrapcdn.com/fontawesome/
|
.. _Bootstrap CDN: https://www.bootstrapcdn.com/fontawesome/
|
||||||
|
|
||||||
.. update:: Jul 29, 2015
|
.. update:: Jul 29, 2015
|
||||||
|
|
||||||
:confval:`fontawesome_link_cdn` was a *boolean* option, and now became a
|
:confval:`fontawesome_link_cdn` was a *boolean* option, and now became a
|
||||||
*string* to enable using desired version of `Font Awesome`_.
|
*string* to enable using desired version of `Font Awesome`_.
|
||||||
To get the old behavior, use
|
To get the old behavior, use
|
||||||
``‘http://netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.min.css'``.
|
``‘https://netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.min.css'``.
|
||||||
|
|
||||||
|
|
||||||
.. confval:: fontawesome_included
|
.. confval:: fontawesome_included
|
||||||
|
|
|
@ -58,4 +58,4 @@ involves the following steps:
|
||||||
.. _init_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#init-catalog
|
.. _init_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#init-catalog
|
||||||
|
|
||||||
.. _update_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#update-catalog
|
.. _update_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#update-catalog
|
||||||
.. _compile_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#id4
|
.. _compile_catalog: http://babel.edgewall.org/wiki/Documentation/setup.html#id4
|
||||||
|
|
|
@ -85,7 +85,7 @@ Analytics
|
||||||
ABlog uses Alabaster_ theme by default. You can use theme options to set
|
ABlog uses Alabaster_ theme by default. You can use theme options to set
|
||||||
your `Google Analytics`_ identifier to enable tracking.
|
your `Google Analytics`_ identifier to enable tracking.
|
||||||
|
|
||||||
.. _Google Analytics: http://www.google.com/analytics/
|
.. _Google Analytics: https://www.google.com/analytics/
|
||||||
|
|
||||||
Configuration
|
Configuration
|
||||||
-------------
|
-------------
|
||||||
|
@ -121,5 +121,3 @@ not mention yet. Here they are:
|
||||||
* :file:`.doctree` folder, created after build command is called, is
|
* :file:`.doctree` folder, created after build command is called, is
|
||||||
where Sphinx_ stores the state of your project. Files in this folder
|
where Sphinx_ stores the state of your project. Files in this folder
|
||||||
save time when you rebuild your project.
|
save time when you rebuild your project.
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,7 @@ Automate GitHub Pages Deploys
|
||||||
If being away from your personal computer is holding you back from blogging, keep
|
If being away from your personal computer is holding you back from blogging, keep
|
||||||
reading. This post will show you how to automate builds and deploys using
|
reading. This post will show you how to automate builds and deploys using
|
||||||
Travis CI. Once you set this up, all you need to do post an article will be
|
Travis CI. Once you set this up, all you need to do post an article will be
|
||||||
pushing to GitHub or creating a new file on `GitHub.com <http://github.com>`_
|
pushing to GitHub or creating a new file on `GitHub.com <https://github.com>`_
|
||||||
from any computer!
|
from any computer!
|
||||||
|
|
||||||
For this to work, you need to be hosting your website on GitHub pages.
|
For this to work, you need to be hosting your website on GitHub pages.
|
||||||
|
@ -108,4 +108,4 @@ See :ref:`deploy-to-github-pages` and :ref:`commands` to find out more about
|
||||||
deploy options.
|
deploy options.
|
||||||
|
|
||||||
Finally, you can find out more about the :file:`.travis.yml` file and customizing your build on
|
Finally, you can find out more about the :file:`.travis.yml` file and customizing your build on
|
||||||
Travis CI `user documentation <http://docs.travis-ci.com/user/customizing-the-build/>`_.
|
Travis CI `user documentation <https://docs.travis-ci.com/user/customizing-the-build/>`_.
|
||||||
|
|
|
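The step the post automates boils down to rebuilding the site and pushing it to
GitHub Pages; a hedged sketch of that step, where the ``--github-pages`` flag and
username are assumptions rather than values taken from this diff::

    # sketch of a CI deploy step; the deploy flag name is assumed,
    # check `ablog deploy --help` for the exact options
    import subprocess

    subprocess.run(["ablog", "build"], check=True)
    subprocess.run(["ablog", "deploy", "--github-pages", "yourusername"], check=True)
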
@@ -41,4 +41,4 @@ to get published:

Let us know how this works for you!

-.. _Jekyll: http://jekyllrb.com/
+.. _Jekyll: https://jekyllrb.com/

25 pyproject.toml Normal file

@@ -0,0 +1,25 @@
[build-system]
requires = ["setuptools", "setuptools_scm", "wheel"]
build-backend = 'setuptools.build_meta'

[tool.black]
line-length = 100
include = '\.pyi?$'
exclude = '''
(
  /(
      \.eggs
    | \.git
    | \.mypy_cache
    | \.tox
    | \.venv
    | _build
    | buck-out
    | build
    | dist
    | astropy_helpers
    | docs
  )/
  | ah_bootstrap.py
)
'''

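Because ``setuptools_scm`` sits in the build requirements (and setup.py below sets
``use_scm_version=True``), the package version is derived from git tags rather than
a hard-coded ``__version__``. A small sketch of inspecting that derived version;
the printed value is illustrative::

    # sketch: setuptools_scm reads the most recent git tag to build the version
    from setuptools_scm import get_version

    print(get_version())  # e.g. "0.10.0.dev3+g2e3dad5" between tagged releases
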
44 setup.cfg Normal file

@@ -0,0 +1,44 @@
[metadata]
name = ablog
author = The SunPy Community
author_email = sunpy@googlegroups.com
description = A Sphinx extension that converts any documentation or personal website project into a full-fledged blog.
long_description = file: README.rst
license = MIT
url = https://ablog.readthedocs.org/
edit_on_github = True
github_project = sunpy/ablog

[options]
python_requires = >=3.6
packages = find:
include_package_data = True
setup_requires = setuptools_scm
install_requires =
    werkzeug
    sphinx>=2.0
    alabaster
    invoke
    python-dateutil
    sphinx-automodapi

[options.extras_require]
notebook =
    nbsphinx
    ipython

[options.entry_points]
console_scripts =
    ablog = ablog.commands:ablog_main

[tool:isort]
line_length = 100
not_skip = __init__.py
sections = FUTURE, STDLIB, THIRDPARTY, FIRSTPARTY, LOCALFOLDER
default_section = THIRDPARTY
known_first_party = ablog
multi_line_output = 3
balanced_wrapping = True
include_trailing_comma = True
length_sort = False
length_sort_stdlib = True

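The ``console_scripts`` entry point above is what installs the ``ablog`` command;
running it is roughly equivalent to this sketch::

    # sketch: what the `ablog` console script resolves to; subcommands and
    # options are read from sys.argv exactly as on the command line
    from ablog.commands import ablog_main

    ablog_main()
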
78 setup.py

@@ -1,68 +1,20 @@
+from itertools import chain
+
 from setuptools import setup
+from setuptools.config import read_configuration

-__version__ = ''
-with open('ablog/__init__.py') as inp:
-    for line in inp:
-        if (line.startswith('__version__')):
-            exec(line.strip())
-            break
-
-long_description = '''
-ABlog for Sphinx
-================
-
-Please note that is an official continuation of
-`Eric Holscher's Ablog Sphinx extension <https://github.com/abakan/ablog/>`_.
-
-A Sphinx extension that converts any documentation or personal website project
-into a full-fledged blog. See http://ablog.readthedocs.org for details.
-
-.. image:: https://secure.travis-ci.org/sunpy/ablog.png?branch=devel
-    :target: http://travis-ci.org/#!/sunpy/ablog
-
-.. image:: https://readthedocs.org/projects/ablog/badge/?version=latest
-    :target: http://ablog.readthedocs.org/
-'''
-
-setup(
-    name='ablog',
-    version=__version__,
-    author='SunPy Developers',
-    author_email='nabil.freij@gmail.com',
-    description='ABlog allows you to blog with Sphinx',
-    long_description=long_description,
-    url='http://ablog.readthedocs.org/',
-    packages=['ablog'],
-    package_dir={'ablog': 'ablog'},
-    package_data={'ablog': [
-        'templates/*.html',
-        'locale/sphinx.pot',
-        'locale/*/LC_MESSAGES/sphinx.*o']},
-    license='MIT License',
-    keywords=('Sphinx, extension, blogging, atom feeds'),
-    classifiers=[
-        'Development Status :: 4 - Beta',
-        'Topic :: Software Development :: Documentation',
-        'License :: OSI Approved :: MIT License',
-        'Operating System :: MacOS',
-        'Operating System :: Microsoft :: Windows',
-        'Operating System :: POSIX',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 3',
-    ],
-    provides=['ablog'],
-    install_requires=['werkzeug', 'sphinx>=1.6', 'alabaster', 'invoke',
-                      'python-dateutil', 'sphinx-automodapi'],
-    extra_requires={'notebook': ['nbsphinx', 'ipython']},
-    message_extractors={
-        'ablog': [
-            ('**.html', 'jinja2', None),
-            ('**.py', 'python', None),
-        ]
-    },
-    entry_points={
-        'console_scripts': [
-            'ablog = ablog.commands:ablog_main',
-        ],
-    },
-)
+
+################################################################################
+# Programmatically generate some extras combos.
+################################################################################
+extras = read_configuration("setup.cfg")["options"]["extras_require"]
+
+# Dev is everything
+extras["dev"] = list(chain(*extras.values()))
+
+# All is everything but tests and docs
+exclude_keys = ("tests", "docs", "dev")
+ex_extras = dict(filter(lambda i: i[0] not in exclude_keys, extras.items()))
+# Concatenate all the values together for 'all'
+extras["all"] = list(chain.from_iterable(ex_extras.values()))
+
+setup(extras_require=extras, use_scm_version=True)

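To see what the new setup.py's extras bookkeeping produces, here is a self-contained
sketch that mirrors it against the ``[options.extras_require]`` section above; the
printed result is illustrative and depends on the extras actually declared::

    # sketch: mirrors the extras logic in the new setup.py
    from itertools import chain

    from setuptools.config import read_configuration

    extras = read_configuration("setup.cfg")["options"]["extras_require"]

    # "dev" collects every declared extra
    extras["dev"] = list(chain(*extras.values()))

    # "all" collects everything except tests, docs and dev
    exclude_keys = ("tests", "docs", "dev")
    ex_extras = {k: v for k, v in extras.items() if k not in exclude_keys}
    extras["all"] = list(chain.from_iterable(ex_extras.values()))

    print(extras)  # e.g. {'notebook': [...], 'dev': [...], 'all': [...]}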