Minor changes. Formatting changes. Add some Python version environments for testing. Extended get_file_list to allow a list of globs rather than just a single glob.

parent b8bc24cf6f
commit 4780764a60

TODO.md | 1 +
@@ -5,3 +5,4 @@
 * pre- and post-scripts that will be run from __main__, either some shipped with pixywerk or project-level.
 * Library of template modules? ATOM et al.
 * Some off the shelf website templates and a template manager.
+* Live refreshing server thing which maps a pixywerk tree into a web server's memory and updates on change.

@@ -0,0 +1 @@
+__version__ = '0.3.0'

@@ -17,9 +17,16 @@ from .metadata import MetaTree
 from .processchain import ProcessorChains
 from .processors.processors import PassthroughException
 from .pygments import pygments_get_css, pygments_markup_contents_html
-from .template_tools import (date_iso8601, file_content, file_list,
-                             file_list_hier, file_metadata, file_name,
-                             file_raw, time_iso8601)
+from .template_tools import (
+    date_iso8601,
+    file_content,
+    file_list,
+    file_list_hier,
+    file_metadata,
+    file_name,
+    file_raw,
+    time_iso8601,
+)

 logger = logging.getLogger()


@@ -38,12 +45,11 @@ def get_args(args: List[str]) -> argparse.Namespace:
         "-c", "--clean", help="Remove the target tree before proceeding (by renaming to .bak).", action="store_true"
     )
     parser.add_argument("-s", "--safe", help="Abort if the target directory already exists.", action="store_true")
+    parser.add_argument("-f", "--follow-links", help="Follow symbolic links in the input tree.", action="store_true")
     parser.add_argument("-t", "--template", help="The template directory (default: root/templates)", default=None)
     parser.add_argument("-d", "--dry-run", help="Perform a dry-run.", action="store_true")
     parser.add_argument("-v", "--verbose", help="Output verbosely.", action="store_true")
     parser.add_argument("--processors", help="Specify a path to a processor configuration file.", default=None)
-    # parser.add_argument("--prescript", help="Specify one or more prescripts to run (in order specified) with context of the compile.", default=[], action="append")
-    # parser.add_argument("--postscript", help="Specify one or more postsscripts to run (in order specified) with context of the compile.", default=[], action="append")
     result = parser.parse_args(args)

     # validate arguments

@@ -102,7 +108,7 @@ def main() -> int:
         "pygments_markup_contents_html": pygments_markup_contents_html,
     }

-    for root, _, files in os.walk(args.root):
+    for root, _, files in os.walk(args.root, followlinks=args.follow_links):
         workroot = os.path.relpath(root, args.root)
         if workroot == ".":
             workroot = ""
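
The new --follow-links flag is wired straight into os.walk() via followlinks=, so symbolic links to directories inside the input tree are descended into when the flag is given. Note that os.walk() does not track directories it has already visited, so an input tree containing a symlink cycle can recurse without bound; the flag is best left off for such trees.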

@@ -93,7 +93,7 @@ class MetaTree:
         """Retrieve the metadata for a given path

         The general procedure is to iterate the tree, at each level
-        m load .meta (JSON formatted dictionary) for that level, and
+        load .meta (JSON formatted dictionary) for that level, and
         then finally load the path.meta, and merge these dictionaries
         in descendant order.

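As an illustration of that merge (hypothetical paths and keys, not taken from this commit): for a source file posts/entry.md the tree is walked from the root downward, so metadata set closer to the file overrides tree-wide defaults.

    .meta               -> {"author": "someone", "template": "default"}
    posts/.meta         -> {"template": "post"}
    posts/entry.md.meta -> {"title": "An entry"}
    merged result       -> {"author": "someone", "template": "post", "title": "An entry"}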

@@ -2,7 +2,7 @@ import datetime
 import glob
 import itertools
 import os
-from typing import Callable, Dict, Iterable, List, Union, cast
+from typing import Callable, Dict, Iterable, List, Union, cast, Tuple

 import pytz


@@ -11,12 +11,15 @@ from .processchain import ProcessorChains


 def file_list(root: str, listcache: Dict) -> Callable:
-    def get_file_list(path_glob: str, *, sort_order: str = "ctime", reverse: bool = False, limit: int = 0) -> Iterable:
+    def get_file_list(path_glob: Union[str, List[str], Tuple[str]], *, sort_order: str = "ctime", reverse: bool = False, limit: int = 0) -> Iterable:
         stattable = cast(List, [])
-        if path_glob in listcache:
-            stattable = listcache[path_glob]
+        if isinstance(path_glob, str):
+            path_glob = [path_glob]
+        for pglob in path_glob:
+            if pglob in listcache:
+                stattable.extend(listcache[pglob])
         else:
-            for fil in glob.glob(os.path.join(root, path_glob)):
+            for fil in glob.glob(os.path.join(root, pglob)):
                 if os.path.isdir(fil):
                     continue
                 if fil.endswith(".meta") or fil.endswith("~"):

@@ -32,7 +35,7 @@ def file_list(root: str, listcache: Dict) -> Callable:
                         "ext": os.path.splitext(fil)[1],
                     }
                 )
-        listcache[path_glob] = stattable
+        listcache[pglob] = stattable
         ret = sorted(stattable, key=lambda x: x[sort_order], reverse=reverse)
         if limit > 0:
             return itertools.islice(ret, limit)
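
As a usage sketch (not part of the commit; the root path, cache dict, and glob patterns are invented for illustration, and file_list from the template_tools module above is assumed to be in scope), the callable built by file_list is now meant to take either a single glob or a list/tuple of globs:

    listcache = {}                                # shared cache, keyed per glob
    get_file_list = file_list("site", listcache)  # "site" is a made-up source root

    # A single glob behaves as before:
    recent = get_file_list("posts/*.html", sort_order="ctime", reverse=True, limit=5)

    # New in this commit: several globs in one call, gathered into one sorted listing.
    combined = get_file_list(["posts/*.html", "notes/*.html"], sort_order="ctime")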

@@ -48,9 +51,6 @@ def file_list_hier(root: str, flist: Callable) -> Callable:
     def get_file_list_hier(path: str, glob: str, *, sort_order: str = "ctime", reverse: bool = False) -> Iterable:
         output = []

-        def collect(pth):
-            print(arg, pth, files)
-
         for pth in os.walk(os.path.join(root, path)):
             output.extend(
                 flist(

tox.ini | 4 +++-
@@ -1,5 +1,5 @@
 [tox]
-envlist=py{36,37}-{code-quality, unit} #, py37-sphinx
+envlist=py{36,37,38,39}-{code-quality, unit} #, py37-sphinx
 skipsdist = true

 [testenv]

@@ -17,6 +17,8 @@ commands =
 basepython =
     py36: python3.6
     py37: python3.7
+    py38: python3.8
+    py39: python3.9

 [flake8]
 max-line-length = 120
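
With the expanded envlist, the added interpreters can be exercised individually, e.g. tox -e py38-unit or tox -e py39-code-quality, assuming python3.8 and python3.9 are installed locally; running tox with no arguments still covers every environment in the list.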