Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a DemoInstallSign in
Socket

packtivity

Package Overview
Dependencies
Maintainers
2
Versions
130
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

packtivity - npm Package Compare versions

Comparing version
0.14.21
to
0.14.22
+6
.bumpversion.cfg
# .bumpversion.cfg — configuration for the bumpversion/bump2version tool.
[bumpversion]
current_version = 0.14.22
# Create a git commit and tag automatically on each version bump.
commit = True
tag = True
# Also rewrite the version string inside setup.py when bumping.
[bumpversion:file:setup.py]
# GitHub Actions workflow: lint, test, docs build/deploy, and Docker build.
name: CI/CD
on:
  push:
  pull_request:
  # Run daily at 0:01 UTC
  schedule:
    - cron: '1 0 * * *'
jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest]
        # Quoted so YAML does not parse the versions as floats.
        python-version: ['3.6', '3.7', '3.8']
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          python -m pip install -q --no-cache-dir -e .[celery]
          # 'pytest>=3.6' must be quoted: an unquoted '>' is a shell redirection,
          # which would install an unconstrained pytest and create a file '=3.6'.
          python -m pip install -q --upgrade --no-cache-dir pyflakes 'pytest>=3.6' pytest-cov python-coveralls
          python -m pip list
      - name: Lint with Pyflakes
        if: matrix.python-version == '3.8'
        run: |
          python -m pyflakes packtivity
      - name: Test with pytest
        run: |
          python -m pytest --cov=packtivity -vv
      - name: Run packtivity tests
        run: |
          packtivity-run tests/testspecs/noop-test.yml -p a_parameter=hello
      - name: Report coverage with Codecov
        if: github.event_name == 'push' && matrix.python-version == '3.8'
        uses: codecov/codecov-action@v1.0.7
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./coverage.xml
          flags: unittests
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Full history is needed so the docs deploy can push to gh-pages.
          fetch-depth: 0
      - name: Set up Python 3.8
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          python -m pip install --ignore-installed -U -q --no-cache-dir -e .
          python -m pip install sphinx
          python -m pip list
          sudo apt-get update
          sudo apt-get -qq install pandoc
      - name: Test and build docs
        run: |
          cd docs
          make html
          cd ..
          # .nojekyll prevents GitHub Pages from running Jekyll over the output.
          touch docs/_build/html/.nojekyll
      - name: Deploy docs to GitHub Pages
        if: success() && github.event_name == 'push' && github.ref == 'refs/heads/master'
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: docs/_build/html
          force_orphan: true
          user_name: 'github-actions[bot]'
          user_email: 'github-actions[bot]@users.noreply.github.com'
          commit_message: Deploy to GitHub pages
  docker:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # Only a build check here; pushing is handled by the publish workflow.
      - name: Build Docker image
        if: "!(startsWith(github.ref, 'refs/tags/'))"
        uses: docker/build-push-action@v1
        with:
          repository: yadage/packtivity
          dockerfile: Dockerfile
          tag_with_sha: true
          tag_with_ref: true
          push: false
      - name: List built images
        run: docker images
# GitHub Actions workflow: push Docker images to Docker Hub on master and on tags.
name: Publish Docker Images
on:
  push:
    branches:
      - master
    tags:
      # Quoted so the '*' cannot be misread as YAML alias syntax.
      - 'v*'
jobs:
  build-and-publish:
    name: Build and publish Docker images to Docker Hub
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # Non-tag pushes (master) publish only the 'latest' tag.
      - name: Build and Publish to Registry
        if: "!(startsWith(github.ref, 'refs/tags/'))"
        uses: docker/build-push-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          repository: yadage/packtivity
          dockerfile: Dockerfile
          tags: latest
      # Tag pushes also publish 'latest-stable' plus the git tag itself.
      - name: Build and Publish to Registry with Release Tag
        if: startsWith(github.ref, 'refs/tags/')
        uses: docker/build-push-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          repository: yadage/packtivity
          dockerfile: Dockerfile
          tags: latest,latest-stable
          tag_with_ref: true
# GitHub Actions workflow: build sdist/wheel, sanity-check them,
# publish to TestPyPI on master pushes and to PyPI on tag pushes.
name: publish distributions
on:
  push:
    branches:
      - master
    tags:
      - 'v*'
  pull_request:
    branches:
      - master
jobs:
  build-and-publish:
    name: Build and publish Python distro to (Test)PyPI
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Full history so setuptools_scm can derive the version.
          fetch-depth: 0
      - name: Set up Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: '3.7'
      - name: Install pep517 and twine
        run: |
          python -m pip install pep517 --user
          python -m pip install twine
      - name: Build a binary wheel and a source tarball
        run: |
          python -m pep517.build --source --binary --out-dir dist/ .
      - name: Verify tagged commits don't have dev versions
        if: startsWith(github.ref, 'refs/tags')
        run: |
          wheel_name=$(find dist/ -iname "*.whl" -printf "%f\n")
          if [[ "${wheel_name}" == *"dev"* ]]; then
            echo "pep517.build incorrectly named built distribution: ${wheel_name}"
            echo "this is incorrectly being treated as a dev release"
            echo "intentionally erroring with 'exit 1' now"
            # 'return' is only valid inside a function or a sourced script;
            # 'exit 1' is the correct way to fail a workflow step.
            exit 1
          fi
          echo "pep517.build named built distribution: ${wheel_name}"
      - name: Verify the distribution
        run: twine check dist/*
      - name: Publish distribution 📦 to Test PyPI
        # every PR will trigger a push event on master, so check the push event is actually coming from master
        if: github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'yadage/packtivity'
        uses: pypa/gh-action-pypi-publish@v1.1.0
        with:
          password: ${{ secrets.test_pypi_password }}
          repository_url: https://test.pypi.org/legacy/
      - name: Publish distribution 📦 to PyPI
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && github.repository == 'yadage/packtivity'
        uses: pypa/gh-action-pypi-publish@v1.1.0
        with:
          password: ${{ secrets.pypi_password }}
# Ignore packaging metadata, virtualenvs, and coverage artifacts.
*.egg-info
venv
.coverage*
# Legacy Travis CI configuration (the GitHub Actions workflows supersede this).
sudo: required
services:
  - docker
  - redis-server
language: python
python:
  - '2.7'
  - '3.5'
install:
  # 'pytest>=3.6' must be quoted: an unquoted '>' is a shell redirection.
  - pip install -U pyflakes 'pytest>=3.6' pytest-cov python-coveralls
  # NOTE(review): --process-dependency-links was removed in pip 19 — confirm
  # the Travis image pins an older pip, or drop the flag.
  - pip install --process-dependency-links -e '.[celery]'
script:
  - pyflakes packtivity
  - pytest --cov=packtivity -vv
  - packtivity-run tests/testspecs/noop-test.yml -p a_parameter=hello
jobs:
  include:
    - stage: deploy
      deploy:
        provider: pypi
        # Travis-encrypted credentials (safe to commit in this form).
        password:
          secure: OCaYWRAgoQIgyraeTMOpDxRYh0bIazkB1pwNo/7yxbmDloYZeoHTMYZOWYJ9D6Al9wFzveHvW+iABOhP8Zc3e4crep94VIMfYj+IzPFnVMk4+t2ZnTArJRwuuKdzAl2+aeFcRjgUidORWKwzp/T4kf1ScLjSSf7SQQAQexnhGpGgHB4CHdXhV03fGydKeoBJKNhfXzmQtaNdtJtRURi0s86SQSpJJBDb1Jeike2ZpShJBiehLWQ0TMAivcPKaCI1stRwCeFKpInvxqrJBTj7oov+EdsH0eh1uMgwlNiHjd94RG2epFvlr1GBvq8Dr86/4KKykHrDtwQjLpsIvADzzS/9U+EEd4OKTGkXgF7RFcMKzrJ6MWrdLc5EzeeZRR2k3cPqDByy1Mnmevc+9EzqJHpQteigkXR7pcNjaq7vhFuzbwDcqtgwWZz1g3wenRpdqeYMq3qkkJ8D4yTBtTZejE1mWkUEFKR83ehS8J/7B1XkekHRncLm4vhNGOlcPFIcuvko1NayNmKejH+5BntpO0pZAafeE2vhK8kCDexQbf/P773IOAKPBU7b/pBs/xZ6EwCCqGj2HETsxgFMshMXwjq5f3hOseP8QHlbgelxlKu3Qf816SpUxBZH0STTS6xq3we6feCFpYyza2f9eB98zmF56PJRuzTjUs9+nh0QyEI=
        username:
          secure: E5tapPXNvC1e92k4KdCojyN2ZUxsN+EZWgB6rLtMh9qgNWZd0FmnX15IuXRD/HzPbSFa10Qus9M7kK0B8Bm0FhmU9a1KnzSPJEDaAGJ1RIx50EfFqJGbqpSp++HVzhHx6y3Jer6ltpeKoYdxFQwi+NVFHGEuJHltzKYcOx8XLWNe0AtZdi5LLqocJyk7MuQ/5QYGIygAR4/+sK7Pe8xmBR6uzrVcKRuoIzfxJEeXepLk779UnxD4Yqn0R19j8hdBNBd2q7ue9iAbPr+Uv3uTRnHjLGVPU4cKy8qeO5h4GXQBbeT1QyJsm+so/fSxqVIb50NNlaOlHSqaAmDGZ+fR70snEYOS+n2LqoD/KpOLip1upgjLo1bIqvUOStqVq4kLPUeb1pX3gONDq1YloSkJwKObNOXP5THMBf2mhtUDsT//iEGKytzclGISYyL77nyVKwcyix6BpHY1IcAvieenEMRtpcz2BaQWAd8s3Xgdk1Ht10wpskbAUomb8R8oI9kf3isvmSEjr57kLmr7zkCIODvHZt2y5Xj16bsruWMTtqfewkgniUOskGV3SNmMhdrgQnFBgv8M4P0C8CK+5zrpO2SXGqjOvyQJw06RjCKPqnWGl2kTPVds3cbrZ668clJexEpRj8m4wR1bgOUh4u1qLVIQtpO7eb6h/roFfAs7onA=
        on:
          tags: true
after_success:
  - coveralls
# Minimal image for packtivity based on CentOS 8.
FROM centos:8
RUN dnf install -y python3
# Copy the package source into the image and install it in editable mode.
COPY . /packtivity
WORKDIR /packtivity
# NOTE(review): could be merged into the first dnf layer to shrink the image.
RUN dnf install -y python3-pip
RUN pip3 install -e .
# -*- coding: utf-8 -*-
#
# packtivity documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 16 17:28:07 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "packtivity"
copyright = "2017, Lukas Heinrich"
author = "Lukas Heinrich"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE(review): stale relative to .bumpversion.cfg (0.14.22) — confirm whether
# the docs version is meant to track releases or is intentionally frozen.
version = "0.5"
# The full version, including alpha/beta/rc tags.
release = "0.5"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): Sphinx >= 5 warns on None; 'en' is the modern equivalent.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'packtivity v0.5'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = "packtivitydoc"

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "packtivity.tex",
        "packtivity Documentation",
        "Lukas Heinrich",
        "manual",
    ),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "packtivity", "packtivity Documentation", [author], 1)]

# If true, show URL addresses after external links.
#
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "packtivity",
        "packtivity Documentation",
        author,
        "packtivity",
        "One line description of project.",
        "Miscellaneous",
    ),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False

# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): legacy positional form; modern Sphinx prefers
# {'python': ('https://docs.python.org/', None)} — confirm target Sphinx version.
intersphinx_mapping = {"https://docs.python.org/": None}
.. packtivity documentation master file, created by
   sphinx-quickstart on Thu Mar 16 17:28:07 2017.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to packtivity's documentation!
======================================

Contents:

.. toctree::
   :maxdepth: 2

.. automodule:: packtivity.statecontexts.posixfs_context
   :members:

.. automodule:: packtivity.syncbackends
   :members:

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"

.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/packtivity.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/packtivity.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/packtivity"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/packtivity"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."
# Ignore compiled Python files and egg metadata.
*.pyc
*egg-info*
[build-system]
# Minimum requirements for the build system to execute.
# NOTE(review): attrs is unusual as a build requirement — confirm setup.py
# actually imports it at build time.
requires = ["wheel", "setuptools>=30.3.0", "attrs>=17.1", "setuptools_scm"]
build-backend = "setuptools.build_meta"
# Pytest configuration: always collect coverage for the packtivity package.
[pytest]
addopts = --ignore=setup.py --cov=packtivity --cov-report=term-missing --cov-config=.coveragerc --cov-report html
"""Shared pytest fixtures: pack objects loaded from test specs and backends."""
import pytest

import packtivity
import packtivity.utils
import packtivity.syncbackends
import packtivity.asyncbackends
from packtivity.statecontexts.posixfs_context import LocalFSState


@pytest.fixture()
def localproc_pack(tmpdir):
    """Pack that touches a file via a local process."""
    return packtivity.pack_object.fromspec("tests/testspecs/localtouchfile.yml")


@pytest.fixture()
def basic_localfs_state(tmpdir):
    """Local-filesystem state rooted at the per-test tmpdir."""
    return LocalFSState([str(tmpdir)])


@pytest.fixture
def default_handler_config():
    """Default pack handler configuration."""
    return packtivity.syncbackends.packconfig()


@pytest.fixture()
def localproc_pack_fail(tmpdir):
    """Pack whose local process is expected to fail."""
    return packtivity.pack_object.fromspec("tests/testspecs/localtouchfail.yml")


@pytest.fixture()
def docker_pack_fail(tmpdir):
    """Docker-based pack that is expected to fail."""
    return packtivity.pack_object.fromspec("tests/testspecs/dockerfail.yml")


@pytest.fixture()
def docker_script_pack_fail(tmpdir):
    """Docker script-style pack that is expected to fail."""
    return packtivity.pack_object.fromspec("tests/testspecs/dockerfail_script.yml")


@pytest.fixture()
def localproc_packspec(tmpdir):
    """Raw packtivity spec (not a pack object) for the local touch-file test."""
    return packtivity.utils.load_packtivity("tests/testspecs/localtouchfile.yml")


@pytest.fixture()
def dockeproc_pack(tmpdir):
    """Docker-based pack that touches a file."""
    return packtivity.pack_object.fromspec("tests/testspecs/dockertouchfile.yml")


@pytest.fixture()
def dockeproc_script_pack(tmpdir):
    """Docker script-style pack that touches a file."""
    return packtivity.pack_object.fromspec("tests/testspecs/dockertouchfile_script.yml")


@pytest.fixture()
def docker_touchfile_workdir(tmpdir):
    """Docker pack touching a file inside an explicit working directory."""
    return packtivity.pack_object.fromspec(
        "tests/testspecs/environment_tests/touchfile_docker_inworkdir.yml"
    )


@pytest.fixture()
def docker_env_resources(tmpdir):
    """Docker pack declaring environment resources (e.g. CVMFS)."""
    return packtivity.pack_object.fromspec(
        "tests/testspecs/environment_tests/resources_docker.yml"
    )


@pytest.fixture()
def docker_env_parmounts(tmpdir):
    """Docker pack declaring parameter mounts."""
    return packtivity.pack_object.fromspec(
        "tests/testspecs/environment_tests/resources_parmounts.yml"
    )


@pytest.fixture()
def fromjq_pub_default(tmpdir):
    """Pack using the default jq-based publisher."""
    return packtivity.pack_object.fromspec(
        "tests/testspecs/publisher_tests/fromjq-pub-default.yml"
    )


@pytest.fixture()
def default_sync():
    """Default synchronous backend."""
    return packtivity.syncbackends.defaultsyncbackend()


@pytest.fixture()
def default_async():
    """Multiprocess async backend with a pool of two workers."""
    return packtivity.asyncbackends.MultiProcBackend(2)
from packtivity.asyncbackends import MultiProcBackend
from packtivity import datamodel as pdm


def test_create_multiproc():
    """The backend can be constructed with an explicit pool size."""
    MultiProcBackend(4)


def test_multiproc(tmpdir, basic_localfs_state, localproc_packspec):
    """A submitted packtivity runs to success and produces the output file."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    backend = MultiProcBackend(2)
    proxy = backend.submit(localproc_packspec, pars, basic_localfs_state)
    # Busy-wait until the asynchronous job finishes.
    while not backend.ready(proxy):
        pass
    assert backend.successful(proxy)
    assert backend.result(proxy).json() == {
        "output": str(tmpdir.join("helloworld.txt"))
    }
    assert tmpdir.join("helloworld.txt").check()


def test_multiproc_fail(tmpdir, basic_localfs_state, localproc_pack_fail):
    """A failing packtivity is reported unsuccessful and exposes failure info."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    backend = MultiProcBackend(2)
    proxy = backend.submit(localproc_pack_fail, pars, basic_localfs_state)
    while not backend.ready(proxy):
        pass
    assert not backend.successful(proxy)
    backend.fail_info(proxy)
import pytest

from packtivity.backendutils import backend_from_string


def test_known():
    """Every named backend string resolves to a backend object."""
    for known_backend in [
        "celery",
        "multiproc:4",
        "multiproc:auto",
        "foregroundasync",
        "externalasync:default",
    ]:
        b = backend_from_string(known_backend)
        assert b


def test_python_import():
    """A 'py:' spec resolves a backend class by dotted import path."""
    b = backend_from_string(
        "py:packtivity.asyncbackends:MultiProcBackend", {"poolsize": 1}
    )
    assert b


def test_env_import(tmpdir, monkeypatch):
    """'fromenv' resolves the backend named in PACKTIVITY_ASYNCBACKEND,
    with and without an options file in PACKTIVITY_ASYNCBACKEND_OPTS."""
    monkeypatch.setenv(
        "PACKTIVITY_ASYNCBACKEND",
        "packtivity.asyncbackends:ForegroundBackend:ForegroundProxy",
    )
    b = backend_from_string("fromenv")
    optfile = tmpdir.join("opt.yml")
    optfile.write("{}")
    monkeypatch.setenv("PACKTIVITY_ASYNCBACKEND_OPTS", str(optfile))
    b = backend_from_string("fromenv")
    assert b


def test_unknown():
    """An unrecognized backend string raises RuntimeError."""
    with pytest.raises(RuntimeError):
        backend_from_string("doesnotexist")
from click.testing import CliRunner

import packtivity.cli


def test_maincli(tmpdir):
    """packtivity-run succeeds and creates the requested output file."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.runcli,
        [
            "tests/testspecs/localtouchfile.yml",
            "-p",
            'outputfile="{workdir}/hello.txt"',
            "-w",
            str(tmpdir),
        ],
    )
    assert result.exit_code == 0
    assert tmpdir.join("hello.txt").check()


def test_maincli_fail(tmpdir):
    """packtivity-run exits non-zero for a failing spec."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.runcli,
        [
            "tests/testspecs/localtouchfail.yml",
            "-p",
            'outputfile="{workdir}/hello.txt"',
            "-w",
            str(tmpdir),
        ],
    )
    assert result.exit_code != 0


def test_maincli_async(tmpdir):
    """--async writes a proxy file whose status can then be checked."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.runcli,
        [
            "tests/testspecs/localtouchfile.yml",
            "-p",
            'outputfile="{workdir}/hello.txt"',
            "-w",
            str(tmpdir.join("workdir")),
            "-b",
            "foregroundasync",
            "-x",
            str(tmpdir.join("proxy.json")),
            "--async",
        ],
    )
    assert result.exit_code == 0
    assert tmpdir.join("proxy.json").check()
    result = runner.invoke(packtivity.cli.checkproxy, [str(tmpdir.join("proxy.json"))])
    assert result.exit_code == 0
# NOTE(review): test_maincli/test_maincli_fail here repeat the definitions in
# the previous CLI test module — confirm against the repo whether this span is
# a separate test file (imports elided by the diff view) or true duplicates.
def test_maincli(tmpdir):
    """packtivity-run succeeds and creates the requested output file."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.runcli,
        [
            "tests/testspecs/localtouchfile.yml",
            "-p",
            'outputfile="{workdir}/hello.txt"',
            "-w",
            str(tmpdir),
        ],
    )
    assert result.exit_code == 0
    assert tmpdir.join("hello.txt").check()


def test_maincli_fail(tmpdir):
    """packtivity-run exits non-zero for a failing spec."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.runcli,
        [
            "tests/testspecs/localtouchfail.yml",
            "-p",
            'outputfile="{workdir}/hello.txt"',
            "-w",
            str(tmpdir),
        ],
    )
    assert result.exit_code != 0


def test_validatecli_valid(tmpdir):
    """packtivity-validate accepts a valid spec."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.validatecli, ["tests/testspecs/noop-test.yml"]
    )
    assert result.exit_code == 0


def test_validatecli_invalid(tmpdir):
    """packtivity-validate rejects an invalid spec with exit code 1."""
    runner = CliRunner()
    result = runner.invoke(
        packtivity.cli.validatecli, ["tests/testspecs/noop-test-invalid.yml"]
    )
    assert result.exit_code == 1
# NOTE(review): the publisher_handlers 'handlers' import below is immediately
# shadowed by the environment_handlers one — confirm the first is intentional.
from packtivity.handlers.publisher_handlers import handlers
from packtivity import datamodel as pdm
from packtivity.handlers.environment_handlers import handlers
from packtivity.syncbackends import finalize_inputs
import logging


def test_docker_parmounts(tmpdir, basic_localfs_state, docker_env_parmounts):
    """Parameter-mount content is the JSON-quoted parameter value."""
    state = basic_localfs_state
    environment = docker_env_parmounts.spec["environment"]
    parameters, state = finalize_inputs(
        pdm.create({"outputfile": "{workdir}/hello.txt"}), state
    )
    env = handlers[environment["environment_type"]]["default"](
        environment, parameters, state
    )
    assert env["par_mounts"][0]["mountcontent"] == '"{}"'.format(
        parameters["outputfile"]
    )
from packtivity.handlers.publisher_handlers import handlers
from packtivity import datamodel as pdm
from packtivity.handlers.execution_handlers import (
command_argv,
docker_execution_cmdline,
)
from packtivity.syncbackends import ExecutionConfig
import logging
def test_docker_cvmfs(tmpdir, basic_localfs_state, docker_env_resources, monkeypatch):
    """A bind mount in the race spec shows up as a -v flag on the docker command line."""
    state = basic_localfs_state
    log = logging.getLogger("test")
    cmdline = docker_execution_cmdline(
        ExecutionConfig(),
        state,
        log,
        {"name": "myname"},
        race_spec={
            "workdir": None,
            "stdin": None,
            "tty": False,
            "argv": ["echo", "hello", "world"],
            "image": "lukasheinrich/testimage",
            "mounts": [
                {
                    "type": "bind",
                    "source": "/cvmfs",
                    "destination": "/cvmfs",
                    "readonly": False,
                }
            ],
        },
    )
    # substring match — also satisfied by a "-v /cvmfs:/cvmfs:rw" flag
    assert "-v /cvmfs:/cvmfs" in cmdline
def test_docker_auth(tmpdir, basic_localfs_state):
    """A read-write bind mount is rendered with an explicit :rw suffix."""
    state = basic_localfs_state
    log = logging.getLogger("test")
    cmdline = docker_execution_cmdline(
        ExecutionConfig(),
        state,
        log,
        {"name": "myname"},
        race_spec={
            "workdir": None,
            "stdin": None,
            "tty": False,
            "argv": ["echo", "hello", "world"],
            "image": "lukasheinrich/testimage",
            "mounts": [
                {
                    "type": "bind",
                    "source": "/home/recast/recast_auth",
                    "destination": "/recast_auth",
                    # readonly False maps to the ":rw" volume option below
                    "readonly": False,
                }
            ],
        },
    )
    assert "-v /home/recast/recast_auth:/recast_auth:rw" in cmdline
import pytest
import packtivity.syncbackends
from packtivity import datamodel as pdm
def test_build_oneline_job(default_handler_config, basic_localfs_state):
    """string-interpolated-cmd specs build into a single interpolated command string."""
    job = packtivity.syncbackends.build_job(
        {"process_type": "string-interpolated-cmd", "cmd": "hello {one} {two}"},
        pdm.create({"one": "ONE", "two": "TWO"}),
        basic_localfs_state,
        default_handler_config,
    )
    assert "command" in job
    assert job["command"] == "hello ONE TWO"
def test_build_script_job(default_handler_config, basic_localfs_state):
    """interpolated-script-cmd specs build into a script plus its interpreter."""
    job = packtivity.syncbackends.build_job(
        {
            "process_type": "interpolated-script-cmd",
            "interpreter": "sh",
            "script": "hello {one} {two}\n echo another line {two}",
        },
        pdm.create({"one": "ONE", "two": "TWO"}),
        basic_localfs_state,
        default_handler_config,
    )
    assert "script" in job
    assert "interpreter" in job
import os
import pytest
from jsonschema.exceptions import ValidationError
from packtivity.utils import load_packtivity
from yadageschemas import schemadir
def test_cliload_valid():
    """load_packtivity accepts a schema-valid spec when validation is enabled."""
    load_packtivity("tests/testspecs/noop-test.yml", os.getcwd(), schemadir, True)
def test_cliload_non_valid():
    """An invalid spec raises ValidationError when validation is enabled."""
    with pytest.raises(ValidationError):
        load_packtivity(
            "tests/testspecs/noop-test-invalid.yml", os.getcwd(), schemadir, True
        )
def test_cliload_accept_non_valid():
    """An invalid spec is still loadable when validation is disabled (last arg False)."""
    load_packtivity(
        "tests/testspecs/noop-test-invalid.yml", os.getcwd(), schemadir, False
    )
import os
import pytest
from packtivity import datamodel as pdm
def test_pack_call_local(tmpdir, basic_localfs_state, localproc_pack):
    """Calling a local-process pack synchronously creates the requested output file."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    localproc_pack(parameters=pars, state=basic_localfs_state)
    assert tmpdir.join("helloworld.txt").check()
def test_pack_call_docker(tmpdir, basic_localfs_state, dockeproc_pack):
    """Calling a docker pack synchronously creates the requested output file."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    dockeproc_pack(parameters=pars, state=basic_localfs_state)
    assert tmpdir.join("helloworld.txt").check()
def test_pack_call_local_fail(
    tmpdir, basic_localfs_state, localproc_pack_fail, default_async
):
    """A failing local pack raises RuntimeError."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    with pytest.raises(RuntimeError):
        localproc_pack_fail(parameters=pars, state=basic_localfs_state)
    # the output file still exists: the spec's shell redirection creates it
    # before the command itself fails (see localtouchfail.yml)
    assert tmpdir.join("helloworld.txt").check()
def test_pack_call_docker_fail(
    tmpdir, basic_localfs_state, docker_pack_fail, default_async
):
    """A failing docker pack raises RuntimeError."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    with pytest.raises(RuntimeError):
        docker_pack_fail(parameters=pars, state=basic_localfs_state)
def test_pack_call_docker_script_fail(
    tmpdir, basic_localfs_state, docker_script_pack_fail, default_async
):
    """A failing docker script pack raises RuntimeError."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    with pytest.raises(RuntimeError):
        docker_script_pack_fail(parameters=pars, state=basic_localfs_state)
def test_pack_workdir(
    tmpdir, basic_localfs_state, docker_touchfile_workdir, default_async
):
    """Running the workdir-pinned docker pack is expected to fail.

    NOTE(review): the fixture name suggests a touch-file spec, yet the test
    expects RuntimeError — presumably the '{workdir}' workdir setting in the
    spec is invalid for this backend; confirm against the fixture definition.
    """
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    with pytest.raises(RuntimeError):
        docker_touchfile_workdir(parameters=pars, state=basic_localfs_state)
def test_pack_call_docker_script(tmpdir, basic_localfs_state, dockeproc_script_pack):
    """Calling a docker script pack synchronously creates the requested output file."""
    basic_localfs_state.ensure()
    pars = pdm.create({"outputfile": "{workdir}/helloworld.txt"})
    dockeproc_script_pack(parameters=pars, state=basic_localfs_state)
    assert tmpdir.join("helloworld.txt").check()
def test_pack_call_docker_async(
    tmpdir, basic_localfs_state, dockeproc_script_pack, default_async
):
    """Submitting via an async backend with asyncwait=True blocks until completion."""
    basic_localfs_state.ensure()
    # plain dict parameters (not pdm.create) are also accepted by the pack call
    pars = {"outputfile": "{workdir}/helloworld.txt"}
    dockeproc_script_pack(
        parameters=pars,
        state=basic_localfs_state,
        asyncbackend=default_async,
        asyncwait=True,
    )
    assert tmpdir.join("helloworld.txt").check()
def test_pack_call_docker_script_async(
    tmpdir, basic_localfs_state, dockeproc_script_pack, default_async
):
    """Submitting via an async backend and polling the proxy yields the output file."""
    import time

    basic_localfs_state.ensure()
    pars = {"outputfile": "{workdir}/helloworld.txt"}
    proxy = dockeproc_script_pack(
        parameters=pars, state=basic_localfs_state, asyncbackend=default_async
    )
    # poll with a short sleep instead of busy-spinning (`while ...: pass`),
    # which previously pegged a CPU core for the whole job duration
    while not default_async.ready(proxy):
        time.sleep(0.01)
    default_async.result(proxy)
    assert tmpdir.join("helloworld.txt").check()
def test_pack_prepublish(tmpdir, basic_localfs_state, localproc_pack, default_sync):
    """prepublish resolves the published output path before the pack actually runs."""
    basic_localfs_state.ensure()
    pars = {"outputfile": "{workdir}/helloworld.txt"}
    assert default_sync.prepublish(
        localproc_pack.spec, pars, basic_localfs_state
    ).json() == {"output": str(tmpdir.join("helloworld.txt"))}
from packtivity.backendutils import load_proxy
def test_celery():
    """A CeleryProxy survives a JSON round trip through load_proxy."""
    from packtivity.asyncbackends import CeleryProxy
    from celery.result import AsyncResult

    asyncresult = AsyncResult("1234")
    p = CeleryProxy(asyncresult)
    p, _ = load_proxy(p.json())
    assert type(p) == CeleryProxy
    # bug fix: this comparison previously lacked `assert`, so its result
    # was silently discarded and the task_id round trip went unchecked
    assert p.details()["task_id"] == asyncresult.task_id
def test_foreground():
    """A ForegroundProxy survives a JSON round trip through load_proxy."""
    from packtivity.asyncbackends import ForegroundProxy

    p = ForegroundProxy({"hello": "world"}, None, True)
    p, _ = load_proxy(p.json())
    assert type(p) == ForegroundProxy
def test_python():
    """load_proxy resolves a proxy class from an explicit py: module path.

    With best_effort_backend=False, load_proxy returns only the proxy
    (no backend tuple as in the other tests).
    """
    from packtivity.asyncbackends import ForegroundProxy

    p = ForegroundProxy({"hello": "world"}, None, True)
    p = load_proxy(
        p.json(),
        {"proxy": "py:packtivity.asyncbackends:ForegroundProxy"},
        best_effort_backend=False,
    )
    assert type(p) == ForegroundProxy
def test_env(monkeypatch):
    """load_proxy honors the PACKTIVITY_ASYNCBACKEND environment variable."""
    from packtivity.asyncbackends import ForegroundProxy

    # module:BackendClass:ProxyClass triple selects the backend via the env
    monkeypatch.setenv(
        "PACKTIVITY_ASYNCBACKEND",
        "packtivity.asyncbackends:ForegroundBackend:ForegroundProxy",
    )
    p = ForegroundProxy({"hello": "world"}, None, True)
    p, _ = load_proxy(p.json())
    assert type(p) == ForegroundProxy
from packtivity.handlers.publisher_handlers import handlers
from packtivity import datamodel as pdm
def test_parpub(tmpdir, basic_localfs_state):
    """frompar-pub maps parameter values onto output keys via outputmap."""
    pub = {"publisher_type": "frompar-pub", "outputmap": {"hello": "mypar"}}
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["frompar-pub"]["default"](pub, pars, basic_localfs_state)
    assert pubbed == {"hello": "myvalue"}
def test_interp_pub(tmpdir, basic_localfs_state):
    """interpolated-pub interpolates {param} placeholders into published values."""
    pub = {
        "publisher_type": "interpolated-pub",
        "publish": {"hello": "hello_{mypar}_world",},
        "relative_paths": False,
        "glob": False,
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["interpolated-pub"]["default"](pub, pars, basic_localfs_state)
    assert pubbed == {"hello": "hello_myvalue_world"}
def test_interp_pub_glob(tmpdir, basic_localfs_state):
    """With glob=True, an interpolated pattern expands to all matching files."""
    tmpdir.join("hello_myvalue_1.txt").ensure(file=True)
    tmpdir.join("hello_myvalue_2.txt").ensure(file=True)
    pub = {
        "publisher_type": "interpolated-pub",
        "publish": {"hello": "{workdir}/hello_{mypar}_*.txt",},
        "relative_paths": False,
        "glob": True,
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["interpolated-pub"]["default"](pub, pars, basic_localfs_state)
    filelist = list(
        map(
            str,
            [tmpdir.join("hello_myvalue_1.txt"), tmpdir.join("hello_myvalue_2.txt")],
        )
    )
    # glob order is not guaranteed — compare as sets
    assert set(pubbed["hello"]) == set(filelist)
def test_interp_pub_glob_relative(tmpdir, basic_localfs_state):
    """With relative_paths=True, bare filenames resolve against the state workdir."""
    tmpdir.join("hello_myvalue_1.txt").ensure(file=True)
    tmpdir.join("hello_myvalue_2.txt").ensure(file=True)
    pub = {
        "publisher_type": "interpolated-pub",
        "publish": {"hello": ["hello_myvalue_2.txt", "hello_myvalue_1.txt"],},
        "relative_paths": True,
        "glob": False,
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["interpolated-pub"]["default"](pub, pars, basic_localfs_state)
    filelist = list(
        map(
            str,
            [tmpdir.join("hello_myvalue_1.txt"), tmpdir.join("hello_myvalue_2.txt")],
        )
    )
    assert set(pubbed["hello"]) == set(filelist)
def test_fromparjq_pub(tmpdir, basic_localfs_state):
    """fromparjq-pub evaluates a jq script and resolves relative paths against the state."""
    tmpdir.join("hello_myvalue_1.txt").ensure(file=True)
    tmpdir.join("hello_myvalue_2.txt").ensure(file=True)
    pub = {
        "publisher_type": "fromparjq-pub",
        "script": '{hello: ["hello_myvalue_2.txt","hello_myvalue_1.txt"]}',
        "relative_paths": True,
        "tryExact": True,
        "glob": False,
    }
    pars = pdm.create({"mypar": "myvalue"})
    # NOTE(review): pdm.create is applied twice — looks redundant; confirm
    # whether re-wrapping an already-created parameter set is intentional.
    pars = pdm.create(pars)
    pubbed = handlers["fromparjq-pub"]["default"](pub, pars, basic_localfs_state)
    filelist = list(
        map(
            str,
            [tmpdir.join("hello_myvalue_1.txt"), tmpdir.join("hello_myvalue_2.txt")],
        )
    )
    assert set(pubbed["hello"]) == set(filelist)
def test_fromparjq_pub_relative(tmpdir, basic_localfs_state):
    """fromparjq-pub with glob=True expands wildcard strings from the jq script."""
    tmpdir.join("hello_myvalue_1.txt").ensure(file=True)
    tmpdir.join("hello_myvalue_2.txt").ensure(file=True)
    pub = {
        "publisher_type": "fromparjq-pub",
        "script": '{hello: "*.txt"}',
        "relative_paths": True,
        "glob": True,
        "tryExact": True,
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["fromparjq-pub"]["default"](pub, pars, basic_localfs_state)
    filelist = list(
        map(
            str,
            [tmpdir.join("hello_myvalue_1.txt"), tmpdir.join("hello_myvalue_2.txt")],
        )
    )
    assert set(pubbed["hello"]) == set(filelist)
def test_glob_pub(tmpdir, basic_localfs_state):
    """fromglob-pub publishes all files matching globexpression under outputkey."""
    tmpdir.join("hello_1.txt").ensure(file=True)
    tmpdir.join("hello_2.txt").ensure(file=True)
    pub = {
        "publisher_type": "fromglob-pub",
        "outputkey": "hello",
        "globexpression": "hello_*.txt",
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["fromglob-pub"]["default"](pub, pars, basic_localfs_state)
    filelist = list(map(str, [tmpdir.join("hello_1.txt"), tmpdir.join("hello_2.txt")]))
    assert set(pubbed["hello"]) == set(filelist)
def test_yml_pub(tmpdir, basic_localfs_state):
    """fromyaml-pub loads the published result from a YAML file in the workdir."""
    tmpdir.join("hello.yml").write("hello: world\n")
    pub = {
        "publisher_type": "fromyaml-pub",
        "yamlfile": "hello.yml",
    }
    pars = pdm.create({"mypar": "myvalue"})
    pubbed = handlers["fromyaml-pub"]["default"](pub, pars, basic_localfs_state)
    assert pubbed == {"hello": "world"}
import os
from packtivity import datamodel as pdm
def test_pack_call_local(tmpdir, basic_localfs_state):
    """The state model maps {workdir}-relative parameter paths to absolute paths.

    NOTE(review): this name duplicates a test in another module of the package;
    they live in separate files in the repository and do not collide at runtime.
    """
    pars = pdm.create(
        {"parcard": ["{workdir}/parcard.dat"], "banner_file": "{workdir}/banner.txt"}
    )
    newpars = basic_localfs_state.model(pars)
    # both scalar values and values inside lists are rewritten
    assert newpars["banner_file"] == os.path.join(str(tmpdir), "banner.txt")
    assert newpars["parcard"][0] == os.path.join(str(tmpdir), "parcard.dat")
def test_ok():
    """Trivial smoke test: module imports and pytest collection succeed."""
    pass
from packtivity.typedleafs import TypedLeafs
class MyClass(object):
    """Minimal two-attribute object used to exercise TypedLeafs (de)serialization.

    Implements the json()/fromJSON() pair that the TypedLeafs datamodel expects.
    """

    def __init__(self, first_attr, second_attr):
        # store both attributes verbatim
        self.first_attr = first_attr
        self.second_attr = second_attr

    def json(self):
        """Return a plain-dict JSON representation of this instance."""
        return dict(first_attr=self.first_attr, second_attr=self.second_attr)

    @classmethod
    def fromJSON(cls, data):
        """Reconstruct an instance from the dict produced by json()."""
        return cls(**data)
# Shared fixtures for the TypedLeafs tests below.
# datamodel: leafs tagged with the "$type" keyword deserialize via MyClass.
datamodel = {"keyword": "$type", "types": {"MyClass": MyClass}}
# a single typed leaf under one key
simple_data = {
    "hello": {"$type": "MyClass", "first_attr": "hello", "second_attr": "world"}
}
# typed leafs both inside a list and as a direct mapping value
nested_data = {
    "list_of_things": [
        {"$type": "MyClass", "first_attr": "hello", "second_attr": "world"},
        {"$type": "MyClass", "first_attr": "hello", "second_attr": "world"},
    ],
    "single_thing": {"$type": "MyClass", "first_attr": "hello", "second_attr": "world"},
}
def test_init():
    """TypedLeafs deserializes tagged leafs to MyClass and round-trips via json()."""
    tl = TypedLeafs(simple_data, datamodel)
    assert type(tl["hello"]) == MyClass
    assert tl["hello"].first_attr == "hello"
    assert tl["hello"].second_attr == "world"
    assert tl.json() == simple_data
    # same round trip via the fromJSON classmethod entry point
    tl = TypedLeafs.fromJSON(simple_data, deserialization_opts={"leafmodel": datamodel})
    assert tl.json() == simple_data
def test_deepnest():
    """leafs() yields JSON-pointer paths for typed leafs in lists and mappings."""
    tl = TypedLeafs(nested_data, datamodel)
    paths = [p.path for p, v in tl.leafs()]
    assert set(paths) == set(
        ["/list_of_things/0", "/list_of_things/1", "/single_thing"]
    )
def test_jq():
    """jq() queries select typed leafs; multiple_output returns one item per result."""
    tl = TypedLeafs(nested_data, datamodel)
    assert (
        tl.jq(".list_of_things[]", multiple_output=True)[0].json()
        == tl["list_of_things"][0].json()
    )
    assert (
        tl.jq(".list_of_things[]", multiple_output=True)[1].json()
        == tl["list_of_things"][1].json()
    )
    # wrapping in [...] collects the stream into one result
    assert tl.jq("[.list_of_things[]]").json() == nested_data["list_of_things"]
def test_jsonpath():
    """jsonpath() queries resolve typed leafs, singly or as multiple outputs."""
    tl = TypedLeafs(nested_data, datamodel)
    assert tl.jsonpath("single_thing").json() == tl["single_thing"].json()
    assert (
        tl.jsonpath("list_of_things[*]", multiple_output=True)[0].json()
        == tl["list_of_things"][0].json()
    )
def test_jsonpointer():
    """Every leaf path from leafs() resolves back to its value via jsonpointer()."""
    tl = TypedLeafs(nested_data, datamodel)
    for p, v in tl.leafs():
        try:
            assert tl.jsonpointer(p.path).json() == v.json()
        except AttributeError:
            # plain (untyped) leafs have no .json(); compare directly
            assert tl.jsonpointer(p.path) == v
def test_refs():
    """asrefs() exposes leafs as JSON pointers that resolve back to typed values."""
    # removed an unused local `import jq` left over from an earlier revision
    refs = TypedLeafs(nested_data, datamodel).asrefs()
    assert refs["list_of_things"][0].path == "/list_of_things/0"
    import jsonpointer

    jp = jsonpointer.JsonPointer("/list_of_things/0")
    tl = TypedLeafs(nested_data, datamodel)
    assert tl.resolve_ref(jp).json() == tl["list_of_things"][0].json()
def test_modify():
    """Assigning a typed value replaces a leaf and round-trips through json()."""
    # removed an unused local `import jq` left over from an earlier revision
    tl = TypedLeafs(nested_data, datamodel)
    tlnew = TypedLeafs(
        {"$type": "MyClass", "second_attr": "newsecond", "first_attr": "newfirst"},
        datamodel,
    )
    tl["single_thing"] = tlnew.typed()
    assert type(tlnew.typed()) == MyClass
    assert tl["single_thing"].json() == tlnew.typed().json()
import os
import pytest
from packtivity.utils import mkdir_p, leaf_iterator
def test_mkdir_notexist(tmpdir):
    """mkdir_p creates a directory that does not yet exist."""
    pathtomake = tmpdir.join("hello")
    mkdir_p(str(pathtomake))
    assert pathtomake.check()
def test_mkdir_exist(tmpdir):
    """mkdir_p is a no-op (no error) when the directory already exists."""
    pathtomake = tmpdir.join("hello")
    pathtomake.ensure(dir=True)
    mkdir_p(str(pathtomake))
    assert pathtomake.check()
def test_mkdir_exist_butfile(tmpdir):
    """mkdir_p raises OSError when the path exists but is a regular file."""
    pathtomake = tmpdir.join("hello")
    pathtomake.ensure(file=True)
    with pytest.raises(OSError):
        mkdir_p(str(pathtomake))
def test_leafit():
    """leaf_iterator yields (pointer, value) pairs for every scalar leaf."""
    testdata = {
        "hello": "world",
        "deeply": {"nested": ["l", "i"], "numbers": 123},
        "bool": True,
    }
    # iteration order is unspecified — compare as a set of (path, value) pairs
    leafs = set([(x.path, y) for x, y in leaf_iterator(testdata)])
    assert leafs == {
        ("/deeply/nested/0", "l"),
        ("/deeply/nested/1", "i"),
        ("/deeply/numbers", 123),
        ("/hello", "world"),
        ("/bool", True),
    }
process:
process_type: 'interpolated-script-cmd'
script: 'cat /non/existent/file > {outputfile}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'docker-encapsulated'
image: busybox
process:
process_type: 'string-interpolated-cmd'
cmd: 'cat /non/existent/file > {outputfile}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'docker-encapsulated'
image: busybox
process:
process_type: 'interpolated-script-cmd'
script: |
echo HELLO WORLD
echo Hello World > {outputfile}
echo WE ARE DONE!!
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'docker-encapsulated'
image: busybox
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo Hello World > {outputfile}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'docker-encapsulated'
image: busybox
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo Hello World > {outputfile}; touch inworkdir.txt'
publisher:
publisher_type: 'interpolated-pub'
glob: true
relative_paths: true
publish:
main_output: '{outputfile}'
inworkdir: inworkdir.txt
environment:
environment_type: 'docker-encapsulated'
image: atlas/slc6-atlasos
resources:
- CVMFS
- GRIDProxy
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo Hello World > {outputfile}; touch inworkdir.txt'
publisher:
publisher_type: 'interpolated-pub'
glob: true
relative_paths: true
publish:
main_output: '{outputfile}'
inworkdir: inworkdir.txt
environment:
environment_type: 'docker-encapsulated'
image: atlas/slc6-atlasos
par_mounts:
- mountpath: /parmounts/outputfile
jqscript: .outputfile
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo Hello World > {outputfile}; touch inworkdir.txt'
publisher:
publisher_type: 'interpolated-pub'
glob: true
relative_paths: true
publish:
main_output: '{outputfile}'
inworkdir: inworkdir.txt
environment:
environment_type: 'docker-encapsulated'
image: atlas/slc6-atlasos
imagetag: '8862202-20171025'
workdir: '{workdir}'

Sorry, the diff of this file is not supported yet

process:
process_type: 'string-interpolated-cmd'
cmd: 'cat /non/existent/file > {outputfile}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'localproc-env'
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo Hello World > {outputfile}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: outputfile
environment:
environment_type: 'localproc-env'
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo in a noop env {a_parameter}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: a_parameter
process:
process_type: 'string-interpolated-cmd'
cmd: 'echo in a noop env {a_parameter}'
publisher:
publisher_type: 'frompar-pub'
outputmap:
output: a_parameter
environment:
environment_type: 'noop-env'
+2
-2
Metadata-Version: 2.1
Name: packtivity
Version: 0.14.21
Version: 0.14.22
Summary: packtivity - general purpose schema + bindings for PROV activities
Home-page: UNKNOWN
Home-page: https://github.com/yadage/packtivity
Author: Lukas Heinrich

@@ -7,0 +7,0 @@ Author-email: lukas.heinrich@cern.ch

@@ -10,3 +10,2 @@ requests[security]

jq
psutil
yadage-schemas

@@ -13,0 +12,0 @@ mock

@@ -0,3 +1,16 @@

.bumpversion.cfg
.gitignore
.travis.yml
Dockerfile
README.md
pyproject.toml
pytest.ini
setup.py
.github/workflows/ci.yml
.github/workflows/publish-docker.yml
.github/workflows/publish-package.yml
docs/Makefile
docs/conf.py
docs/index.rst
packtivity/.gitignore
packtivity/__init__.py

@@ -33,2 +46,30 @@ packtivity/asyncbackends.py

packtivity/statecontexts/__init__.py
packtivity/statecontexts/posixfs_context.py
packtivity/statecontexts/posixfs_context.py
tests/conftest.py
tests/test_asyncbackends.py
tests/test_backends.py
tests/test_clis.py
tests/test_environments.py
tests/test_executions.py
tests/test_handlers.py
tests/test_loading.py
tests/test_main.py
tests/test_proxies.py
tests/test_publishers.py
tests/test_statecontexts.py
tests/test_syncbackends.py
tests/test_typedleafs.py
tests/test_utils.py
tests/testspecs/dockerfail.yml
tests/testspecs/dockerfail_script.yml
tests/testspecs/dockertouchfile.yml
tests/testspecs/dockertouchfile_script.yml
tests/testspecs/fast.umbrella
tests/testspecs/here.txt
tests/testspecs/localtouchfail.yml
tests/testspecs/localtouchfile.yml
tests/testspecs/noop-test-invalid.yml
tests/testspecs/noop-test.yml
tests/testspecs/environment_tests/resources_docker.yml
tests/testspecs/environment_tests/resources_parmounts.yml
tests/testspecs/environment_tests/touchfile_docker_inworkdir.yml

@@ -96,3 +96,3 @@ import os

if jsondata["proxyname"] in proxyhandlers.keys():
if jsondata["proxyname"] in list(proxyhandlers.keys()):
if jsondata["proxyname"] == "PacktivityProxyBase":

@@ -191,7 +191,7 @@ return None # by definition unserializable

for k in backends.keys():
for k in list(backends.keys()):
if backendstring.startswith(k):
return backends[k]["default"](backendstring, backendopts)
raise RuntimeError("Unknown Backend %s", backendstring, backends.keys())
raise RuntimeError("Unknown Backend %s", backendstring, list(backends.keys()))

@@ -198,0 +198,0 @@

@@ -72,4 +72,4 @@ import click

if not state:
state.setdefault("readwrite", []).extend(map(os.path.realpath, write))
state.setdefault("readonly", []).extend(map(os.path.realpath, read))
state.setdefault("readwrite", []).extend(list(map(os.path.realpath, write)))
state.setdefault("readonly", []).extend(list(map(os.path.realpath, read)))
state = LocalFSState(state["readwrite"], state["readonly"])

@@ -158,4 +158,4 @@ state.ensure()

if not state:
state.setdefault("readwrite", []).extend(map(os.path.realpath, write))
state.setdefault("readonly", []).extend(map(os.path.realpath, read))
state.setdefault("readwrite", []).extend(list(map(os.path.realpath, write)))
state.setdefault("readonly", []).extend(list(map(os.path.realpath, read)))
state = LocalFSState(state["readwrite"], state["readonly"])

@@ -204,4 +204,4 @@

if not state:
state.setdefault("readwrite", []).extend(map(os.path.realpath, write))
state.setdefault("readonly", []).extend(map(os.path.realpath, read))
state.setdefault("readwrite", []).extend(list(map(os.path.realpath, write)))
state.setdefault("readonly", []).extend(list(map(os.path.realpath, read)))
state = LocalFSState(state["readwrite"], state["readonly"])

@@ -217,7 +217,7 @@

if spec['process']['process_type'] == 'interpolated-script-cmd':
job = {'interactive': spec['process']['interpreter']}
if spec["process"]["process_type"] == "interpolated-script-cmd":
job = {"interactive": spec["process"]["interpreter"]}
else:
job = {'interactive': 'sh'}
metadata = {'name': 'test'}
job = {"interactive": "sh"}
metadata = {"name": "test"}

@@ -224,0 +224,0 @@ result = packtivity.syncbackends.run_in_env(

@@ -5,3 +5,2 @@ import os

import time
import psutil
import shlex

@@ -313,10 +312,2 @@ import pipes

try: # some issues on some linux machines.. swallow exception
log.debug(
"process children: %s",
[x for x in psutil.Process(proc.pid).children(recursive=True)],
)
except:
pass
for line in iter(proc.stdout.readline, b""):

@@ -323,0 +314,0 @@ subproclog.info(line.strip())

@@ -14,3 +14,3 @@ import click

k: v if not (type(v) == list) else " ".join([str(x) for x in v])
for k, v in parameters.typed().items()
for k, v in list(parameters.typed().items())
}

@@ -35,3 +35,3 @@ command = process_spec["cmd"].format(**flattened_kwargs)

k: v if not (type(v) == list) else " ".join([str(x) for x in v])
for k, v in parameters.typed().items()
for k, v in list(parameters.typed().items())
}

@@ -38,0 +38,0 @@ script = process_spec["script"].format(**flattened_kwargs)

@@ -118,5 +118,5 @@ import yaml

try:
published_json = raw_input("Enter JSON data to publish: ")
published_json = input("Enter JSON data to publish: ")
except NameError:
published_json = input("Enter JSON data to publish: ")
published_json = eval(input("Enter JSON data to publish: "))
try:

@@ -129,3 +129,3 @@ data = json.loads(published_json)

shall = (
raw_input(
input(
"got: \n {} \npublish? (y/N) ".format(

@@ -132,0 +132,0 @@ yaml.safe_dump(data, default_flow_style=False)

@@ -101,7 +101,11 @@ import os

for dirname, volspec in vols_by_dir_name.items():
for dirname, volspec in list(vols_by_dir_name.items()):
parmount_configmap_contmount.append(
{"name": volspec["name"], "mountPath": dirname}
)
return parmount_configmap_contmount, vols_by_dir_name.values(), configmapspec
return (
parmount_configmap_contmount,
list(vols_by_dir_name.values()),
configmapspec,
)

@@ -108,0 +112,0 @@ def get_job_mounts(self, job_uuid, jobspec_environment):

@@ -18,3 +18,3 @@ import json

self._types2str, self._str2types = {}, {}
for name, class_def in self.datamodel["types"].items():
for name, class_def in list(self.datamodel["types"].items()):
if type(class_def) == type:

@@ -65,3 +65,3 @@ self._types2str[class_def] = name

for k in found_identifiers.keys():
for k in list(found_identifiers.keys()):
spec.pop(k)

@@ -68,0 +68,0 @@ cl = self._str2types[found_identifiers[self.keyword]]

Metadata-Version: 2.1
Name: packtivity
Version: 0.14.21
Version: 0.14.22
Summary: packtivity - general purpose schema + bindings for PROV activities
Home-page: UNKNOWN
Home-page: https://github.com/yadage/packtivity
Author: Lukas Heinrich

@@ -7,0 +7,0 @@ Author-email: lukas.heinrich@cern.ch

# packtivity
[![DOI](https://zenodo.org/badge/53696818.svg)](https://zenodo.org/badge/latestdoi/53696818)
[![Build Status](https://travis-ci.org/yadage/packtivity.svg?branch=master)](https://travis-ci.org/diana-hep/packtivity)
[![Coverage Status](https://coveralls.io/repos/github/diana-hep/packtivity/badge.svg)](https://coveralls.io/github/diana-hep/packtivity)

@@ -6,0 +5,0 @@ [![Documentation Status](https://readthedocs.org/projects/packtivity/badge/?version=latest)](http://packtivity.readthedocs.io/en/latest/?badge=latest)

+33
-41

@@ -1,2 +0,1 @@

import os

@@ -6,47 +5,40 @@ from setuptools import setup, find_packages

deps = [
'requests[security]',
'jsonschema',
'jsonref',
'pyyaml',
'click',
'glob2',
'jsonpointer',
'jsonpath-rw',
'jq',
'psutil',
'yadage-schemas',
'mock',
'checksumdir',
"requests[security]",
"jsonschema",
"jsonref",
"pyyaml",
"click",
"glob2",
"jsonpointer",
"jsonpath-rw",
"jq",
"yadage-schemas",
"mock",
"checksumdir",
]
if not 'READTHEDOCS' in os.environ:
deps += ['jq']
if not "READTHEDOCS" in os.environ:
deps += ["jq"]
setup(
name = 'packtivity',
version = '0.14.21',
description = 'packtivity - general purpose schema + bindings for PROV activities',
url = '',
author = 'Lukas Heinrich',
author_email = 'lukas.heinrich@cern.ch',
packages = find_packages(),
include_package_data = True,
install_requires = deps,
extras_require={
'celery': [
'celery','redis'
],
},
entry_points = {
'console_scripts': [
'packtivity-run=packtivity.cli:runcli',
'packtivity-util=packtivity.cli:utilcli',
'packtivity-validate=packtivity.cli:validatecli',
'packtivity-checkproxy=packtivity.cli:checkproxy'
],
},
dependency_links = [
]
name="packtivity",
version="0.14.22",
description="packtivity - general purpose schema + bindings for PROV activities",
url="https://github.com/yadage/packtivity",
author="Lukas Heinrich",
author_email="lukas.heinrich@cern.ch",
packages=find_packages(),
include_package_data=True,
install_requires=deps,
extras_require={"celery": ["celery", "redis"],},
entry_points={
"console_scripts": [
"packtivity-run=packtivity.cli:runcli",
"packtivity-util=packtivity.cli:utilcli",
"packtivity-validate=packtivity.cli:validatecli",
"packtivity-checkproxy=packtivity.cli:checkproxy",
],
},
dependency_links=[],
)