Unverified Commit 10ca2dc5 authored by René Fritze's avatar René Fritze Committed by GitHub
Browse files

Switch from Azure Pipelines to Github Actions (#1518)

The workflow definitions are much nicer than before, thanks to an existing GitHub Action for setting up Conda (actually mambaforge) it's mostly the same for all three platforms. 
Plus @pymor/all can finally restart those pipelines on failure. 
Test jobs now take around 30% less time compared to the azure setup. Mainly due to environment caching. No more weird exit code plugin hacks either.
parents 99e1cc2a 957c9fe8
Pipeline #112033 passed with stages
in 60 minutes and 49 seconds
# Azure Pipelines entry point: delegates to the per-platform job templates.
jobs:
  - template: ./pipeline-osx.yml
  - template: ./pipeline-win.yml
# macOS CI job: conda-based environment, pytest with coverage, codecov upload.
jobs:
  - job: 'OSX_CI'
    pool:
      vmImage: macOS-10.15
    timeoutInMinutes: 65
    variables:
      PYMOR_HYPOTHESIS_PROFILE: ci
    strategy:
      maxParallel: 8
      matrix:
        osx_python3.7:
          CONFIG: osx_python3.7
          UPLOAD_PACKAGES: False
        osx_python3.9:
          CONFIG: osx_python3.9
          UPLOAD_PACKAGES: False
    steps:
      - bash: echo "##vso[task.prependpath]$CONDA/bin"
        displayName: Add conda to PATH
      - bash: sudo chown -R $USER $CONDA
        displayName: Take ownership of conda installation
      - bash: |
          set -x -e
          conda config --add channels conda-forge
          conda config --set channel_priority strict
          #conda update -y --all
          conda install -y --only-deps pymor
          # these ones are not in the 2020.1 conda build yet
          conda install -y pyevtk mpi4py slycot pytorch pyqt==5.12.3 pyside2==5.13.2 codecov "qtpy<2.0.0"
          # these are buildtime, not a runtime, deps for our conda package
          conda install -y cython pytest-cov pytest
          # install anything which might be a new dependency with pip
          pip install -r requirements.txt
          pip install -r requirements-ci.txt
          # this currently still introduces some conflicts
          # pip install -r requirements-optional.txt
        displayName: Configure conda and conda-build
      - script: |
          #set -ex
          export PYTHONPATH=${PWD}/src:${PYTHONPATH}
          export QT_API=pyside2
          export PYMOR_ALLOW_DEADLINE_EXCESS=1
          # this allows azure-specific defaults
          cp .ci/azure/pymor_defaults.py_osx pymor_defaults.py
          # ignore random interpreter error and rely on pytest exit instead
          python .ci/azure/run_tests.py --cov --cov-config=setup.cfg --cov-context=test || echo "ignoring python exit code"
          cat pytest.azure.success || exit 127
        displayName: py.test
      - script: |
          coverage xml
          codecov --file coverage.xml
        displayName: 'Upload to codecov.io'
      - publish: src/pymortests/testdata/check_results/
        artifact: changed_results_$(CONFIG)
        condition: always()
# Windows CI job: installs software MESA for OpenGL, builds a conda env,
# runs pytest with coverage and uploads results to codecov.
jobs:
  - job: 'Windows_CI'
    pool:
      vmImage: 'windows-2022'
    timeoutInMinutes: 75
    variables:
      PYMOR_HYPOTHESIS_PROFILE: ci
    strategy:
      matrix:
        Python37:
          python.version: '3.7'
        Python39:
          python.version: '3.9'
      maxParallel: 4
    steps:
      - script: |
          curl --output mesa.7z -L https://github.com/pal1000/mesa-dist-win/releases/download/20.3.4/mesa3d-20.3.4-release-msvc.7z
          7z x mesa.7z -omesa -y
          :: The script requires user input (choice of options) so need to
          :: fiddle to get it to run automatically. Not a clean way to do it,
          :: but works.
          sed -i 's/@echo Please make a deployment choice:/@GOTO desktopgl/g' mesa\systemwidedeploy.cmd
          sed -i 's/@echo Desktop OpenGL drivers deploy complete./@exit/g' mesa\systemwidedeploy.cmd
          mesa\systemwidedeploy.cmd
        displayName: download MESA
      - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
        displayName: Add conda to PATH
      - script: conda update -n base -c defaults conda
        displayName: Update base Anaconda environment
      - script: conda create -v --yes --name myEnvironment python=$(python.version)
        displayName: Create Anaconda environment
      - script: |
          call activate myEnvironment
          conda config --set channel_priority strict
          conda install -v -c conda-forge --yes --name myEnvironment numpy pip
        displayName: Conda 1/3
      - script: |
          call activate myEnvironment
          conda install -v -c conda-forge --yes --name myEnvironment --only-deps pymor
        displayName: Conda 2/3
      # note: 'cython' used to be listed twice in this install line
      - script: |
          call activate myEnvironment
          conda install -v -c conda-forge --yes --name myEnvironment pyopengl cython pyevtk slycot pytest pytest-cov curl hypothesis pyside2==5.13.2 "qtpy<2.0.0" typer click==7.1.2
        displayName: Conda 3/3
      - script: |
          call activate myEnvironment
          python -c "import numpy"
          python -c "import OpenGL.GL as gl; print(gl.glGetString(gl.GL_RENDERER))"
          python -c "import OpenGL.GL as gl; print(gl.glGetString(gl.GL_VERSION))"
          pip install pytest-azurepipelines pytest-datadir
        displayName: 'Sanity Checks'
      - script: |
          call activate myEnvironment
          set PYTHONPATH=%PYTHONPATH%;%cd%\src
          set QT_API=pyside2
          set PYMOR_ALLOW_DEADLINE_EXCESS=1
          :: this allows azure-specific defaults
          cp .ci/azure/pymor_defaults.py_win pymor_defaults.py
          :: async + coverage data in sqlite -> errors
          :: ignore random interpreter error and rely on pytest exit instead
          python .ci/azure/run_tests.py -k hapod || echo "ignoring python exit code"
          :: cat equivalent will fail if tests did not succeed
          type pytest.azure.success || exit 127
          python .ci/azure/run_tests.py -k "not hapod" --cov --cov-config=setup.cfg --cov-context=test || echo "ignoring python exit code"
          type pytest.azure.success || exit 127
        displayName: py.test
      - script: |
          call activate myEnvironment
          choco install -y codecov
          coverage xml
          codecov.exe -f coverage.xml
        displayName: 'Upload to codecov.io'
      - publish: src/pymortests/testdata/check_results/
        artifact: changed_results_win_$(python.version)
        condition: always()
REGEDIT4
; https://technet.microsoft.com/en-us/library/cc749368.aspx
; https://www.msfn.org/board/topic/143241-portable-windows-7-build-from-winpe-30/page-5#entry942596
[HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Microsoft\Windows NT\CurrentVersion\OpenGLDrivers\MSOGL]
"DLL"="mesadrv.dll"
"DriverVersion"=dword:00000001
"Flags"=dword:00000001
"Version"=dword:00000002
# This file is part of the pyMOR project (https://www.pymor.org).
# Copyright 2013-2021 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (https://opensource.org/licenses/BSD-2-Clause)
if __name__ == '__main__':
    import os
    import sys
    from pathlib import Path

    import pytest

    # The marker file signals a fully successful pytest run; CI checks for its
    # presence instead of trusting the interpreter's exit code.
    this_dir = Path(__file__).resolve().parent
    pymor_root_dir = (this_dir / '..' / '..').resolve()
    result_file_fn = pymor_root_dir / 'pytest.azure.success'
    # remove any stale marker from a previous run
    try:
        result_file_fn.unlink()
    except FileNotFoundError:
        pass
    profile = os.environ.get("PYMOR_HYPOTHESIS_PROFILE", "ci")
    args = ["--junitxml=test_results.xml", f"--hypothesis-profile={profile}"] + sys.argv[1:]
    # only write the marker when every test passed
    if pytest.main(args) == pytest.ExitCode.OK:
        result_file_fn.write_text('True')
......@@ -26,7 +26,7 @@ missing = [(u, e) for u, e in seen_set if u in contents and e not in contents]
duplicates = [(u, mails) for u, mails in seen.items() if len(mails) > 1]
lines = [l for l in open(mailmap).readlines() if not l.startswith("#")]
unsorted = [u for u, s in zip(lines, sorted(lines)) if u != s]
unsorted = [u for u, s in zip(lines, sorted((l.lower() for l in lines))) if u.lower() != s]
for user, email in missing:
print(f"missing mailmap entry for {user} {email}")
for user, emails in duplicates:
......
# THIS FILE IS AUTOGENERATED -- DO NOT EDIT #
name: pyMOR-ci
channels:
- conda-forge
dependencies:
- anaconda-client
- conda-build
- pip
- PySide2!=5.15.2,!=5.15.2.*,!=5.11.*,!=5.12.*,!=5.13.0
- bash_kernel
- check-manifest
- click
- docutils
- flake8-rst-docstrings
- ipyparallel>=6.2.5
- ipython>=5.0
- jupyter_client>=7.0.6
- jupyter_contrib_nbextensions
- lxml
- matplotlib
- meshio>=4.4
- mpi4py>=3.0
- mpi4py>=3.0.3
- myst-nb
- nbconvert
- nbresuse
- numpy>=1.16.0
- numpy>=1.17.5
- numpy>=1.19.4
- pillow
- pybind11
- pyevtk
- pyopengl
- pytest-parallel
- pytest>=6.0
- rstcheck
- scikit-fem
- scipy>=1.3
- scipy>=1.3.3
- scipy>=1.5.4
- setuptools
- slycot>=0.4.0
- sphinx-autoapi>=1.8
- sphinx>=3.4
- sympy
- twine
- typer
- pip:
- -r ../requirements-ci.txt
# THIS FILE IS AUTOGENERATED -- DO NOT EDIT #
#!/usr/bin/env python3
import itertools
import json
import operator
import os
import sys
from contextlib import contextmanager
from functools import reduce
from pathlib import Path
from subprocess import check_output, CalledProcessError
import jinja2
import logging
# platforms a package must be available on to be installable via conda
REQUIRED_PLATFORMS = ('osx-64', 'linux-64', 'win-64')
# stars are not actually a glob pattern, but as-is in the conda search output
REQUIRED_PYTHONS = ('3.7.*', '3.9.*')
# sentinel for noarch-python releases usable with any interpreter version
ANY_PYTHON_VERSION = 'any_python'
# sentinel for releases that do not depend on python at all
NO_PYTHON_VERSION = -1
# jinja2 template for the generated conda environment file
ENV_TPL = r'''# THIS FILE IS AUTOGENERATED -- DO NOT EDIT #
name: pyMOR-ci
channels:
- conda-forge
dependencies:
- anaconda-client
- conda-build
- pip
{% for pkg in available %}
- {{pkg}}
{%- endfor %}
- pip:
- -r ../requirements-ci.txt
# THIS FILE IS AUTOGENERATED -- DO NOT EDIT #
'''
# AFAICT we _should_ install pytorch-cpu instead of torch, that
# fails to install everywhere, so we're noping out of torch entirely
BLOCKLIST = ('torch', )
# pypi name -> conda-forge name for packages published under a different name
PYPI_TO_CONDA_PACKAGENAME_MAPPING = {'torch': 'pytorch-cpu'}
NO_ARCH = 'noarch'
THIS_DIR = Path(__file__).resolve().parent
LOGFILE = THIS_DIR / 'create_conda_env.log'
logging.basicConfig(filename=LOGFILE, level=logging.WARNING, filemode='wt')
@contextmanager
def change_to_directory(name):
    """Temporarily make `name` the current working directory.

    The previous working directory is restored when the context exits,
    even if the body raises.
    """
    previous = os.getcwd()
    os.chdir(name)
    try:
        yield
    finally:
        os.chdir(previous)
def _parse_req_file(path):
    """Recursively collect conda-searchable requirement specifiers from a pip
    requirements file.

    Follows ``-r other.txt`` includes (resolved relative to the including
    file), skips comments, blank lines and blocklisted packages, drops
    environment markers (the ``; ...`` suffix conda does not understand) and
    maps PyPI names to their conda counterparts.

    :param path: path to a ``requirements*.txt`` file (str or Path).
    :return: list of requirement strings.
    """
    path = Path(path).resolve()
    assert path.exists()
    assert path.is_file()
    pkgs = []
    with change_to_directory(path.parent):
        # close the file handle deterministically instead of relying on GC
        with open(path, 'rt') as req_file:
            lines = req_file.readlines()
        for line in lines:
            line = line.strip()
            # blank lines would otherwise end up as empty package specs
            if not line:
                continue
            if line.startswith('-r'):
                # recurse into included requirement files
                pkgs += _parse_req_file(line[line.find('-r ')+3:])
                continue
            if line.startswith('#'):
                continue
            if ';' in line:
                dropped = line.split(';')[0]
                logging.debug(f'Dropping chained specifier, using {dropped} instead of {line}')
                line = dropped
            name_only = _strip_markers(line)
            if name_only in BLOCKLIST:
                continue
            if name_only in PYPI_TO_CONDA_PACKAGENAME_MAPPING.keys():
                line = line.replace(name_only, PYPI_TO_CONDA_PACKAGENAME_MAPPING[name_only])
            pkgs.append(line)
    return pkgs
def _strip_markers(name):
for m in '!;<>=':
try:
i = name.index(m)
name = name[:i].strip()
except ValueError:
continue
return name
def _search_single(pkg, plat):
    """Run ``conda search`` for `pkg` restricted to the subdir `plat`.

    Search needs to explicitly say its subdir, else only the host's native
    platform is searched. On a failed search for a concrete platform we
    retry once against the ``noarch`` subdir.

    :return: tuple ``(platform, releases)``; releases is a list of package
        metadata dicts, newest first, and ``(None, [])`` when the package
        does not exist at all.
    :raises RuntimeError: when conda reports an error other than
        ``PackagesNotFoundError``.
    """
    cmd = ['/usr/bin/env', 'conda', 'search', '--channel=conda-forge', '--json', f'{pkg}[subdir={plat}]']
    try:
        output = check_output(cmd)
    except CalledProcessError as e:
        if plat != NO_ARCH:
            # a platform-specific miss may still exist as a noarch package
            logging.debug(f'Falling back to noarch for {pkg} - {plat}')
            return _search_single(pkg, NO_ARCH)
        # Parse conda's JSON error output. The parsing alone is guarded:
        # previously `raise RuntimeError(err)` lived inside this try block and
        # was immediately swallowed by `except Exception: raise e`, so the
        # intended error message never surfaced.
        try:
            err = json.loads(e.output)['error']
        except Exception:
            raise e
        if 'PackagesNotFoundError' in err:
            return None, []
        raise RuntimeError(err)
    pkg_name = _strip_markers(pkg).lower()
    out = json.loads(output)
    ll = list(itertools.chain.from_iterable((data for name, data in out.items() if name == pkg_name)))
    return plat, list(reversed(ll))
def _extract_conda_py(release):
try:
if release['package_type'] == 'noarch_python':
return ANY_PYTHON_VERSION
except KeyError:
pass
for pkg in release['depends']:
if pkg.startswith('python_abi'):
# format 'python_abi 3.9.* *_cp39'
return pkg.split(' ')[1]
if pkg.startswith('python'):
# format ''python >=3.9,<3.10.0a0''
l, r = pkg.find('>='), pkg.find(',')
return pkg[l+2:r]+'.*'
return NO_PYTHON_VERSION
def _available_on_required(json_result, required_plats, required_pys):
    """Check whether the releases in `json_result` jointly cover every
    required (platform, python) combination.

    Noarch releases count for all required platforms; 'any python' releases
    count for all required python versions. When coverage is incomplete, the
    uncovered combinations are logged as an error.

    :return: True if every combination is covered, False otherwise.
    """
    missing = list(itertools.product(required_plats, required_pys))
    name = 'PackageNameNotSet'
    for release in json_result:
        plat = release['subdir']
        if plat not in required_plats and plat != NO_ARCH:
            continue
        py = _extract_conda_py(release)
        if py not in required_pys and py != ANY_PYTHON_VERSION:
            continue
        covered_pys = required_pys if py == ANY_PYTHON_VERSION else [py]
        covered_plats = required_plats if plat == NO_ARCH else [plat]
        for pair in itertools.product(covered_plats, covered_pys):
            try:
                # combinations can be found multiple times
                missing.remove(pair)
            except ValueError as e:
                if 'list.remove' in str(e):
                    continue
                raise e
        if len(missing) == 0:
            return True
        name = release['name']
    logging.error(f'{name} not available on {missing}')
    return False
def _search(pkg):
    """Yield conda search result lists for `pkg` on every required platform.

    Stops early once a result turns out to be noarch, since a noarch
    package covers all platforms at once.
    """
    for platform in REQUIRED_PLATFORMS:
        found_plat, releases = _search_single(pkg, platform)
        yield releases
        if found_plat == NO_ARCH:
            return
def main(input_paths, output_path='conda-environment.yml'):
    """Partition the requirements into conda-available and pip-only packages
    and render the conda environment file.

    :param input_paths: iterable of requirement-file paths to parse.
    :param output_path: where the rendered environment yaml is written.
    :return: tuple ``(available, wanted)`` of sorted lists — packages that
        can come from conda-forge vs. those left to pip.
    """
    wanted = set(reduce(operator.concat, (_parse_req_file(p) for p in input_paths)))
    available = []
    for pkg in wanted:
        releases = reduce(operator.concat, _search(pkg))
        if _available_on_required(json_result=releases,
                                  required_plats=REQUIRED_PLATFORMS,
                                  required_pys=REQUIRED_PYTHONS):
            available.append(pkg)
    for pkg in available:
        wanted.remove(pkg)
    available, wanted = sorted(available), sorted(wanted)
    with open(output_path, 'wt') as yml:
        yml.write(jinja2.Template(ENV_TPL).render(available=available))
    return available, wanted
if __name__ == '__main__':
    out_fn = THIS_DIR / 'conda-env.yml'
    available, wanted = main(sys.argv[1:], output_path=out_fn)

    from rich.console import Console
    from rich.table import Table

    # pretty-print the partition side by side
    table = Table("available", "wanted", title="Conda search result")
    for row in itertools.zip_longest(available, wanted, fillvalue=''):
        table.add_row(*row)
    console = Console()
    console.print(table)
    console.print(f'Details at {LOGFILE}')
......@@ -3,7 +3,6 @@ infrastructure:
- .github/*
- .binder/*
- Makefile
- azure-pipelines.yml
- requirements*
- pyproject.toml
- MANIFEST.in
......
name: Conda Tests
on: [push]
jobs:
bugout:
name: Cancel superseded jobs
runs-on: ubuntu-20.04
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.9.1
with:
all_but_latest: true
# also works on 'pull_request' targets
ignore_sha: true
access_token: ${{ github.token }}
event_file:
# this is input for the pytest_results workflow
name: "Event File"
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v2
with:
name: Event File
path: ${{ github.event_path }}
miniconda:
name: Conda ${{ matrix.os }} - Python ${{ matrix.python }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, macos-11, windows-2022]
python: [3.7, 3.9]
include:
- os: ubuntu-20.04
prefix: /usr/share/miniconda3/envs/pyMOR-ci
- os: macos-11
prefix: /Users/runner/miniconda3/envs/pyMOR-ci
- os: macos-10.15
python: 3.7
prefix: /Users/runner/miniconda3/envs/pyMOR-ci
- os: windows-2022
prefix: C:\Miniconda3\envs\pyMOR-ci
exclude:
# these two don't mix WRT opengl loading: https://github.com/python/cpython/pull/21241
# explicitly include older macos for python 3.7 instead
- os: macos-11
python: 3.7
# avoid failure in one job immediately cancelling all others
fail-fast: false
steps:
- name: Install required X libs (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get install -y xvfb libxkbcommon-x11-0 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-xinerama0 libxcb-xinput0 libxcb-xfixes0
- name: Install
if: runner.os == 'Windows'
run: |
curl --output mesa.7z -L https://github.com/pal1000/mesa-dist-win/releases/download/20.3.4/mesa3d-20.3.4-release-msvc.7z
7z x mesa.7z -omesa -y
# The script requires user input (choice of options) so need to
# fiddle to get it to run automatically. Not a clean way to do it,
# but works.
sed -i 's/@echo Please make a deployment choice:/@GOTO desktopgl/g' ./mesa/systemwidedeploy.cmd
sed -i 's/@echo Desktop OpenGL drivers deploy complete./@exit/g' ./mesa/systemwidedeploy.cmd
./mesa/systemwidedeploy.cmd
- uses: actions/checkout@v2
- uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: pyMOR-ci
miniforge-variant: Mambaforge
use-mamba: true
python-version: ${{ matrix.python }}
channels: conda-forge
channel-priority: true
# This needs to be set for caching to work properly!
use-only-tar-bz2: true
- name: Stop if dependencies changed
shell: bash -l {0}
run: |
./dependencies.py && git diff --exit-code requirements* pyproject.toml
- name: Cache conda
uses: actions/cache@v2
id: cache
env:
# Increase this value to reset cache if .ci/conda-env.yml have not changed
CACHE_NUMBER: 1
with:
path: ${{ matrix.prefix }}
key:
${{ runner.os }}-${{ matrix.python }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('.ci/conda-env.yml') }}
- name: Update environment
run: mamba env update -n pyMOR-ci -f .ci/conda-env.yml
if: steps.cache.outputs.cache-hit != 'true'
- name: Export Conda Env
shell: bash -l {0}
run: |
mamba env export > conda-env__${{ runner.os }}-${{ matrix.python }}.yml
- name: Install pyMOR
shell: bash -l {0}
# this seems to be the most portable way of making sure everything is importable
run: conda develop .
# alas it still does not work everywhere, so manual PYTHONPATH it is
- name: Platform env
shell: bash -l {0}
run: |
if [[ ${{ runner.os }} == Linux ]]; then
echo "PYTEST_PREFIX=xvfb-run -a" >> $GITHUB_ENV
fi
# windows currently segfaults in first QT+MPL plot otherwise
if [[ ${{ runner.os }} == Windows ]]; then
echo "QT_API=pyqt5" >> $GITHUB_ENV
echo "QT_DEBUG_PLUGINS=1" >> $GITHUB_ENV
fi
echo "PYMOR_VERSION=$(python -c 'import pymor;print(pymor.__version__)')" >> $GITHUB_ENV
- name: Sanity check
shell: bash -l {0}
run: |
${PYTEST_PREFIX} python -c "from matplotlib.pyplot import *"
${PYTEST_PREFIX} python -c "from qtpy.QtGui import *"
${PYTEST_PREFIX} python -c "import OpenGL"
${PYTEST_PREFIX} python -c "import OpenGL.GL as gl"
- name: Run pytest
shell: bash -l {0}
env:
PYTHONPATH: ./src
PYMOR_HYPOTHESIS_PROFILE: "ci"
# we may be able to limit this to macos
PYMOR_ALLOW_DEADLINE_EXCESS: 1
RESULTS_FILE: result_${{ runner.os }}-${{ matrix.python }}.xml
COMMON_PYTEST_OPTS: "--cov-report=xml --cov --cov-config=setup.cfg --cov-context=test --junitxml=${RESULTS_FILE}"
run: |
${PYTEST_PREFIX} pytest ${COMMON_PYTEST_OPTS}
- uses: codecov/codecov-action@v2
name: Upload coverage
with:
flags: github_actions,${{ matrix.os }}
fail_ci_if_error: true
verbose: true
- name: Upload Unit Test Results
if: always()
uses: actions/upload-artifact@v2
with: