Update tox so all ci steps can use local source if package reference not yet released (Azure#23897)

* Update CI so all steps can use local source if a package is not yet released
* Add an additional release check to ensure we can't release a package that doesn't have at least one matching release on PyPI for each requirement
* Pin the release tooling versions
* Document the additional skip variable in eng_sys_checks
* Add typing to a few functions used by this PR
* Remove Py2-specific build steps

Co-authored-by: scbedd <45376673+scbedd@users.noreply.github.com>
jalauzon-msft and scbedd authored Apr 13, 2022
1 parent dcfad09 commit 8614158
Showing 10 changed files with 137 additions and 39 deletions.
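The release gate this commit introduces boils down to one rule: every requirement in a package's setup.py must be satisfiable by at least one release that already exists on PyPI. A minimal sketch of that rule using packaging's `SpecifierSet`, with made-up version data standing in for the repo's PyPIClient:

```python
# Minimal sketch of the dependency gate added in this commit: a requirement
# passes only if at least one already-published version satisfies its specifier.
# The "published" data below is invented for illustration.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

published = {"azure-core": ["1.10.0", "1.11.0", "1.26.3"]}

def has_matching_release(name: str, spec: str) -> bool:
    versions = [Version(v) for v in published.get(name, [])]
    return any(SpecifierSet(spec).contains(v) for v in versions)

print(has_matching_release("azure-core", ">=1.11.0,<2.0.0"))  # True
print(has_matching_release("azure-core", ">=3.0.0"))          # False -> would block the release
```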
1 change: 1 addition & 0 deletions .vscode/cspell.json
@@ -242,6 +242,7 @@
"parameterizing",
"pytz",
"pywin",
"pyversion",
"RAGRS",
"rdbms",
"reauthenticated",
4 changes: 3 additions & 1 deletion doc/eng_sys_checks.md
@@ -98,6 +98,8 @@ This is the most useful skip, but the following skip variables are also supported
- Don't verify if a changeset includes breaking changes.
- `Skip.MyPy`
- Omit `mypy` checks in `analyze` job.
- `Skip.VerifyDependencies`
- Omit checking that a package's dependencies are on PyPI before releasing.

## Environment variables important to CI

@@ -343,7 +345,7 @@ extends:
To run autorest automation locally run the following command from the home of `azure-sdk-for-python`

```bash
azure-sdk-for-python> python scripts/devop_tasks/verify_autorest.py --service_directory <your_service_directory>
azure-sdk-for-python> python scripts/devops_tasks/verify_autorest.py --service_directory <your_service_directory>
```

## Nightly Live Checks
4 changes: 4 additions & 0 deletions eng/pipelines/templates/jobs/ci.tests.yml
@@ -83,6 +83,10 @@ jobs:
parameters:
AgentImage: $(OSVmImage)

- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'

- template: /eng/common/pipelines/templates/steps/set-test-pipeline-version.yml
parameters:
PackageName: "azure-template"
16 changes: 13 additions & 3 deletions eng/pipelines/templates/stages/archetype-python-release.yml
@@ -74,17 +74,27 @@ stages:
runOnce:
deploy:
steps:
- checkout: none
- checkout: self
- download: current
artifact: ${{parameters.ArtifactName}}
timeoutInMinutes: 5

- task: UsePythonVersion@0
inputs:
versionSpec: '3.9'

- script: |
set -e
pip install twine readme-renderer[md]
displayName: Install Twine
pip install -r eng/release_requirements.txt
pip install tools/azure-sdk-tools
displayName: Install Release Dependencies
- task: PythonScript@0
displayName: Verify Dependency Presence
condition: and(succeeded(), ne(variables['Skip.VerifyDependencies'], 'true'))
inputs:
scriptPath: 'scripts/devops_tasks/verify_dependencies_present.py'
arguments: '--package-name ${{ artifact.name }} --service ${{ parameters.ServiceDirectory }}'

- task: TwineAuthenticate@1
displayName: 'Authenticate to registry: pypi.org'
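The release stage now checks out the repository, pins Python 3.9, installs the pinned release tooling plus azure-sdk-tools, and runs the new dependency check (gated by `Skip.VerifyDependencies`) before Twine authentication. Roughly the same sequence can be reproduced locally from the repository root; this is only a sketch, and the `azure-template`/`template` pair is a stand-in package and service directory:

```python
# Rough local approximation of the "Install Release Dependencies" and
# "Verify Dependency Presence" steps above, run from the repository root.
import subprocess
import sys

subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", "eng/release_requirements.txt"])
subprocess.check_call([sys.executable, "-m", "pip", "install", "tools/azure-sdk-tools"])
subprocess.check_call([
    sys.executable, "scripts/devops_tasks/verify_dependencies_present.py",
    "--package-name", "azure-template",   # stand-in package
    "--service", "template",              # stand-in service directory
])
```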
15 changes: 0 additions & 15 deletions eng/pipelines/templates/steps/build-artifacts.yml
@@ -35,21 +35,6 @@ steps:
displayName: 'Tag scheduled builds'
condition: and(eq(variables['Build.SourceBranchName'], variables['DefaultBranch']), eq(variables['Build.Reason'],'Schedule'))
- task: UsePythonVersion@0
displayName: 'Use Python 2.7'
inputs:
versionSpec: '2.7'

- script: |
pip install -r eng/ci_tools.txt
displayName: 'Prep Py2 Environment'
- task: PythonScript@0
displayName: 'Generate Python2 Applicable Namespace Packages'
inputs:
scriptPath: 'scripts/devops_tasks/build_packages.py'
arguments: '-d "$(Build.ArtifactStagingDirectory)" "$(TargetingString)" --pkgfilter="nspkg" --service=${{parameters.ServiceDirectory}}'

- task: UsePythonVersion@0
displayName: 'Use Python $(PythonVersion)'
inputs:
3 changes: 3 additions & 0 deletions eng/release_requirements.txt
@@ -0,0 +1,3 @@
twine==3.1.1; python_version >= '3.6'
readme-renderer[md]==25.0
pkginfo==1.5.0.1
13 changes: 10 additions & 3 deletions eng/tox/create_package_and_install.py
@@ -87,7 +87,8 @@ def build_and_discover_package(setuppy_path, dist_dir, target_setup, package_typ
"bdist_wheel",
"-d",
dist_dir,
]
],
cwd = os.path.dirname(setuppy_path)
)
else:
check_call(
@@ -99,7 +100,8 @@ def build_and_discover_package(setuppy_path, dist_dir, target_setup, package_typ
"zip",
"-d",
dist_dir,
]
],
cwd = os.path.dirname(setuppy_path)
)

prebuilt_packages = [
@@ -179,8 +181,13 @@ def build_and_discover_package(setuppy_path, dist_dir, target_setup, package_typ
built_pkg_path = ""
setup_py_path = os.path.join(args.target_setup, "setup.py")
additional_downloaded_reqs = []

if not os.path.exists(args.distribution_directory):
os.mkdir(args.distribution_directory)

tmp_dl_folder = os.path.join(args.distribution_directory, "dl")
os.mkdir(tmp_dl_folder)
if not os.path.exists(tmp_dl_folder):
os.mkdir(tmp_dl_folder)

# preview version is enabled when installing dev build so pip will install dev build version from devpos feed
if os.getenv("SetDevVersion", 'false') == 'true':
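Two small robustness fixes are visible in this file: the `check_call` invocations that build the package now run with `cwd` pinned to the directory containing setup.py, and the distribution and download folders are only created when they do not already exist. A compact equivalent of the second pattern, with an illustrative path rather than the script's argument:

```python
# Equivalent "create only if missing" behaviour: os.makedirs with exist_ok=True
# is idempotent and creates intermediate directories in one call.
import os

distribution_directory = "/tmp/dist"  # illustrative path, not from the diff
tmp_dl_folder = os.path.join(distribution_directory, "dl")
os.makedirs(tmp_dl_folder, exist_ok=True)
```

The explicit `os.path.exists` guards in the diff achieve the same result one directory at a time.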
47 changes: 36 additions & 11 deletions eng/tox/tox.ini
@@ -75,9 +75,9 @@ commands =


[testenv:lint]
skipsdist = false
skip_install = false
usedevelop = true
skipsdist = true
skip_install = true
usedevelop = false
changedir = {toxinidir}
setenv =
{[testenv]setenv}
@@ -86,12 +86,17 @@ deps =
{[base]deps}
-e {toxinidir}/../../scripts/pylint_custom_plugin
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_package_and_install.py \
-d {distdir} \
-p {toxinidir} \
-w {envtmpdir} \
--package-type sdist
{envbindir}/python {toxinidir}/../../../eng/tox/run_pylint.py -t {toxinidir}


[testenv:mypy]
skipsdist = false
skip_install = false
skipsdist = true
skip_install = true
usedevelop = true
changedir = {toxinidir}
setenv =
@@ -104,6 +109,11 @@ deps =
types-requests==2.27.9
types-six==1.16.10
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_package_and_install.py \
-d {distdir} \
-p {toxinidir} \
-w {envtmpdir} \
--package-type sdist
{envbindir}/python {toxinidir}/../../../eng/tox/run_mypy.py -t {toxinidir}


@@ -169,8 +179,8 @@ commands =


[testenv:sphinx]
skipsdist = false
skip_install = false
skipsdist = true
skip_install = true
changedir = {toxinidir}
passenv = *
setenv =
@@ -184,6 +194,11 @@ deps =
mistune<2.0.0
m2r==0.2.1
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_package_and_install.py \
-d {distdir} \
-p {toxinidir} \
-w {envtmpdir} \
--package-type sdist
{envbindir}/python {toxinidir}/../../../eng/tox/prep_sphinx_env.py -d {distdir} -t {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/run_sphinx_apidoc.py \
-w {distdir} \
@@ -321,8 +336,8 @@ commands =


[testenv:bandit]
skipsdist = false
skip_install = false
skipsdist = true
skip_install = true
usedevelop = false
changedir = {envtmpdir}
setenv =
@@ -331,6 +346,11 @@ setenv =
deps =
{[base]deps}
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_package_and_install.py \
-d {envtmpdir} \
-p {toxinidir} \
-w {envtmpdir} \
--package-type sdist
{envbindir}/python -m pip freeze
{envbindir}/python {toxinidir}/../../../eng/tox/run_bandit.py -t {toxinidir}

@@ -352,8 +372,8 @@ commands =


[testenv:breaking]
skipsdist = false
skip_install = false
skipsdist = true
skip_install = true
usedevelop = true
changedir = {toxinidir}
setenv =
Expand All @@ -364,4 +384,9 @@ deps =
jsondiff==1.2.0
-e {toxinidir}/../../scripts/breaking_changes_checker
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_package_and_install.py \
-d {distdir} \
-p {toxinidir} \
-w {envtmpdir} \
--package-type sdist
{envbindir}/python {toxinidir}/../../../scripts/breaking_changes_checker/detect_breaking_changes.py -t {toxinidir}
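The recurring block added to the lint, mypy, sphinx, bandit, and breaking environments replaces tox's own sdist/install step: each environment now builds the package under test from local source as an sdist and installs it via create_package_and_install.py before running its tool, so an unreleased dependency can resolve from local source or the dev feed rather than PyPI. Expressed as a direct invocation outside tox — a sketch only, with stand-in directories for the `{distdir}`/`{envtmpdir}` substitutions tox would normally supply and an illustrative package path:

```python
# Out-of-tox sketch of the command block added to the lint env above.
import subprocess
import sys

package_dir = "sdk/template/azure-template"   # plays the role of {toxinidir}
subprocess.check_call([
    sys.executable, "eng/tox/create_package_and_install.py",
    "-d", "/tmp/tox-distdir",                  # stand-in for {distdir}
    "-p", package_dir,                         # stand-in for {toxinidir}
    "-w", "/tmp/tox-envtmpdir",                # stand-in for {envtmpdir}
    "--package-type", "sdist",
])
subprocess.check_call([sys.executable, "eng/tox/run_pylint.py", "-t", package_dir])
```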
40 changes: 34 additions & 6 deletions scripts/devops_tasks/common_tasks.py
@@ -21,6 +21,7 @@
import re
import fnmatch
import platform
from typing import Tuple, Iterable

# Assumes the presence of setuptools
from pkg_resources import parse_version, parse_requirements, Requirement, WorkingSet, working_set
@@ -137,8 +138,21 @@ def str_to_bool(input_string):
return False


def parse_setup(setup_path):
setup_filename = os.path.join(setup_path, "setup.py")
def parse_setup(setup_path: str) -> Tuple[str, str, Iterable[str], str]:
"""
This function is used for getting metadata about a package from its setup.py.
Tuple index:
* 0 = name
* 1 = version
* 2 = array of dependencies
* 3 = python_requires value
"""

setup_filename = setup_path
if not setup_path.endswith("setup.py"):
setup_filename = os.path.join(setup_path, "setup.py")

mock_setup = textwrap.dedent(
"""\
def setup(*args, **kwargs):
@@ -342,16 +356,17 @@ def is_error_code_5_allowed(target_pkg, pkg_name):
return False


def parse_require(req):
def parse_require(req) -> Tuple[str, str]:
"""
Parses the incoming version specification and returns a tuple of the requirement name and specifier.
"azure-core<2.0.0,>=1.11.0" -> [azure-core, <2.0.0,>=1.11.0]
"""

req_object = Requirement.parse(req.split(";")[0])
pkg_name = req_object.key
spec = SpecifierSet(str(req_object).replace(pkg_name, ""))
return [pkg_name, spec]
return (pkg_name, spec)


def find_whl(package_name, version, whl_directory):
@@ -435,7 +450,14 @@ def extend_dev_requirements(dev_req_path, packages_to_include):
dev_req_file.writelines(requirements)


def is_required_version_on_pypi(package_name, spec):
def is_required_version_on_pypi(package_name: str, spec: str) -> bool:
"""
This function evaluates a package name and version specifier combination and returns the versions on pypi
that satisfy the provided version specifier.
Import dependency on azure-sdk-tools.
"""

from pypi_tools.pypi import PyPIClient

client = PyPIClient()
Expand All @@ -447,7 +469,13 @@ def is_required_version_on_pypi(package_name, spec):
return versions


def find_packages_missing_on_pypi(path):
def find_packages_missing_on_pypi(path: str) -> Iterable[str]:
"""
Given a setup path, evaluate all dependencies and return a list of packages whos specifier can NOT be matched against PyPI releases.
Import dependency on pkginfo.
"""

import pkginfo

requires = []
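The typed helpers above do the heavy lifting for the new check: `parse_setup` reads a package's metadata, `parse_require` splits each requirement into a name and a `SpecifierSet`, and `is_required_version_on_pypi` / `find_packages_missing_on_pypi` compare those specifiers against published releases. A standalone sketch of `parse_require`'s behaviour, re-implemented here only so the snippet runs without the repository on `sys.path`:

```python
# Standalone sketch of parse_require as defined above.
from pkg_resources import Requirement
from packaging.specifiers import SpecifierSet

def parse_require(req: str):
    req_object = Requirement.parse(req.split(";")[0])
    pkg_name = req_object.key
    spec = SpecifierSet(str(req_object).replace(pkg_name, ""))
    return (pkg_name, spec)

name, spec = parse_require("azure-core<2.0.0,>=1.11.0; python_version >= '3.6'")
print(name)              # azure-core
print("1.11.0" in spec)  # True
print("2.0.0" in spec)   # False
```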
33 changes: 33 additions & 0 deletions scripts/devops_tasks/verify_dependencies_present.py
@@ -0,0 +1,33 @@
import os
import argparse

root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))

from common_tasks import find_packages_missing_on_pypi

if __name__ == "__main__":

parser = argparse.ArgumentParser(
description="This script is used during a release stage to prevent releasing packages on PyPI with missing dependencies."
)

parser.add_argument(
"--package-name",
required=True,
help="name of package (accepts both formats: azure-service-package and azure_service_package)",
)
parser.add_argument(
"--service",
required=True,
help="name of the service for which to set the dev build id (e.g. keyvault)",
)

args = parser.parse_args()

package_name = args.package_name.replace("_", "-")
path_to_setup = os.path.join(root_dir, "sdk", args.service, package_name, "setup.py")

missing_packages = find_packages_missing_on_pypi(path_to_setup)

if missing_packages:
exit(1)
