Add Python code quality scanning #727

Closed
wants to merge 6 commits
39 changes: 39 additions & 0 deletions .github/workflows/code-quality.yml
@@ -0,0 +1,39 @@
# This workflow performs the following code quality checks:
# - PEP8 compliance: the workflow fails if the code is not PEP8 compliant.
# - flake8: problems identified by flake8 are listed, but the workflow
#   does not currently fail if flake8 reports errors.

name: Code Quality

on: [push, pull_request]

env:
max_line_length: 150

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"

- name: install pip tools
run: |
python -m pip install --upgrade pip
pip install --upgrade pip-tools

- name: install code quality tools
run: pip install --upgrade autopep8 flake8

- name: Run (partial) flake8
if: ${{ ! cancelled() }}
run: flake8 --select F401,F522,F524,F541 --show-source src/

- name: check PEP8 compliance
if: ${{ ! cancelled() }}
id: autopep8
run: |
autopep8 --diff --exit-code --recursive --max-line-length ${{ env.max_line_length }} --ignore E402 .
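For reference, here is a minimal, hypothetical module (not part of this PR) illustrating the kinds of problems the four selected flake8 codes report; it is import-safe because the offending .format() call is never executed.

import os  # F401: 'os' imported but unused


def format_issues():
    # F522: '.format(...)' has an unused named argument ('extra')
    greeting = "Hello, {name}".format(name="world", extra="unused")
    # F524: '.format(...)' is missing an argument ('b'); calling this raises KeyError
    broken = "{a} and {b}".format(a=1)
    # F541: f-string without any placeholders
    empty = f"static text"
    return greeting, broken, empty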
2 changes: 1 addition & 1 deletion requirements-server.txt
@@ -165,7 +165,7 @@ numpy==1.23.4
# pyarrow
oauthlib==3.2.2
# via requests-oauthlib
ods-tools==2.3.2
ods-tools==3.0.1
# via -r requirements-server.in
packaging==21.3
# via drf-yasg
2 changes: 1 addition & 1 deletion requirements-worker.txt
@@ -146,7 +146,7 @@ oasislmf[extra]==1.27.0
# via -r requirements-worker.in
oauthlib==3.2.2
# via requests-oauthlib
ods-tools==3.0.0
ods-tools==3.0.1
# via oasislmf
packaging==21.3
# via
2 changes: 1 addition & 1 deletion requirements.txt
@@ -282,7 +282,7 @@ oasislmf[extra]==1.27.0
# via -r ./requirements-worker.in
oauthlib==3.2.2
# via requests-oauthlib
ods-tools==3.0.0
ods-tools==3.0.1
# via
# -r ./requirements-server.in
# oasislmf
100 changes: 46 additions & 54 deletions scripts/update-changelog.py

Large diffs are not rendered by default.

5 changes: 1 addition & 4 deletions scripts/update-requirements.py
@@ -1,7 +1,4 @@





server = json.load(open('server-results.sarif'))
[v for v in server['runs'][0]['results'] if 'Fixed Version' in v['message']['text'] ]
[v for v in server['runs'][0]['results'] if 'Fixed Version' in v['message']['text']]
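The script above assumes json is already imported; a self-contained sketch of the same filtering idea (the SARIF filename and message field follow the lines shown, the rest is illustrative):

import json

# Load the SARIF report and keep only findings whose message mentions a fixed version.
with open('server-results.sarif') as f:
    server = json.load(f)

fixable = [
    result
    for result in server['runs'][0]['results']
    if 'Fixed Version' in result['message']['text']
]
print(f"{len(fixable)} findings have a fixed version available")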
4 changes: 3 additions & 1 deletion src/common/shared.py
@@ -1,5 +1,6 @@
import logging


def set_aws_log_level(log_level):
# Set log level for s3boto3
try:
@@ -13,9 +14,10 @@ def set_aws_log_level(log_level):
logging.getLogger('s3transfer').setLevel(LOG_LEVEL)
logging.getLogger('urllib3').setLevel(LOG_LEVEL)


def set_azure_log_level(log_level):
try:
LOG_LEVEL = getattr(logging, log_level.upper())
except AttributeError:
LOG_LEVEL = logging.WARNING
logging.getLogger('azure').setLevel(LOG_LEVEL)
logging.getLogger('azure').setLevel(LOG_LEVEL)
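A short usage sketch for these helpers (hypothetical call sites, not part of this diff): each accepts a level name and falls back to WARNING when the name is not a valid logging level.

from src.common.shared import set_aws_log_level, set_azure_log_level

set_aws_log_level('debug')      # 's3transfer', 'urllib3', etc. loggers set to DEBUG
set_azure_log_level('verbose')  # not a logging level name, so falls back to WARNING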
5 changes: 2 additions & 3 deletions src/conf/iniconf.py
@@ -76,14 +76,13 @@ def get_timedelta(self, section, option, **kwargs):
kwargs.setdefault('vars', self._get_section_env_vars(section))
kwargs_string = super(Settings, self).get(section, option, **kwargs)
try:
kwargs = {k.split('=')[0].strip():int(k.split('=')[1])
kwargs = {k.split('=')[0].strip(): int(k.split('=')[1])
for k in kwargs_string.split(',')}
except (TypeError, IndexError):
kwargs = {k.split('=')[0].strip():int(k.split('=')[1])
kwargs = {k.split('=')[0].strip(): int(k.split('=')[1])
for k in kwargs['fallback'].split(',')}
return timedelta(**kwargs)


def getboolean(self, section, option, **kwargs):
kwargs.setdefault('vars', self._get_section_env_vars(section))
return super(Settings, self).getboolean(section, option, **kwargs)
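The dict comprehension in get_timedelta turns a comma-separated key=value string into timedelta keyword arguments; a standalone sketch of that parsing (the option value is a made-up example):

from datetime import timedelta

kwargs_string = "hours=1, minutes=30"  # example ini option value
kwargs = {k.split('=')[0].strip(): int(k.split('=')[1])
          for k in kwargs_string.split(',')}
print(timedelta(**kwargs))  # 1:30:00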
43 changes: 20 additions & 23 deletions src/model_execution_worker/backends/azure_storage.py
@@ -3,44 +3,41 @@
import tempfile

from azure.core.exceptions import ResourceNotFoundError
from azure.storage.blob import (
BlobClient, BlobSasPermissions, BlobServiceClient, ContentSettings,
generate_blob_sas,
)
from azure.storage.blob import BlobServiceClient

from ...common.shared import set_azure_log_level
from ..storage_manager import BaseStorageConnector


class AzureObjectStore(BaseStorageConnector):
# https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python
# https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python

def __init__(self, settings):
self._service_client = None
self._client = None

# Required
self.account_name = settings.get('worker', 'AZURE_ACCOUNT_NAME')
self.account_key = settings.get('worker', 'AZURE_ACCOUNT_KEY')
self.azure_container = settings.get('worker', 'AZURE_CONTAINER')
self.account_name = settings.get('worker', 'AZURE_ACCOUNT_NAME')
self.account_key = settings.get('worker', 'AZURE_ACCOUNT_KEY')
self.azure_container = settings.get('worker', 'AZURE_CONTAINER')

# Optional
self.location = settings.get('worker', 'AZURE_LOCATION', fallback='')
self.connection_string = settings.get('worker', 'AZURE_CONNECTION_STRING', fallback=None)
self.shared_container = settings.get('worker', 'AZURE_SHARED_CONTAINER', fallback=True)
self.azure_ssl = settings.get('worker', 'AZURE_SSL', fallback=True)
self.upload_max_conn = settings.get('worker', 'AZURE_UPLOAD_MAX_CONN', fallback=2)
self.timeout = settings.get('worker', 'AZURE_CONNECTION_TIMEOUT_SECS', fallback=20)
self.max_memory_size = settings.get('worker', 'AZURE_BLOB_MAX_MEMORY_SIZE', fallback=2*1024*1024)
self.expiration_secs = settings.get('worker', 'AZURE_URL_EXPIRATION_SECS', fallback=None)
self.overwrite_files = settings.get('worker', 'AZURE_OVERWRITE_FILES', fallback=True)
self.location = settings.get('worker', 'AZURE_LOCATION', fallback='')
self.connection_string = settings.get('worker', 'AZURE_CONNECTION_STRING', fallback=None)
self.shared_container = settings.get('worker', 'AZURE_SHARED_CONTAINER', fallback=True)
self.azure_ssl = settings.get('worker', 'AZURE_SSL', fallback=True)
self.upload_max_conn = settings.get('worker', 'AZURE_UPLOAD_MAX_CONN', fallback=2)
self.timeout = settings.get('worker', 'AZURE_CONNECTION_TIMEOUT_SECS', fallback=20)
self.max_memory_size = settings.get('worker', 'AZURE_BLOB_MAX_MEMORY_SIZE', fallback=2 * 1024 * 1024)
self.expiration_secs = settings.get('worker', 'AZURE_URL_EXPIRATION_SECS', fallback=None)
self.overwrite_files = settings.get('worker', 'AZURE_OVERWRITE_FILES', fallback=True)
self.default_content_type = settings.get('worker', 'AZURE_DEFAULT_CONTENT', fallback='application/octet-stream')
self.cache_control = settings.get('worker', 'AZURE_CACHE_CONTROL', fallback=None)
self.sas_token = settings.get('worker', 'AZURE_SAS_TOKEN', fallback=None)
self.custom_domain = settings.get('worker', 'AZURE_CUSTOM_DOMAIN', fallback=None)
self.token_credential = settings.get('worker', 'AZURE_TOKEN_CREDENTIAL', fallback=None)
self.azure_log_level = settings.get('worker', 'AWS_LOG_LEVEL', fallback=logging.ERROR)
self.azure_protocol = 'https' if self.azure_ssl else 'http'
self.cache_control = settings.get('worker', 'AZURE_CACHE_CONTROL', fallback=None)
self.sas_token = settings.get('worker', 'AZURE_SAS_TOKEN', fallback=None)
self.custom_domain = settings.get('worker', 'AZURE_CUSTOM_DOMAIN', fallback=None)
self.token_credential = settings.get('worker', 'AZURE_TOKEN_CREDENTIAL', fallback=None)
self.azure_log_level = settings.get('worker', 'AWS_LOG_LEVEL', fallback=logging.ERROR)
self.azure_protocol = 'https' if self.azure_ssl else 'http'
set_azure_log_level(self.azure_log_level)

def _get_service_client(self):
7 changes: 4 additions & 3 deletions src/model_execution_worker/storage_manager.py
@@ -13,13 +13,13 @@
LOG_FILE_SUFFIX = 'txt'
ARCHIVE_FILE_SUFFIX = 'tar.gz'

#from .backends.aws_storage import AwsObjectStore
#from .backends.azure_storage import AzureObjectStore
# from .backends.aws_storage import AwsObjectStore
# from .backends.azure_storage import AzureObjectStore
#
#
#
#
#def StorageSelector(settings_conf):
# def StorageSelector(settings_conf):
# """ Returns a `StorageConnector` class based on conf.ini
#
# Call this method from model_execution_worker.task
@@ -53,6 +53,7 @@ class BaseStorageConnector(object):
Implements storage for a local fileshare between
`server` and `worker` containers
"""

def __init__(self, setting, logger=None):
self.media_root = setting.get('worker', 'MEDIA_ROOT')
self.storage_connector = 'FS-SHARE'
31 changes: 15 additions & 16 deletions src/model_execution_worker/tasks.py
@@ -31,7 +31,7 @@
from ..conf.iniconf import settings
from ..common.data import STORED_FILENAME, ORIGINAL_FILENAME

#from .storage_manager import StorageSelector
# from .storage_manager import StorageSelector
from .storage_manager import BaseStorageConnector
from .backends.aws_storage import AwsObjectStore
from .backends.azure_storage import AzureObjectStore
@@ -359,8 +359,8 @@ def start_analysis(analysis_settings, input_location, complex_data_files=None):
tmpdir_persist = settings.getboolean('worker', 'KEEP_RUN_DIR', fallback=False)
tmpdir_base = settings.get('worker', 'BASE_RUN_DIR', fallback=None)


# Setup Job cancellation handler

def analysis_cancel_handler(signum, frame):
logging.info('TASK CANCELLATION')
if proc is not None:
@@ -413,7 +413,7 @@ def analysis_cancel_handler(signum, frame):
args_list = run_args + [''] if (len(run_args) % 2) else run_args
mdk_args = [x for t in list(zip(*[iter(args_list)] * 2)) if (None not in t) and ('--model-run-dir' not in t) for x in t]
logging.info('run_directory: {}'.format(oasis_files_dir))
#logging.info('args_list: {}'.format(str(run_args)))
# logging.info('args_list: {}'.format(str(run_args)))
logging.info("\nExecuting: generate-losses")
if debug_worker:
logging.info("\nCLI command: \noasislmf model generate-losses {}".format(
Expand Down Expand Up @@ -512,10 +512,10 @@ def generate_input_cancel_handler(signum, frame):
with tmp_dir as oasis_files_dir, tmp_input_dir as input_data_dir:

# Fetch input files
location_file = filestore.get(loc_file, oasis_files_dir, required=True)
accounts_file = filestore.get(acc_file, oasis_files_dir)
ri_info_file = filestore.get(info_file, oasis_files_dir)
ri_scope_file = filestore.get(scope_file, oasis_files_dir)
location_file = filestore.get(loc_file, oasis_files_dir, required=True)
accounts_file = filestore.get(acc_file, oasis_files_dir)
ri_info_file = filestore.get(info_file, oasis_files_dir)
ri_scope_file = filestore.get(scope_file, oasis_files_dir)
lookup_settings_file = filestore.get(settings_file, oasis_files_dir)

run_args = [
@@ -547,12 +547,11 @@ def generate_input_cancel_handler(signum, frame):
if debug_worker:
run_args += ['--verbose']


# Log MDK generate command
args_list = run_args + [''] if (len(run_args) % 2) else run_args
mdk_args = [x for t in list(zip(*[iter(args_list)] * 2)) if None not in t for x in t]
logging.info('run_directory: {}'.format(oasis_files_dir))
#logging.info('args_list: {}'.format(str(run_args)))
# logging.info('args_list: {}'.format(str(run_args)))
logging.info("\nExecuting: generate-oasis-files")
if debug_worker:
logging.info("\nCLI command: \noasislmf model generate-oasis-files {}".format(
@@ -564,7 +563,7 @@ def generate_input_cancel_handler(signum, frame):
proc = subprocess.Popen(
['oasislmf', 'model', 'generate-oasis-files'] + run_args,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=worker_env,
preexec_fn=os.setsid, # run in a new session, assigning a new process group to it and its children.
preexec_fn=os.setsid, # run in a new session, assigning a new process group to it and its children.
)
stdout, stderr = proc.communicate()

@@ -581,13 +580,13 @@ def generate_input_cancel_handler(signum, frame):
summary_levels_fp = next(iter(glob.glob(os.path.join(oasis_files_dir, 'exposure_summary_levels.json'))), None)

# Store result files
traceback_file = filestore.create_traceback(stdout.decode(), stderr.decode(), oasis_files_dir)
traceback = filestore.put(traceback_file)
lookup_error = filestore.put(lookup_error_fp)
lookup_success = filestore.put(lookup_success_fp)
traceback_file = filestore.create_traceback(stdout.decode(), stderr.decode(), oasis_files_dir)
traceback = filestore.put(traceback_file)
lookup_error = filestore.put(lookup_error_fp)
lookup_success = filestore.put(lookup_success_fp)
lookup_validation = filestore.put(lookup_validation_fp)
summary_levels = filestore.put(summary_levels_fp)
output_tar_path = filestore.put(oasis_files_dir)
summary_levels = filestore.put(summary_levels_fp)
output_tar_path = filestore.put(oasis_files_dir)
return output_tar_path, lookup_error, lookup_success, lookup_validation, summary_levels, traceback, proc.returncode


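The mdk_args expressions in this file pair each CLI flag with its value via zip(*[iter(args_list)] * 2); a standalone sketch of that pairing idiom (argument names are illustrative only):

run_args = ['--oasis-files-dir', '/tmp/files',
            '--verbose-level', None,
            '--model-run-dir', '/tmp/run']

# Pad to an even length so flags and values pair up cleanly.
args_list = run_args + [''] if (len(run_args) % 2) else run_args

# Group into (flag, value) pairs, drop pairs with a missing value or the
# run-dir override, then flatten back into a flag/value list.
mdk_args = [x for t in zip(*[iter(args_list)] * 2)
            if (None not in t) and ('--model-run-dir' not in t)
            for x in t]

print(mdk_args)  # ['--oasis-files-dir', '/tmp/files']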