Skip to content

Commit

Permalink
adding type hinting and mypy check
Browse files Browse the repository at this point in the history
  • Loading branch information
samuelcolvin committed Sep 17, 2016
1 parent 372ec27 commit c246896
Show file tree
Hide file tree
Showing 7 changed files with 31 additions and 26 deletions.
1 change: 1 addition & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ isort:
lint:
python setup.py check -rms
flake8 arq/ tests/
mypy --fast-parser --silent-imports arq/

.PHONY: test
test:
Expand Down
10 changes: 5 additions & 5 deletions arq/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# flake8: noqa
from .jobs import *
from .main import *
from .utils import *
from .version import *
from .worker import *
from .jobs import * # type: ignore
from .main import * # type: ignore
from .utils import * # type: ignore
from .version import * # type: ignore
from .worker import * # type: ignore
6 changes: 3 additions & 3 deletions arq/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,12 @@ class Job:
__slots__ = ('queue', 'queued_at', 'class_name', 'func_name', 'args', 'kwargs')

#: custom encoder for msgpack, see :class:`arq.jobs.DatetimeJob` for an example of usage
msgpack_encoder = None
msgpack_encoder = None # type: function

#: custom object hook for msgpack, see :class:`arq.jobs.DatetimeJob` for an example of usage
msgpack_object_hook = None
msgpack_object_hook = None # type: function

def __init__(self, queue: str, data: bytes):
def __init__(self, queue: str, data: bytes) -> None:
"""
    Create a job instance by decoding a job definition, e.g. from redis.
Expand Down
5 changes: 3 additions & 2 deletions arq/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ class Actor(RedisMixin, metaclass=ActorMeta):

#: if not None this name is used instead of the class name when encoding and referencing jobs,
#: if None the class's name is used
name = None
name = None # type: str

#: job class to use when encoding and decoding jobs from this actor
job_class = Job
Expand Down Expand Up @@ -92,7 +92,8 @@ async def enqueue_job(self, func_name: str, *args, queue: str=None, **kwargs):
"""
queue = queue or self.DEFAULT_QUEUE
try:
data = self.job_class.encode(class_name=self.name, func_name=func_name, args=args, kwargs=kwargs)
data = self.job_class.encode(class_name=self.name, func_name=func_name, # type: ignore
args=args, kwargs=kwargs) # type: ignore
except TypeError as e:
raise JobSerialisationError(str(e)) from e
main_logger.debug('%s.%s ▶ %s', self.name, func_name, queue)
Expand Down
17 changes: 9 additions & 8 deletions arq/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import os
from collections import OrderedDict
from datetime import datetime, timedelta, timezone
from typing import Tuple

import aioredis
from aioredis.pool import RedisPool
Expand All @@ -20,7 +21,7 @@


class SettingsMeta(type):
__dict__ = None
__dict__ = None # type: OrderedDict[str, object]

@classmethod
def __prepare__(mcs, *args, **kwargs):
Expand Down Expand Up @@ -49,7 +50,7 @@ class ConnectionSettings(metaclass=SettingsMeta):
R_HOST = 'localhost'
R_PORT = 6379
R_DATABASE = 0
R_PASSWORD = None
R_PASSWORD = None # type: str

def __init__(self, **custom_settings):
"""
Expand All @@ -76,7 +77,7 @@ class RedisMixin:
def __init__(self, *,
loop: asyncio.AbstractEventLoop=None,
settings: ConnectionSettings=None,
existing_pool: RedisPool=None):
existing_pool: RedisPool=None) -> None:
"""
:param loop: asyncio loop to use for the redis pool
:param settings: connection settings to use for the pool
Expand Down Expand Up @@ -127,7 +128,7 @@ def create_tz(utcoffset=0) -> timezone:
:param utcoffset: utc offset in seconds, if 0 timezone.utc is returned.
"""
if utcoffset == 0:
return timezone.utc
return timezone.utc # type: ignore
else:
return timezone(timedelta(seconds=utcoffset))

Expand All @@ -136,20 +137,20 @@ def create_tz(utcoffset=0) -> timezone:
EPOCH_TZ = EPOCH.replace(tzinfo=create_tz())


def timestamp():
def timestamp() -> float:
"""
:return: now in unix time, eg. seconds since 1970
"""
return (datetime.utcnow() - EPOCH).total_seconds()


def to_unix_ms(dt: datetime) -> int:
def to_unix_ms(dt: datetime) -> Tuple[int, int]:
"""
convert a datetime to number of milliseconds since 1970
"""
utcoffset = dt.utcoffset()
if utcoffset is not None:
utcoffset = utcoffset.total_seconds()
utcoffset = utcoffset.total_seconds() # type: ignore # looks like an error TODO
unix = (dt - EPOCH_TZ).total_seconds() + utcoffset
return int(unix * 1000), int(utcoffset)
else:
Expand All @@ -169,7 +170,7 @@ def from_unix_ms(ms: int, utcoffset: int=None) -> datetime:
return dt


def gen_random(length: int=20) -> str:
def gen_random(length: int=20) -> bytes:
"""
Create a random string.
Expand Down
16 changes: 8 additions & 8 deletions arq/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import time
from importlib import import_module, reload
from multiprocessing import Process
from signal import Signals
from signal import Signals # type: ignore

from .logs import default_log_config
from .utils import RedisMixin, ellipsis, gen_random, timestamp
Expand Down Expand Up @@ -57,7 +57,7 @@ def __init__(self, *,
queues: list=None,
timeout_seconds: int=None,
existing_shadows=None,
**kwargs):
**kwargs) -> None:
"""
:param burst: if true the worker will close as soon as no new jobs are found in the queue lists
:param shadows: list of :class:`arq.main.Actor` classes for the worker to run,
Expand All @@ -73,18 +73,18 @@ def __init__(self, *,
self.queues = queues
self.timeout_seconds = timeout_seconds or self.timeout_seconds
self.existing_shadows = existing_shadows
self._pending_tasks = set()
self._pending_tasks = set() # type: Set[asyncio.futures.Future]

self.jobs_complete, self.jobs_failed, self.jobs_timed_out = 0, 0, 0
self._task_exception = None
self._shadow_lookup = {}
self.start = None
self._task_exception = None # type: Exception
self._shadow_lookup = {} # type: Dict[str, object] # TODO
self.start = None # type: float
self.running = True
self._closed = False
self.job_class = None
self.job_class = None # type: type # TODO
signal.signal(signal.SIGINT, self.handle_sig)
signal.signal(signal.SIGTERM, self.handle_sig)
super().__init__(**kwargs)
super().__init__(**kwargs) # type: ignore # TODO
self._closing_lock = asyncio.Lock(loop=self.loop)

async def shadow_factory(self) -> list:
Expand Down
2 changes: 2 additions & 0 deletions tests/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@ docutils
flake8
isort
ghp-import
mypy-lang
pep8
pytest
pytest-cov
pytest-isort
pytest-sugar
pytz
sphinx
typed-ast

0 comments on commit c246896

Please sign in to comment.