Commit

drop remaining six imports
branchvincent committed Jan 18, 2024
1 parent b94bb26 commit 4198d39
Showing 16 changed files with 14 additions and 29 deletions.
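For reference, the six imports removed below were either unused leftovers or are swapped for their Python 3 standard-library equivalents. A quick sketch of the mapping (the string_types line is an assumed replacement pattern, not taken from this diff):

# Python 3 stdlib equivalents of the six shims dropped in this commit.
from collections.abc import Iterable, Iterator, MutableMapping  # was: from six.moves import collections_abc
from queue import Queue, Empty                                  # was: from six.moves.queue import Queue
string_types = (str,)                                           # was: from six import string_types (assumed)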
1 change: 0 additions & 1 deletion requirements-base.txt
@@ -1,3 +1,2 @@
 requests>=1.0
 retrying>=1.3.3
-six>=1.10.0
5 changes: 2 additions & 3 deletions scrapinghub/client/collections.py
@@ -1,5 +1,4 @@
-from six import string_types
-from six.moves import collections_abc
+from collections.abc import Iterable
 
 from ..hubstorage.collectionsrt import Collection as _Collection

@@ -183,7 +182,7 @@ def delete(self, keys):
         The method returns ``None`` (original method returns an empty generator).
         """
         if (not isinstance(keys, str) and
-                not isinstance(keys, collections_abc.Iterable)):
+                not isinstance(keys, Iterable)):
             raise ValueError("You should provide string key or iterable "
                              "object providing string keys")
         self._origin.delete(keys)
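As an aside, a minimal standalone sketch of the key check above (hypothetical helper name, for illustration only):

from collections.abc import Iterable

def validate_keys(keys):  # hypothetical helper, not part of the library
    # A plain string is accepted as a single key; any other iterable
    # (list, tuple, generator) is treated as a batch of string keys.
    if not isinstance(keys, str) and not isinstance(keys, Iterable):
        raise ValueError("You should provide string key or iterable "
                         "object providing string keys")

validate_keys("key1")             # ok: single string key
validate_keys(["key1", "key2"])   # ok: iterable of keys
# validate_keys(123)              # would raise ValueError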
2 changes: 0 additions & 2 deletions scrapinghub/client/frontiers.py
@@ -1,8 +1,6 @@
 from functools import partial
 from collections import defaultdict
 
-from six import string_types
-
 from ..hubstorage.frontier import Frontier as _Frontier
 from ..hubstorage.utils import urlpathjoin

3 changes: 1 addition & 2 deletions scrapinghub/client/proxy.py
@@ -1,4 +1,3 @@
-import six
 import json
 
 from ..hubstorage import ValueTooLarge as _ValueTooLarge
@@ -165,7 +164,7 @@ def iter(self):
         :return: an iterator over key/value pairs.
         :rtype: :class:`collections.abc.Iterable`
         """
-        return next(self._origin.apiget()).items()
+        return iter(next(self._origin.apiget()).items())
 
 
 def _format_iter_filters(params):
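The added iter() call matters because in Python 3 dict.items() returns a view, which is iterable but not an iterator; wrapping it keeps iter() returning something you can call next() on, as the updated tests below expect. A quick illustration:

from collections.abc import Iterable, Iterator

d = {'meta1': 'data1'}
assert isinstance(d.items(), Iterable)        # a dict view is iterable...
assert not isinstance(d.items(), Iterator)    # ...but not an iterator
assert isinstance(iter(d.items()), Iterator)  # iter() restores iterator semantics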
2 changes: 0 additions & 2 deletions scrapinghub/client/utils.py
@@ -5,8 +5,6 @@
 import warnings
 from codecs import decode
 
-import six
-
 
 class LogLevel:
     DEBUG = logging.DEBUG
1 change: 0 additions & 1 deletion scrapinghub/hubstorage/batchuploader.py
@@ -3,7 +3,6 @@
 import random
 import logging
 import warnings
-import six
 from queue import Queue
 from io import BytesIO
 from gzip import GzipFile
5 changes: 2 additions & 3 deletions scrapinghub/hubstorage/resourcetype.py
@@ -3,9 +3,8 @@
 import socket
 import time
 
-import six
 import requests.exceptions as rexc
-from six.moves import collections_abc
+from collections.abc import MutableMapping
 
 from .utils import urlpathjoin, xauth
 from .serialization import jlencode, jldecode, mpdecode
@@ -226,7 +225,7 @@ def stats(self):
         return next(self.apiget('stats', chunk_size=STATS_CHUNK_SIZE))
 
 
-class MappingResourceType(ResourceType, collections_abc.MutableMapping):
+class MappingResourceType(ResourceType, MutableMapping):
 
     _cached = None
     ignore_fields = ()
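Subclassing collections.abc.MutableMapping directly imposes the same contract the six shim did; a minimal sketch (toy class, not the library's implementation) of the five abstract methods a subclass must provide:

from collections.abc import MutableMapping

class DictBackedResource(MutableMapping):
    # Toy example: the five methods below are all that MutableMapping
    # requires; mixins such as update(), pop(), and setdefault() then
    # come for free.
    def __init__(self):
        self._data = {}

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

r = DictBackedResource()
r.update(field='value')  # mixin method, provided by MutableMapping
assert dict(r) == {'field': 'value'}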
1 change: 0 additions & 1 deletion scrapinghub/hubstorage/serialization.py
@@ -1,4 +1,3 @@
-import six
 from json import dumps, loads
 from datetime import datetime

3 changes: 1 addition & 2 deletions scrapinghub/hubstorage/utils.py
@@ -1,4 +1,3 @@
-import six
 import time
 from queue import Empty

@@ -80,7 +79,7 @@ class iterqueue:
     it exposes an attribute "count" with the number of messages read
-    >>> from six.moves.queue import Queue
+    >>> from queue import Queue
     >>> q = Queue()
     >>> for x in range(10):
     ...     q.put(x)
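With the doctest now on the stdlib queue module, the drain-and-count pattern the docstring describes looks roughly like this (a sketch of assumed behavior; iterqueue's real signature may differ):

from queue import Empty, Queue

def drain(q):  # hypothetical stand-in for iterqueue
    # Yield items until the queue is empty, without blocking.
    while True:
        try:
            yield q.get_nowait()
        except Empty:
            return

q = Queue()
for x in range(10):
    q.put(x)
assert list(drain(q)) == list(range(10))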
2 changes: 1 addition & 1 deletion setup.py
@@ -26,7 +26,7 @@
     platforms=['Any'],
     packages=['scrapinghub', 'scrapinghub.client', 'scrapinghub.hubstorage'],
     package_data={'scrapinghub': ['VERSION']},
-    install_requires=['requests>=1.0', 'retrying>=1.3.3', 'six>=1.10.0'],
+    install_requires=['requests>=1.0', 'retrying>=1.3.3'],
     extras_require={'msgpack': mpack_required},
     python_requires='>=3.8',
     classifiers=[
7 changes: 3 additions & 4 deletions tests/client/test_frontiers.py
@@ -1,8 +1,7 @@
 import time
 from types import GeneratorType
 
-from six import string_types
-from six.moves import collections_abc
+from collections.abc import Iterable
 
 from scrapinghub.client.frontiers import Frontiers, Frontier, FrontierSlot
 from ..conftest import TEST_FRONTIER_SLOT
@@ -36,7 +35,7 @@ def test_frontiers(project, frontier, frontier_name):

     # test for iter() method
     frontiers_names = frontiers.iter()
-    assert isinstance(frontiers_names, collections_abc.Iterable)
+    assert isinstance(frontiers_names, Iterable)
     assert frontier_name in list(frontiers_names)
 
     # test for list() method
@@ -58,7 +57,7 @@ def test_frontier(project, frontier):
     _add_test_requests_to_frontier(frontier)
 
     slots = frontier.iter()
-    assert isinstance(slots, collections_abc.Iterable)
+    assert isinstance(slots, Iterable)
     assert TEST_FRONTIER_SLOT in list(slots)
 
     slots = frontier.list()
4 changes: 2 additions & 2 deletions tests/client/test_job.py
@@ -1,5 +1,5 @@
 import pytest
-from six.moves import collections_abc
+from collections.abc import Iterator
 
 from scrapinghub.client.items import Items
 from scrapinghub.client.jobs import Job
@@ -223,7 +223,7 @@ def test_metadata_delete(spider):
 def test_metadata_iter_list(spider):
     job = spider.jobs.run(meta={'meta1': 'data1', 'meta2': 'data2'})
     meta_iter = job.metadata.iter()
-    assert isinstance(meta_iter, collections_abc.Iterator)
+    assert isinstance(meta_iter, Iterator)
     meta_list = job.metadata.list()
     assert ('meta1', 'data1') in meta_list
     assert ('meta2', 'data2') in meta_list
4 changes: 2 additions & 2 deletions tests/client/test_projects.py
@@ -1,10 +1,10 @@
 import types
 from collections import defaultdict
+from collections.abc import Iterator
 
 import pytest
 import responses
 from requests.compat import urljoin
-from six.moves import collections_abc
 
 from scrapinghub import ScrapinghubClient
 from scrapinghub.client.activity import Activity
@@ -288,7 +288,7 @@ def test_settings_delete(project):
 def test_settings_iter_list(project):
     project.settings.set('job_runtime_limit', 24)
     settings_iter = project.settings.iter()
-    assert isinstance(settings_iter, collections_abc.Iterator)
+    assert isinstance(settings_iter, Iterator)
     settings_list = project.settings.list()
     assert ('job_runtime_limit', 24) in settings_list
     assert settings_list == list(settings_iter)
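Note the final assertion passes only because settings_iter has not been consumed before list() drains it; iterators are single-pass, e.g.:

from collections.abc import Iterator

it = iter([('job_runtime_limit', 24)])
assert isinstance(it, Iterator)
assert list(it) == [('job_runtime_limit', 24)]
assert list(it) == []  # a second pass yields nothing: iterators are single-use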
1 change: 0 additions & 1 deletion tests/client/test_spiders.py
@@ -2,7 +2,6 @@
 from collections import defaultdict
 
 import pytest
-from six import string_types
 
 from scrapinghub.client.exceptions import DuplicateJobError
 from scrapinghub.client.exceptions import BadRequest
1 change: 0 additions & 1 deletion tests/hubstorage/test_jobq.py
@@ -2,7 +2,6 @@
 Test JobQ
 """
 import os
-import six
 import pytest
 
 from scrapinghub.hubstorage.jobq import DuplicateJobError
1 change: 0 additions & 1 deletion tests/hubstorage/test_project.py
@@ -1,7 +1,6 @@
"""
Test Project
"""
import six
import json
import pytest
from requests.exceptions import HTTPError