drop six requirement #438

Merged: 4 commits, Jan 17, 2024
Changes from all commits
15 changes: 7 additions & 8 deletions docs/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 # shub.image documentation build configuration file, created by
 # sphinx-quickstart on Tue May 3 16:20:52 2016.
@@ -52,9 +51,9 @@
 master_doc = 'index'

 # General information about the project.
-project = u'shub'
-copyright = u'{}, Scrapinghub'.format(YEAR)
-author = u'Scrapinghub'
+project = 'shub'
+copyright = f'{YEAR}, Scrapinghub'
+author = 'Scrapinghub'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -228,8 +227,8 @@
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'shub.tex', u'shub Documentation',
-     u'Scrapinghub', 'manual'),
+    (master_doc, 'shub.tex', 'shub Documentation',
+     'Scrapinghub', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at the top of
@@ -258,7 +257,7 @@
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'shub', u'shub Documentation',
+    (master_doc, 'shub', 'shub Documentation',
     [author], 1)
 ]

@@ -272,7 +271,7 @@
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'shub', u'shub Documentation',
+    (master_doc, 'shub', 'shub Documentation',
     author, 'shub', 'One line description of project.',
     'Miscellaneous'),
 ]
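The dropped u'' prefixes are no-ops on Python 3, and the one `.format()` call becomes an f-string. A quick illustrative check (the YEAR value is made up here; conf.py computes it elsewhere):

    YEAR = 2024  # hypothetical value
    assert u'shub' == 'shub'  # u-prefix is redundant on Python 3
    assert f'{YEAR}, Scrapinghub' == '{}, Scrapinghub'.format(YEAR)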
2 changes: 0 additions & 2 deletions freeze/hooks/hook-scrapinghub.py
@@ -1,5 +1,3 @@
-
-from __future__ import absolute_import
 from PyInstaller.utils.hooks import collect_data_files

 # Add the data files in the scrapinghub package (aka scrapinghub.VERSION).
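The `from __future__ import absolute_import` lines removed throughout this PR are dead code once Python 2 support is gone: Python 3 only has absolute-import semantics. A minimal sanity check of that claim:

    import sys
    import __future__

    # absolute_import became mandatory in Python 3.0, so on any supported
    # interpreter the __future__ import changes nothing:
    assert sys.version_info >= __future__.absolute_import.getMandatoryRelease()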
2 changes: 0 additions & 2 deletions freeze/hooks/hook-shub.py
@@ -1,5 +1,3 @@
-
-from __future__ import absolute_import
 from PyInstaller.utils.hooks import collect_submodules

 # Add as hidden imports all submodules from shub. This is because shub
1 change: 0 additions & 1 deletion freeze/hooks/runtime-hooks.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import os
 import sys

17 changes: 4 additions & 13 deletions freeze/tests/fakeserver.py
@@ -1,15 +1,9 @@
 #!/usr/bin/env python
-from __future__ import absolute_import
-from __future__ import print_function
 import json
 import multiprocessing
-import six
 from threading import Thread
 from argparse import ArgumentParser
-from six.moves.socketserver import TCPServer
-from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
-from six.moves import urllib
+from socketserver import TCPServer
+from http.server import SimpleHTTPRequestHandler
+import urllib.parse


 class Handler(SimpleHTTPRequestHandler):

@@ -19,10 +13,7 @@ def _do_any(self):
         query = urllib.parse.parse_qs(querystr)
         content_len = int(self.headers.get('content-length', 0))
         body = self.rfile.read(content_len)
-        if six.PY2:
-            headers = self.headers.getplist()
-        else:
-            headers = self.headers.get_params()
+        headers = self.headers.get_params()
         print(self)

         self.server.pipe.send({
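On Python 3 the handler's `self.headers` is an `email.message.Message`, so `get_params()` is always available and the `six.PY2` branch collapses. A small sketch of the same calls outside the server, with made-up header values:

    import urllib.parse
    from email.message import Message

    # Stand-in for self.headers on an incoming request:
    headers = Message()
    headers['Content-Type'] = 'text/plain; charset=utf-8'
    assert headers.get_params() == [('text/plain', ''), ('charset', 'utf-8')]

    # six.moves.urllib.parse is plain urllib.parse now:
    assert urllib.parse.parse_qs('page=2&format=json') == {'page': ['2'], 'format': ['json']}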
1 change: 0 additions & 1 deletion freeze/tests/run.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import os
 import re
 import shlex
2 changes: 0 additions & 2 deletions freeze/tests/testproject/testproject/items.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
 import scrapy


5 changes: 1 addition & 4 deletions freeze/tests/testproject/testproject/pipelines.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-
-
-class TestprojectPipeline(object):
+class TestprojectPipeline:
     def process_item(self, item, spider):
         return item
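On Python 3 every class is new-style, so inheriting from `object` explicitly adds nothing; the two spellings are interchangeable:

    class Implicit: pass
    class Explicit(object): pass

    # Identical ancestry on Python 3:
    assert Implicit.__mro__[1:] == Explicit.__mro__[1:] == (object,)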
1 change: 0 additions & 1 deletion freeze/tests/testproject/testproject/settings.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 BOT_NAME = 'testproject'
 SPIDER_MODULES = ['testproject.spiders']
 NEWSPIDER_MODULE = 'testproject.spiders'
2 changes: 0 additions & 2 deletions freeze/tests/testproject/testproject/spiders/example.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
 import scrapy


14 changes: 0 additions & 14 deletions requirements.in

This file was deleted.

43 changes: 0 additions & 43 deletions requirements.txt

This file was deleted.

6 changes: 1 addition & 5 deletions setup.py
@@ -1,13 +1,10 @@
-from __future__ import absolute_import
-import io
 import os
 from setuptools import setup, find_packages


 about = {}
 here = os.path.abspath(os.path.dirname(__file__))
-with io.open(os.path.join(here, 'shub', '__init__.py'),
-             mode='r', encoding='utf-8') as f:
+with open(os.path.join(here, 'shub', '__init__.py'), encoding='utf-8') as f:
     exec(f.read(), about)


@@ -39,7 +36,6 @@
         'retrying',
         'requests',
         'scrapinghub>=2.3.1',
-        'six>=1.7.0',
         'tqdm==4.55.1',
         'toml',
     ],
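`io.open()` was only needed on Python 2, where the built-in `open()` lacked an `encoding` parameter; on Python 3 the two are the same function, so the wrapper (and its import) can go:

    import io

    # On Python 3, io.open IS the built-in open:
    assert io.open is open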
2 changes: 0 additions & 2 deletions shub/__main__.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import os
 import sys

6 changes: 2 additions & 4 deletions shub/bootstrap.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import os
 import shutil
 import tempfile
@@ -9,7 +7,7 @@
 import requests
 import yaml
 from click.formatting import HelpFormatter
-from six import BytesIO
+from io import BytesIO

 from shub.exceptions import (
     BadParameterException, NotFoundException, RemoteErrorException)
@@ -73,7 +71,7 @@ def cli(project, target_dir):
             "to get a list of all available projects." % project)
     click.echo("Downloading custom image examples")
     repo_zip = get_repo_zip(EXAMPLE_REPO)
-    click.echo("Cloning project '%s' into %s" % (project, target_dir))
+    click.echo(f"Cloning project '{project}' into {target_dir}")
     unzip_project(repo_zip, project=projects[project], target_dir=target_dir)
4 changes: 1 addition & 3 deletions shub/cancel.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import click

 from scrapinghub import ScrapinghubAPIError
@@ -80,7 +78,7 @@ def cli(target_or_key, keys, force):


 def validate_job_key(project_id, short_key):
-    job_key = "%s/%s" % (project_id, short_key)
+    job_key = f"{project_id}/{short_key}"

     if len(short_key.split("/")) != 2:
         raise BadParameterException(
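The f-string is a drop-in replacement for the %-format here. Illustrating the key shape `validate_job_key()` builds and checks (the IDs are made up):

    project_id, short_key = 12345, "67/89"   # hypothetical IDs

    assert "%s/%s" % (project_id, short_key) == f"{project_id}/{short_key}" == "12345/67/89"
    assert len(short_key.split("/")) == 2    # short_key must be "<spider>/<job>"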
16 changes: 5 additions & 11 deletions shub/compat.py
@@ -1,6 +1,3 @@
-import six
-
-
 def to_unicode(text, encoding=None, errors='strict'):
     """Return the unicode representation of `text`.

@@ -10,9 +7,9 @@
     Otherwise, raise an error.

     """
-    if isinstance(text, six.text_type):
+    if isinstance(text, str):
         return text
-    if not isinstance(text, (six.binary_type, bytearray)):
+    if not isinstance(text, (bytes, bytearray)):
         raise TypeError('to_unicode must receive a bytes, str or unicode '
                         'object, got %s' % type(text).__name__)
     if encoding is None:
@@ -27,11 +24,11 @@
     If `text` is a ``unicode`` object, encode it using `encoding`.

     Otherwise, raise an error."""
-    if isinstance(text, six.binary_type):
+    if isinstance(text, bytes):
        [Codecov warning: added line shub/compat.py#L27 not covered by tests]
         return text
     if isinstance(text, bytearray):
         return bytes(text)
-    if not isinstance(text, six.text_type):
+    if not isinstance(text, str):
        [Codecov warning: added line shub/compat.py#L31 not covered by tests]
         raise TypeError('to_bytes must receive a unicode, str or bytes '
                         'object, got %s' % type(text).__name__)
     if encoding is None:
@@ -45,7 +42,4 @@
     ``str`` representation means ``bytes`` in PY2 and ``unicode`` in PY3.

     """
-    if six.PY2:
-        return to_bytes(text, encoding, errors)
-    else:
-        return to_unicode(text, encoding, errors)
+    return to_unicode(text, encoding, errors)
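With six gone, the helpers reduce to plain `str`/`bytes` checks, and the native-str helper is simply `to_unicode` on Python 3. A minimal usage sketch (encoding passed explicitly, since the default isn't visible in these hunks):

    from shub.compat import to_bytes, to_unicode

    assert to_unicode(b'caf\xc3\xa9', encoding='utf-8') == 'café'
    assert to_bytes('café', encoding='utf-8') == b'caf\xc3\xa9'
    assert to_unicode('already text') == 'already text'   # str passes through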
21 changes: 10 additions & 11 deletions shub/config.py
@@ -1,11 +1,10 @@
-from __future__ import absolute_import
 import netrc
 import os
 import warnings
 from collections import namedtuple
+from urllib.parse import urlparse, urlunparse

 import click
-import six
 import yaml

 from shub import DOCS_LINK, CONFIG_DOCS_LINK
@@ -26,7 +25,7 @@
 NETRC_PATH = os.path.expanduser('~/_netrc' if os.name == 'nt' else '~/.netrc')


-class ShubConfig(object):
+class ShubConfig:

     DEFAULT_ENDPOINT = 'https://app.zyte.com/api/'

@@ -54,9 +53,9 @@
     def _check_endpoints(self):
         """Check the endpoints. Send warnings if necessary."""
         for endpoint, url in self.endpoints.items():
-            parsed = six.moves.urllib.parse.urlparse(url)
+            parsed = urlparse(url)
             if parsed.netloc == 'staging.scrapinghub.com':
-                self.endpoints[endpoint] = six.moves.urllib.parse.urlunparse(
+                self.endpoints[endpoint] = urlunparse(
                    [Codecov warning: added line shub/config.py#L58 not covered by tests]
                     parsed._replace(netloc='app.zyte.com')
                 )
                 click.echo(
@@ -129,7 +128,7 @@
     def load_file(self, filename):
         """Load Scrapinghub configuration from YAML file. """
         try:
-            with open(filename, 'r') as f:
+            with open(filename) as f:
                 self.load(f)
         except ConfigParseException:
             raise ConfigParseException(
@@ -163,7 +162,7 @@
         targets = get_scrapycfg_targets(sources)
         self._load_scrapycfg_target('default', targets['default'])
         del targets['default']
-        for tname, t in six.iteritems(targets):
+        for tname, t in targets.items():
             self._load_scrapycfg_target(tname, t)
         self._check_endpoints()

@@ -382,7 +381,7 @@
 class APIkey(str):

     def __new__(cls, *args, **kwargs):
-        cls._inst = super(APIkey, cls).__new__(cls, *args, **kwargs)
+        cls._inst = super().__new__(cls, *args, **kwargs)
         return cls._inst

     def __init__(self, value=None):
@@ -399,8 +398,8 @@
 class Target(_Target):

     def __new__(cls, project_id, endpoint, apikey, *args, **kwargs):
-        cls._inst = super(Target, cls).__new__(cls, project_id, endpoint,
-                                               APIkey(apikey), *args, **kwargs)
+        cls._inst = super().__new__(cls, project_id, endpoint,
+                                    APIkey(apikey), *args, **kwargs)
         return cls._inst

@@ -438,7 +437,7 @@
     try:
         info = netrc.netrc(NETRC_PATH)
         netrc_key, _, _ = info.authenticators("scrapinghub.com")
-    except (IOError, TypeError):
+    except (OSError, TypeError):
         netrc_key = None
     if netrc_key:
         conf.apikeys['default'] = netrc_key
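`six.moves.urllib.parse` maps one-to-one onto `urllib.parse`, and `IOError` has been an alias of `OSError` since Python 3.3, so both config.py changes are mechanical. A sketch of the endpoint rewrite `_check_endpoints` performs:

    from urllib.parse import urlparse, urlunparse

    url = 'https://staging.scrapinghub.com/api/'
    parsed = urlparse(url)
    if parsed.netloc == 'staging.scrapinghub.com':
        url = urlunparse(parsed._replace(netloc='app.zyte.com'))
    assert url == 'https://app.zyte.com/api/'

    assert IOError is OSError   # the except clause change is behavior-preserving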