From 1ca9522c011472d9527d7e19cc63d632a9e15407 Mon Sep 17 00:00:00 2001
From: James McKinney <26463+jpmckinney@users.noreply.github.com>
Date: Thu, 18 Jul 2024 14:09:17 -0400
Subject: [PATCH] chore: Run pyupgrade --py38-plus **/*.py

---
 scrapyd/_deprecate.py     |  2 +-
 scrapyd/basicauth.py      |  4 ++--
 scrapyd/config.py         |  7 +++----
 scrapyd/eggstorage.py     |  2 +-
 scrapyd/environ.py        |  2 +-
 scrapyd/jobstorage.py     |  9 ++++-----
 scrapyd/poller.py         |  2 +-
 scrapyd/scheduler.py      |  2 +-
 scrapyd/spiderqueue.py    |  2 +-
 scrapyd/sqlite.py         | 11 +++++------
 scrapyd/utils.py          |  4 ++--
 tests/test_environ.py     |  6 +++---
 tests/test_spiderqueue.py |  2 +-
 tests/test_sqlite.py      |  2 +-
 tests/test_utils.py       |  7 +++----
 15 files changed, 30 insertions(+), 34 deletions(-)

diff --git a/scrapyd/_deprecate.py b/scrapyd/_deprecate.py
index b8d64070..b6d30f89 100644
--- a/scrapyd/_deprecate.py
+++ b/scrapyd/_deprecate.py
@@ -21,7 +21,7 @@ def __init__(cls, name, bases, clsdict):
                    ['is', 'are'][min(2, len(offending_classes))-1]),
                 ScrapydDeprecationWarning,
             )
-        super(WarningMeta, cls).__init__(name, bases, clsdict)
+        super().__init__(name, bases, clsdict)
 
 
 def deprecate_class(cls):
diff --git a/scrapyd/basicauth.py b/scrapyd/basicauth.py
index 7ae4428c..145a3399 100644
--- a/scrapyd/basicauth.py
+++ b/scrapyd/basicauth.py
@@ -7,7 +7,7 @@
 
 
 @implementer(IRealm)
-class PublicHTMLRealm(object):
+class PublicHTMLRealm:
 
     def __init__(self, resource):
         self.resource = resource
@@ -19,7 +19,7 @@ def requestAvatar(self, avatarId, mind, *interfaces):
 
 
 @implementer(ICredentialsChecker)
-class StringCredentialsChecker(object):
+class StringCredentialsChecker:
     credentialInterfaces = (credentials.IUsernamePassword,)
 
     def __init__(self, username, password):
diff --git a/scrapyd/config.py b/scrapyd/config.py
index 36ac4e51..8766f269 100644
--- a/scrapyd/config.py
+++ b/scrapyd/config.py
@@ -1,5 +1,4 @@
 import glob
-import io
 from configparser import ConfigParser, NoOptionError, NoSectionError
 from os.path import expanduser
 from pkgutil import get_data
@@ -7,7 +6,7 @@
 from scrapy.utils.conf import closest_scrapy_cfg
 
 
-class Config(object):
+class Config:
     """A ConfigParser wrapper to support defaults when calling instance
     methods, and also tied to a single section"""
 
@@ -22,9 +21,9 @@ def __init__(self, values=None, extra_sources=()):
             sources.extend(extra_sources)
             for fname in sources:
                 try:
-                    with io.open(fname) as fp:
+                    with open(fname) as fp:
                         self.cp.read_file(fp)
-                except (IOError, OSError):
+                except OSError:
                     pass
         else:
             self.cp = ConfigParser(values)
diff --git a/scrapyd/eggstorage.py b/scrapyd/eggstorage.py
index db1ddd51..d3216d77 100644
--- a/scrapyd/eggstorage.py
+++ b/scrapyd/eggstorage.py
@@ -11,7 +11,7 @@
 
 
 @implementer(IEggStorage)
-class FilesystemEggStorage(object):
+class FilesystemEggStorage:
 
     def __init__(self, config):
         self.basedir = config.get('eggs_dir', 'eggs')
diff --git a/scrapyd/environ.py b/scrapyd/environ.py
index 97cdd540..436991ca 100644
--- a/scrapyd/environ.py
+++ b/scrapyd/environ.py
@@ -10,7 +10,7 @@
 
 
 @implementer(IEnvironment)
-class Environment(object):
+class Environment:
 
     def __init__(self, config, initenv=os.environ):
         self.dbs_dir = config.get('dbs_dir', 'dbs')
diff --git a/scrapyd/jobstorage.py b/scrapyd/jobstorage.py
index 7c941779..05bae657 100644
--- a/scrapyd/jobstorage.py
+++ b/scrapyd/jobstorage.py
@@ -15,7 +15,7 @@ def job_items_url(job):
     return f"/items/{job.project}/{job.spider}/{job.job}.jl"
 
 
-class Job(object):
+class Job:
     def __init__(self, project, spider, job=None, start_time=None, end_time=None):
         self.project = project
         self.spider = spider
@@ -25,7 +25,7 @@ def __init__(self, project, spider, job=None, start_time=None, end_time=None):
 
 
 @implementer(IJobStorage)
-class MemoryJobStorage(object):
+class MemoryJobStorage:
 
     def __init__(self, config):
         self.jobs = []
@@ -42,12 +42,11 @@ def __len__(self):
         return len(self.jobs)
 
     def __iter__(self):
-        for j in self.jobs:
-            yield j
+        yield from self.jobs
 
 
 @implementer(IJobStorage)
-class SqliteJobStorage(object):
+class SqliteJobStorage:
 
     def __init__(self, config):
         self.jstorage = SqliteFinishedJobs(sqlite_connection_string(config, 'jobs'), "finished_jobs")
diff --git a/scrapyd/poller.py b/scrapyd/poller.py
index b54041ec..d855acf2 100644
--- a/scrapyd/poller.py
+++ b/scrapyd/poller.py
@@ -6,7 +6,7 @@
 
 
 @implementer(IPoller)
-class QueuePoller(object):
+class QueuePoller:
 
     def __init__(self, config):
         self.config = config
diff --git a/scrapyd/scheduler.py b/scrapyd/scheduler.py
index 699e9812..7e3c5378 100644
--- a/scrapyd/scheduler.py
+++ b/scrapyd/scheduler.py
@@ -5,7 +5,7 @@
 
 
 @implementer(ISpiderScheduler)
-class SpiderScheduler(object):
+class SpiderScheduler:
 
     def __init__(self, config):
         self.config = config
diff --git a/scrapyd/spiderqueue.py b/scrapyd/spiderqueue.py
index 8525f19e..4818c7a8 100644
--- a/scrapyd/spiderqueue.py
+++ b/scrapyd/spiderqueue.py
@@ -6,7 +6,7 @@
 
 
 @implementer(ISpiderQueue)
-class SqliteSpiderQueue(object):
+class SqliteSpiderQueue:
 
     def __init__(self, config, project, table='spider_queue'):
         self.q = JsonSqlitePriorityQueue(sqlite_connection_string(config, project), table)
diff --git a/scrapyd/sqlite.py b/scrapyd/sqlite.py
index dad58c8d..53085582 100644
--- a/scrapyd/sqlite.py
+++ b/scrapyd/sqlite.py
@@ -5,7 +5,7 @@
 try:
     from collections.abc import MutableMapping
 except ImportError:
-    from collections import MutableMapping
+    from collections.abc import MutableMapping
 
 
 class JsonSqliteDict(MutableMapping):
@@ -44,8 +44,7 @@ def __len__(self):
         return self.conn.execute(sql).fetchone()[0]
 
     def __iter__(self):
-        for key in self.iterkeys():
-            yield key
+        yield from self.iterkeys()
 
     def iterkeys(self):
         sql = "SELECT key FROM %s" % self.table
@@ -75,7 +74,7 @@ def decode(self, obj):
         return json.loads(bytes(obj).decode('ascii'))
 
 
-class JsonSqlitePriorityQueue(object):
+class JsonSqlitePriorityQueue:
     """SQLite priority queue. It relies on SQLite concurrency support for
     providing atomic inter-process operations.
     """
@@ -141,7 +140,7 @@ def decode(self, text):
         return json.loads(bytes(text).decode('ascii'))
 
 
-class SqliteFinishedJobs(object):
+class SqliteFinishedJobs:
     """SQLite finished jobs. """
""" def __init__(self, database=None, table="finished_jobs"): @@ -167,7 +166,7 @@ def clear(self, finished_to_keep=None): return # nothing to delete w = "WHERE id <= " \ "(SELECT max(id) FROM (SELECT id FROM %s ORDER BY end_time LIMIT %d))" % (self.table, limit) - sql = "DELETE FROM %s %s" % (self.table, w) + sql = "DELETE FROM {} {}".format(self.table, w) self.conn.execute(sql) self.conn.commit() diff --git a/scrapyd/utils.py b/scrapyd/utils.py index de26b550..2301ca6a 100644 --- a/scrapyd/utils.py +++ b/scrapyd/utils.py @@ -118,10 +118,10 @@ def get_crawl_args(message): settings = msg.pop('settings', {}) for k, v in native_stringify_dict(msg, keys_only=False).items(): args += ['-a'] - args += ['%s=%s' % (k, v)] + args += ['{}={}'.format(k, v)] for k, v in native_stringify_dict(settings, keys_only=False).items(): args += ['-s'] - args += ['%s=%s' % (k, v)] + args += ['{}={}'.format(k, v)] return args diff --git a/tests/test_environ.py b/tests/test_environ.py index dd97aad1..5661b8a7 100644 --- a/tests/test_environ.py +++ b/tests/test_environ.py @@ -34,10 +34,10 @@ def test_get_environment_with_eggfile(self): self.assertEqual(env['SCRAPYD_SLOT'], '3') self.assertEqual(env['SCRAPYD_SPIDER'], 'myspider') self.assertEqual(env['SCRAPYD_JOB'], 'ID') - self.assert_(env['SCRAPYD_LOG_FILE'].endswith(os.path.join('mybot', 'myspider', 'ID.log'))) + self.assertTrue(env['SCRAPYD_LOG_FILE'].endswith(os.path.join('mybot', 'myspider', 'ID.log'))) if env.get('SCRAPYD_FEED_URI'): # Not compulsory - self.assert_(env['SCRAPYD_FEED_URI'].startswith('file://{}'.format(os.getcwd()))) - self.assert_(env['SCRAPYD_FEED_URI'].endswith(os.path.join('mybot', 'myspider', 'ID.jl'))) + self.assertTrue(env['SCRAPYD_FEED_URI'].startswith(f'file://{os.getcwd()}')) + self.assertTrue(env['SCRAPYD_FEED_URI'].endswith(os.path.join('mybot', 'myspider', 'ID.jl'))) self.assertNotIn('SCRAPY_SETTINGS_MODULE', env) def test_get_environment_with_no_items_dir(self): diff --git a/tests/test_spiderqueue.py b/tests/test_spiderqueue.py index 5f86e5a2..f7d9a480 100644 --- a/tests/test_spiderqueue.py +++ b/tests/test_spiderqueue.py @@ -18,7 +18,7 @@ def setUp(self): self.args = { 'arg1': 'val1', 'arg2': 2, - 'arg3': u'\N{SNOWMAN}', + 'arg3': '\N{SNOWMAN}', } self.msg = self.args.copy() self.msg['name'] = self.name diff --git a/tests/test_sqlite.py b/tests/test_sqlite.py index 232f4987..ec979c75 100644 --- a/tests/test_sqlite.py +++ b/tests/test_sqlite.py @@ -56,7 +56,7 @@ class JsonSqlitePriorityQueueTest(unittest.TestCase): supported_values = [ "native ascii str", - u"\xa3", + "\xa3", 123, 1.2, True, diff --git a/tests/test_utils.py b/tests/test_utils.py index 69767c6f..da502232 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from io import BytesIO from pkgutil import get_data @@ -27,14 +26,14 @@ def test_get_crawl_args(self): self.assertEqual(get_crawl_args(msg), ['lala']) - msg = {'_project': 'lolo', '_spider': 'lala', 'arg1': u'val1'} + msg = {'_project': 'lolo', '_spider': 'lala', 'arg1': 'val1'} cargs = get_crawl_args(msg) self.assertEqual(cargs, ['lala', '-a', 'arg1=val1']) self.assertTrue(all(isinstance(x, str) for x in cargs), cargs) def test_get_crawl_args_with_settings(self): - msg = {'_project': 'lolo', '_spider': 'lala', 'arg1': u'val1', 'settings': {'ONE': 'two'}} + msg = {'_project': 'lolo', '_spider': 'lala', 'arg1': 'val1', 'settings': {'ONE': 'two'}} cargs = get_crawl_args(msg) self.assertEqual(cargs, ['lala', '-a', 'arg1=val1', '-s', 'ONE=two']) @@ -110,7 
         self.add_test_version('mybotunicode.egg', 'mybotunicode', 'r1')
         spiders = get_spider_list('mybotunicode', pythonpath=get_pythonpath_scrapyd())
 
-        self.assertEqual(sorted(spiders), [u'araña1', u'araña2'])
+        self.assertEqual(sorted(spiders), ['araña1', 'araña2'])
 
     def test_failed_spider_list(self):
         self.add_test_version('mybot3.egg', 'mybot3', 'r1')
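
Reviewer note, not part of the commit above: in the scrapyd/sqlite.py hunk,
pyupgrade rewrites the deprecated "from collections import MutableMapping" to
the collections.abc form but leaves the enclosing try/except in place, so both
branches are now identical. The ImportError fallback only ever fired on
Python 2, and collections.MutableMapping was removed outright in Python 3.10,
so on the 3.8+ floor this patch targets the guard can be dropped. A minimal
sketch of that follow-up commit (a suggestion, not something pyupgrade emits):

    # scrapyd/sqlite.py, suggested follow-up (not in this commit):
    # the ImportError branch was only reachable on Python 2, where the ABCs
    # still lived in the collections module; a plain import suffices on 3.8+.
    from collections.abc import MutableMapping

The config.py hunk already applies the same kind of simplification: IOError
has been an alias of OSError since Python 3.3, so catching (IOError, OSError)
collapses to a bare "except OSError:" with no change in behavior.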
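If it helps review, here is a self-contained sanity check (illustrative only;
none of these names come from scrapyd) showing that each class of rewrite in
this patch is behavior-preserving on Python 3.8+:

    # Each assert pairs a pre-rewrite spelling with its pyupgrade replacement.
    from collections.abc import MutableMapping

    assert issubclass(dict, MutableMapping)    # same ABC `collections` re-exported
    assert u"\N{SNOWMAN}" == "\N{SNOWMAN}"     # the u-prefix is a no-op on Python 3
    assert IOError is OSError                  # alias since Python 3.3
    assert "%s=%s" % ("k", "v") == "{}={}".format("k", "v") == "k=v"

    def gen_old(xs):
        for x in xs:        # an explicit loop that only yields ...
            yield x

    def gen_new(xs):
        yield from xs       # ... is equivalent to a yield from

    assert list(gen_old([1, 2])) == list(gen_new([1, 2]))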