-            'Some text. A link'
-            ". And more."
-        ),
+        "Some text. A link. And more.",
     )
@@ -170,7 +167,6 @@ def do_download(self, status=200, content_type="image/jpeg"):
"Mocks requests and calls filedownloader.download()"
# Open the image we're going to pretend we're fetching from the URL:
with open("tests/core/fixtures/images/marmite.jpg", "rb") as img1:
-
responses.add(
responses.GET,
self.url,
@@ -227,7 +223,7 @@ def test_make_filename_from_url(self):
def test_make_filename_from_content_disposition(self):
"If URL has no filename, should use the Content-Disposition filename."
filename = filedownloader.make_filename(
- "https://www.flickr.com/photos/philgyford/26348530105/play/orig/2b5f3e0919/", # noqa: E501
+ "https://www.flickr.com/photos/philgyford/26348530105/play/orig/2b5f3e0919/",
{"Content-Disposition": "attachment; filename=26348530105.mov"},
)
self.assertEqual(filename, "26348530105.mov")
diff --git a/tests/flickr/test_fetch.py b/tests/flickr/test_fetch.py
index e280ec8..e1e2224 100644
--- a/tests/flickr/test_fetch.py
+++ b/tests/flickr/test_fetch.py
@@ -1,12 +1,10 @@
import json
from datetime import datetime, timezone
-from urllib.parse import quote_plus
+from urllib.parse import parse_qs, quote_plus
from zoneinfo import ZoneInfo
import responses
-import six
from django.test import TestCase
-from six.moves.urllib.parse import parse_qs
from ditto.flickr.fetch.savers import SaveUtilsMixin
@@ -35,7 +33,7 @@ def test_api_datetime_to_datetime_custom_tz(self):
def test_unixtime_to_datetime(self):
api_time = "1093459273"
time1 = SaveUtilsMixin()._unixtime_to_datetime(api_time)
- time2 = datetime.utcfromtimestamp(int(api_time)).replace(tzinfo=timezone.utc)
+ time2 = datetime.fromtimestamp(int(api_time), tz=timezone.utc)
self.assertEqual(time1, time2)
@@ -58,23 +56,22 @@ class FlickrFetchTestCase(TestCase):
}
def setUp(self):
- super(FlickrFetchTestCase, self).setUp()
+ super().setUp()
self.mock = responses.RequestsMock(assert_all_requests_are_fired=True)
self.mock.start()
def tearDown(self):
self.mock.stop()
self.mock.reset()
- super(FlickrFetchTestCase, self).tearDown()
+ super().tearDown()
def load_raw_fixture(self, method):
"""Makes the JSON response to a call to the API.
method -- Method name used in self.flickr_fixtures.
Returns the JSON text.
"""
- json_file = open("%s%s" % (self.fixture_path, self.flickr_fixtures[method]))
- json_data = json_file.read()
- json_file.close()
+ with open(f"{self.fixture_path}{self.flickr_fixtures[method]}") as f:
+ json_data = f.read()
return json_data
def load_fixture(self, method):
@@ -84,6 +81,7 @@ def load_fixture(self, method):
def expect(
self,
+ *,
params=None,
body="",
status=200,
@@ -92,7 +90,7 @@ def expect(
match_querystring=True,
):
"""Mocks an expected HTTP query with Responses.
- Mostly copied from https://github.com/sybrenstuvel/flickrapi/blob/master/tests/test_flickrapi.py # noqa: E501
+ Mostly copied from https://github.com/sybrenstuvel/flickrapi/blob/master/tests/test_flickrapi.py
"""
urlbase = "https://api.flickr.com/services/rest/"
@@ -108,11 +106,11 @@ def expect(
# The parameters should be on the URL.
qp = quote_plus
qs = "&".join(
- "%s=%s" % (qp(key), qp(six.text_type(value).encode("utf-8")))
+ "{}={}".format(qp(key), qp(str(value).encode("utf-8")))
for key, value in sorted(params.items())
)
if qs:
- url = "%s?%s" % (urlbase, qs)
+ url = f"{urlbase}?{qs}"
self.mock.add(
method=method,
@@ -197,6 +195,6 @@ def expect_response(self, method, body=None, params=None):
params.setdefault(k, v)
# Add the param specifying the API method:
- params.setdefault("method", "flickr.{}".format(method))
+ params.setdefault("method", f"flickr.{method}")
self.expect(params=params, body=body)
diff --git a/tests/flickr/test_fetch_fetchers.py b/tests/flickr/test_fetch_fetchers.py
index c58e96f..b00d9d7 100644
--- a/tests/flickr/test_fetch_fetchers.py
+++ b/tests/flickr/test_fetch_fetchers.py
@@ -103,7 +103,7 @@ def test_failure_with_no_child_save_results(self, call_api):
class UserIdFetcherTestCase(FlickrFetchTestCase):
def setUp(self):
- super(UserIdFetcherTestCase, self).setUp()
+ super().setUp()
self.account = AccountFactory(api_key="1234", api_secret="9876")
def test_inherits_from_fetcher(self):
@@ -128,7 +128,7 @@ def test_returns_id(self):
class UserFetcherTestCase(FlickrFetchTestCase):
def setUp(self):
- super(UserFetcherTestCase, self).setUp()
+ super().setUp()
self.account = AccountFactory(api_key="1234", api_secret="9876")
def test_inherits_from_fetcher(self):
@@ -225,7 +225,7 @@ class PhotosFetcherTestCase(FlickrFetchTestCase):
photos."""
def setUp(self):
- super(PhotosFetcherTestCase, self).setUp()
+ super().setUp()
account = AccountFactory(api_key="1234", api_secret="9876")
self.fetcher = PhotosFetcher(account=account)
@@ -367,7 +367,7 @@ def test_fetch_photo_exif_throws_exception(self):
class RecentPhotosFetcherTestCase(FlickrFetchTestCase):
def setUp(self):
- super(RecentPhotosFetcherTestCase, self).setUp()
+ super().setUp()
account = AccountFactory(
api_key="1234", api_secret="9876", user=UserFactory(nsid="35034346050@N01")
)
@@ -430,7 +430,7 @@ def test_calls_fetch_pages(self, fetch_pages):
def test_fetches_recent_days(self, save_photo, fetch_extra):
"Should only ask for photos from recent days, if number of days is set."
self.expect_response(
- "people.getPhotos", params={"min_upload_date": "1439294400"}
+ "people.getPhotos", params={"min_upload_date": "1439265600"}
)
with patch("time.sleep"):
@@ -456,7 +456,7 @@ def test_saves_photos(
class PhotosetsFetcherTestCase(FlickrFetchTestCase):
def setUp(self):
- super(PhotosetsFetcherTestCase, self).setUp()
+ super().setUp()
account = AccountFactory(
api_key="1234", api_secret="9876", user=UserFactory(nsid="35034346050@N01")
)
diff --git a/tests/flickr/test_fetch_filesfetchers.py b/tests/flickr/test_fetch_filesfetchers.py
index 850cd34..39b1862 100644
--- a/tests/flickr/test_fetch_filesfetchers.py
+++ b/tests/flickr/test_fetch_filesfetchers.py
@@ -158,8 +158,7 @@ def test_saves_downloaded_photo_file(self, download):
nsid = nsid[: nsid.index("@")]
self.assertEqual(
self.photo_2.original_file.name,
- "flickr/%s/%s/%s/photos/2015/08/14/%s"
- % (
+ "flickr/{}/{}/{}/photos/2015/08/14/{}".format(
nsid[-4:-2],
nsid[-2:],
self.photo_2.user.nsid.replace("@", ""),
@@ -180,8 +179,7 @@ def test_saves_downloaded_video_file(self, download):
nsid = nsid[: nsid.index("@")]
self.assertEqual(
self.video_2.video_original_file.name,
- "flickr/%s/%s/%s/photos/2015/08/14/%s"
- % (
+ "flickr/{}/{}/{}/photos/2015/08/14/{}".format(
nsid[-4:-2],
nsid[-2:],
self.video_2.user.nsid.replace("@", ""),
diff --git a/tests/flickr/test_fetch_savers.py b/tests/flickr/test_fetch_savers.py
index cc9f27f..a133c61 100644
--- a/tests/flickr/test_fetch_savers.py
+++ b/tests/flickr/test_fetch_savers.py
@@ -22,7 +22,7 @@ def make_user_object(self, user_data):
""" "Creates/updates a User from API data, then fetches that User from
the DB and returns it.
"""
- fetch_time = datetime.utcnow().replace(tzinfo=timezone.utc)
+ fetch_time = datetime.now(tz=timezone.utc)
UserSaver().save_user(user_data, fetch_time)
return User.objects.get(nsid="35034346050@N01")
@@ -33,9 +33,7 @@ def test_saves_correct_user_data(self):
user_data = self.load_fixture("people.getInfo")
user = self.make_user_object(user_data["person"])
- self.assertEqual(
- user.fetch_time, datetime.utcnow().replace(tzinfo=timezone.utc)
- )
+ self.assertEqual(user.fetch_time, datetime.now(tz=timezone.utc))
self.assertEqual(user.raw, json.dumps(user_data["person"]))
self.assertEqual(user.nsid, "35034346050@N01")
self.assertTrue(user.is_pro)
@@ -50,7 +48,7 @@ def test_saves_correct_user_data(self):
self.assertEqual(user.photos_count, 2876)
self.assertEqual(
user.photos_first_date,
- datetime.utcfromtimestamp(1093459273).replace(tzinfo=timezone.utc),
+ datetime.fromtimestamp(1093459273, tz=timezone.utc),
)
self.assertEqual(
user.photos_first_date_taken,
@@ -99,7 +97,7 @@ def make_photo_object(self, photo_data):
def make_photo_data(self):
"""Makes the dict of data that photo_save() expects, based on API data."""
return {
- "fetch_time": datetime.utcnow().replace(tzinfo=timezone.utc),
+ "fetch_time": datetime.now(tz=timezone.utc),
"user_obj": UserFactory(nsid="35034346050@N01"),
"info": self.load_fixture("photos.getInfo")["photo"],
"exif": self.load_fixture("photos.getExif")["photo"],
@@ -121,9 +119,7 @@ def test_saves_correct_photo_data(self, save_tags):
)
self.assertFalse(photo.is_private)
self.assertEqual(photo.summary, "Some test HTML. And another paragraph.")
- self.assertEqual(
- photo.fetch_time, datetime.utcnow().replace(tzinfo=timezone.utc)
- )
+ self.assertEqual(photo.fetch_time, datetime.now(tz=timezone.utc))
self.assertEqual(
photo.post_time,
datetime.strptime("2016-03-28 16:05:05", "%Y-%m-%d %H:%M:%S").replace(
@@ -379,7 +375,7 @@ def make_photoset_object(self, photoset_data):
def make_photoset_data(self):
"""Makes the dict of data that photo_save() expects, based on API data."""
return {
- "fetch_time": datetime.utcnow().replace(tzinfo=timezone.utc),
+ "fetch_time": datetime.now(tz=timezone.utc),
"user_obj": UserFactory(nsid="35034346050@N01"),
"photoset": self.load_fixture("photosets.getList")["photosets"]["photoset"][
0
@@ -392,9 +388,7 @@ def test_saves_correct_photoset_data(self):
photoset_data = self.make_photoset_data()
photoset = self.make_photoset_object(photoset_data)
- self.assertEqual(
- photoset.fetch_time, datetime.utcnow().replace(tzinfo=timezone.utc)
- )
+ self.assertEqual(photoset.fetch_time, datetime.now(tz=timezone.utc))
self.assertEqual(photoset.user, photoset_data["user_obj"])
self.assertEqual(photoset.flickr_id, 72157665648859705)
diff --git a/tests/flickr/test_management_commands.py b/tests/flickr/test_management_commands.py
index 9ad2464..5740ad8 100644
--- a/tests/flickr/test_management_commands.py
+++ b/tests/flickr/test_management_commands.py
@@ -93,7 +93,7 @@ def setUp(self):
self.out_err = StringIO()
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_sends_all_true_to_fetcher_with_account(self, fetcher):
call_command("fetch_flickr_originals", "--all", account="99999999999@N99")
@@ -101,7 +101,7 @@ def test_sends_all_true_to_fetcher_with_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with(fetch_all=True)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_sends_all_true_to_fetcher_no_account(self, fetcher):
call_command("fetch_flickr_originals", "--all")
@@ -109,7 +109,7 @@ def test_sends_all_true_to_fetcher_no_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with(fetch_all=True)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_sends_all_false_to_fetcher(self, fetcher):
call_command("fetch_flickr_originals")
@@ -117,7 +117,7 @@ def test_sends_all_false_to_fetcher(self, fetcher):
fetcher.return_value.fetch.assert_called_with(fetch_all=False)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_success_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -127,7 +127,7 @@ def test_success_output(self, fetcher):
self.assertIn("Phil Gyford: Fetched 33 Files", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_success_output_verbosity_0(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -137,7 +137,7 @@ def test_success_output_verbosity_0(self, fetcher):
self.assertEqual("", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_originals.OriginalFilesMultiAccountFetcher"
)
def test_error_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -179,7 +179,7 @@ def test_fail_with_days_and_end(self):
# Sending --days argument
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_days_to_fetcher_with_account(self, fetcher):
call_command("fetch_flickr_photos", account="99999999999@N99", days="4")
@@ -187,7 +187,7 @@ def test_sends_days_to_fetcher_with_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with(days=4, start=None, end=None)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_days_to_fetcher_no_account(self, fetcher):
call_command("fetch_flickr_photos", days="4")
@@ -195,7 +195,7 @@ def test_sends_days_to_fetcher_no_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with(days=4, start=None, end=None)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_all_to_fetcher_with_account(self, fetcher):
call_command("fetch_flickr_photos", account="99999999999@N99", days="all")
@@ -205,7 +205,7 @@ def test_sends_all_to_fetcher_with_account(self, fetcher):
# Sending --start argument
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_start_to_fetcher_with_account(self, fetcher):
call_command(
@@ -217,7 +217,7 @@ def test_sends_start_to_fetcher_with_account(self, fetcher):
)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_start_to_fetcher_with_no_account(self, fetcher):
call_command("fetch_flickr_photos", start="2022-01-31")
@@ -229,7 +229,7 @@ def test_sends_start_to_fetcher_with_no_account(self, fetcher):
# Sending --end argument
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_end_to_fetcher_with_account(self, fetcher):
call_command("fetch_flickr_photos", account="99999999999@N99", end="2022-01-31")
@@ -239,7 +239,7 @@ def test_sends_end_to_fetcher_with_account(self, fetcher):
)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_end_to_fetcher_with_no_account(self, fetcher):
call_command("fetch_flickr_photos", end="2022-01-31")
@@ -251,7 +251,7 @@ def test_sends_end_to_fetcher_with_no_account(self, fetcher):
# Sending --start and --end arguments
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_start_and_end_to_fetcher_with_account(self, fetcher):
call_command(
@@ -266,7 +266,7 @@ def test_sends_start_and_end_to_fetcher_with_account(self, fetcher):
)
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_sends_start_and_end_to_fetcher_with_no_account(self, fetcher):
call_command("fetch_flickr_photos", start="2022-01-31", end="2022-02-14")
@@ -278,7 +278,7 @@ def test_sends_start_and_end_to_fetcher_with_no_account(self, fetcher):
# Outputs
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_success_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -288,7 +288,7 @@ def test_success_output(self, fetcher):
self.assertIn("Phil Gyford: Fetched 40 Photos", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_success_output_verbosity_0(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -298,7 +298,7 @@ def test_success_output_verbosity_0(self, fetcher):
self.assertEqual("", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photos.RecentPhotosMultiAccountFetcher"
)
def test_error_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -318,7 +318,7 @@ def setUp(self):
self.out_err = StringIO()
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher"
)
def test_calls_fetcher_with_account(self, fetcher):
call_command("fetch_flickr_photosets", account="99999999999@N99")
@@ -326,7 +326,7 @@ def test_calls_fetcher_with_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with()
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher"
)
def test_calls_fetcher_with_no_account(self, fetcher):
call_command("fetch_flickr_photosets")
@@ -334,7 +334,7 @@ def test_calls_fetcher_with_no_account(self, fetcher):
fetcher.return_value.fetch.assert_called_with()
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher"
)
def test_success_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -344,7 +344,7 @@ def test_success_output(self, fetcher):
self.assertIn("Phil Gyford: Fetched 40 Photosets", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher"
)
def test_success_output_verbosity_0(self, fetcher):
fetcher.return_value.fetch.return_value = [
@@ -354,7 +354,7 @@ def test_success_output_verbosity_0(self, fetcher):
self.assertEqual("", self.out.getvalue())
@patch(
- "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher" # noqa: E501
+ "ditto.flickr.management.commands.fetch_flickr_photosets.PhotosetsMultiAccountFetcher"
)
def test_error_output(self, fetcher):
fetcher.return_value.fetch.return_value = [
diff --git a/tests/flickr/test_models.py b/tests/flickr/test_models.py
index d30dad8..3733177 100644
--- a/tests/flickr/test_models.py
+++ b/tests/flickr/test_models.py
@@ -439,7 +439,7 @@ def test_image_urls(self):
def test_image_url_with_invalid_size(self):
with self.assertRaises(AttributeError):
- self.photo.an_invalid_url
+ _url = self.photo.an_invalid_url
def test_video_urls_video(self):
"Videos should return the correct URLs from the video url properties."
@@ -448,20 +448,15 @@ def test_video_urls_video(self):
media="video", permalink=permalink, secret=9876, original_secret="7777"
)
for size, prop in self.video_sizes.items():
- if size == "orig":
- secret = 7777
- else:
- secret = 9876
+ secret = 7777 if size == "orig" else 9876
- self.assertEqual(
- getattr(photo, prop), "%splay/%s/%s/" % (permalink, size, secret)
- )
+ self.assertEqual(getattr(photo, prop), f"{permalink}play/{size}/{secret}/")
def test_video_urls_photo(self):
"Photos should have None for all video URLs."
photo = PhotoFactory(media="photo")
- for size, prop in self.video_sizes.items():
+ for _size, prop in self.video_sizes.items():
self.assertIsNone(getattr(photo, prop))
@@ -510,20 +505,20 @@ def test_medium_url(self):
"Has a different format to most other image sizes."
self.assertRegex(
self.photo.medium_url,
- r"CACHE/images/flickr/34/56/123456N01/photos/2015/08/14/example.[^\.]+\.jpg", # noqa: E501
+ r"CACHE/images/flickr/34/56/123456N01/photos/2015/08/14/example.[^\.]+\.jpg",
)
def test_image_urls(self):
"""Test all but the Original and Medium image URL properties."""
- for size, prop in self.photo_sizes.items():
+ for _size, prop in self.photo_sizes.items():
self.assertRegex(
getattr(self.photo, prop),
- r"CACHE/images/flickr/34/56/123456N01/photos/2015/08/14/example.[^\.]+\.jpg", # noqa: E501
+ r"CACHE/images/flickr/34/56/123456N01/photos/2015/08/14/example.[^\.]+\.jpg",
)
def test_image_url_with_invalid_size(self):
with self.assertRaises(AttributeError):
- self.photo.an_invalid_url
+ _url = self.photo.an_invalid_url
def test_video_urls_video(self):
"Should currently return the remote URL for videos."
@@ -532,14 +527,8 @@ def test_video_urls_video(self):
media="video", permalink=permalink, secret=9876, original_secret="7777"
)
for size, prop in self.video_sizes.items():
- if size == "orig":
- secret = 7777
- else:
- secret = 9876
-
- self.assertEqual(
- getattr(photo, prop), "%splay/%s/%s/" % (permalink, size, secret)
- )
+ secret = 7777 if size == "orig" else 9876
+ self.assertEqual(getattr(photo, prop), f"{permalink}play/{size}/{secret}/")
def test_image_url_when_original_missing(self):
"If we have no original file, we should use the 'missing' image."
diff --git a/tests/flickr/test_templatetags.py b/tests/flickr/test_templatetags.py
index d79129e..d70d494 100644
--- a/tests/flickr/test_templatetags.py
+++ b/tests/flickr/test_templatetags.py
@@ -73,8 +73,8 @@ def setUp(self):
self.photos_1 = PhotoFactory.create_batch(2, user=user_1)
self.photos_2 = PhotoFactory.create_batch(3, user=user_2)
- taken_time = datetime(2014, 3, 18, 12, 0, 0).replace(tzinfo=timezone.utc)
- post_time = datetime(2015, 3, 18, 12, 0, 0).replace(tzinfo=timezone.utc)
+ taken_time = datetime(2014, 3, 18, 12, 0, 0, tzinfo=timezone.utc)
+ post_time = datetime(2015, 3, 18, 12, 0, 0, tzinfo=timezone.utc)
self.photos_1[0].taken_time = taken_time
self.photos_1[0].post_time = post_time
self.photos_1[0].save()
diff --git a/tests/flickr/test_views.py b/tests/flickr/test_views.py
index 2234229..68b4f39 100644
--- a/tests/flickr/test_views.py
+++ b/tests/flickr/test_views.py
@@ -294,7 +294,7 @@ def setUp(self):
TaggedPhotoFactory(content_object=self.cod_photo, tag=fish_tag)
TaggedPhotoFactory(content_object=self.cod_photo, tag=cod_tag)
- def createDogPhoto(self):
+ def create_dog_photo(self):
"Creates a photo tagged with 'dog' and 'mammal'."
self.dog_photo = PhotoFactory(title="Dog")
mammal_tag = TagFactory(slug="mammal")
@@ -344,7 +344,7 @@ def test_tag_detail_context(self):
"Sends the correct data to the templates"
AccountFactory.create_batch(2)
# The 'fish' tag page shouldn't include this dog photo:
- self.createDogPhoto()
+ self.create_dog_photo()
response = self.client.get(
reverse("flickr:tag_detail", kwargs={"slug": "fish"})
)
@@ -422,7 +422,7 @@ def test_user_tag_detail_templates(self):
def test_user_tag_detail_context(self):
"Sends the correct data to templates"
- self.createDogPhoto()
+ self.create_dog_photo()
# Ensure the cod, carp and dog photos are all owned by the same user.
# Only the carp and cod pics should show up on the user's 'fish' page.
@@ -523,7 +523,6 @@ def test_user_tag_detail_fails_2(self):
class PhotosetViewTests(TestCase):
def setUp(self):
-
self.user_1 = UserFactory(nsid="1234567890@N01")
self.account_1 = AccountFactory(user=self.user_1)
# Three photos, one of which is private.
diff --git a/tests/lastfm/test_fetch.py b/tests/lastfm/test_fetch.py
index 7b3d68f..bb17b50 100644
--- a/tests/lastfm/test_fetch.py
+++ b/tests/lastfm/test_fetch.py
@@ -131,9 +131,8 @@ def load_raw_fixture(self, fixture_name):
fixture_name -- eg 'messages' to load 'messages.json'.
Returns the JSON text.
"""
- json_file = open("%s%s.json" % (self.fixture_path, fixture_name))
- json_data = json_file.read()
- json_file.close()
+ with open(f"{self.fixture_path}{fixture_name}.json") as f:
+ json_data = f.read()
return json_data
def load_fixture(self, fixture_name):
diff --git a/tests/lastfm/test_utils.py b/tests/lastfm/test_utils.py
index 6a5924a..df733ce 100644
--- a/tests/lastfm/test_utils.py
+++ b/tests/lastfm/test_utils.py
@@ -21,6 +21,6 @@ def test_changed_characters_1(self):
def test_changed_characters_2(self):
self.assertEqual(
- slugify_name('" < > \ ^ ` { | }'), # noqa: W605
+ slugify_name(r'" < > \ ^ ` { | }'),
"%22+%3C+%3E+%5C%5C+%5E+%60+%7B+%7C+%7D",
)
diff --git a/tests/lastfm/test_views.py b/tests/lastfm/test_views.py
index 722b94c..2995c02 100644
--- a/tests/lastfm/test_views.py
+++ b/tests/lastfm/test_views.py
@@ -446,7 +446,7 @@ def test_7_days(self):
self.assertEqual(response.context["track_list"][0].scrobble_count, 1)
-class UserCommonTests(object):
+class UserCommonTests:
"""Parent for all user-specific views.
Doesn't inherit from TestCase because we don't want the tests in this class
to run, only in its child classes.
@@ -518,12 +518,10 @@ def test_404s(self):
class UserDetailViewTestCase(UserCommonTests, TestCase):
-
view_name = "user_detail"
class UserAlbumListViewTestCase(UserCommonTests, TestCase):
-
view_name = "user_album_list"
def test_context_albums(self):
@@ -539,7 +537,6 @@ def test_context_albums(self):
class UserArtistListViewTestCase(UserCommonTests, TestCase):
-
view_name = "user_artist_list"
def test_context_albums(self):
@@ -557,7 +554,6 @@ def test_context_albums(self):
class UserScrobbleListViewTestCase(UserCommonTests, TestCase):
-
view_name = "user_scrobble_list"
def test_context_scrobbles(self):
@@ -571,7 +567,6 @@ def test_context_scrobbles(self):
class UserTrackListViewTestCase(UserCommonTests, TestCase):
-
view_name = "user_track_list"
def test_context_tracks(self):
diff --git a/tests/pinboard/test_fetch.py b/tests/pinboard/test_fetch.py
index 05221da..d8d4c55 100644
--- a/tests/pinboard/test_fetch.py
+++ b/tests/pinboard/test_fetch.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import json
from datetime import datetime, timezone
from unittest.mock import patch
@@ -64,9 +63,9 @@ def make_success_body(
`method` is 'get' or 'recent' or 'all'.
"""
posts = []
- for n in range(0, num_posts):
+ for n in range(num_posts):
posts.append(
- '{"href":"http:\\/\\/example%s.com\\/","description":"My description %s","extended":"My extended %s.","meta":"abcdef1234567890abcdef1234567890","hash":"1234567890abcdef1234567890abcdef","time":"%sT09:48:31Z","shared":"yes","toread":"no","tags":"tag1 tag2 tag3"}' # noqa: E501
+ '{"href":"http:\\/\\/example%s.com\\/","description":"My description %s","extended":"My extended %s.","meta":"abcdef1234567890abcdef1234567890","hash":"1234567890abcdef1234567890abcdef","time":"%sT09:48:31Z","shared":"yes","toread":"no","tags":"tag1 tag2 tag3"}' # noqa: E501, UP031
% (n, n, n, post_date)
)
@@ -75,7 +74,7 @@ def make_success_body(
if method == "all":
return posts_json
else:
- return '{"date":"%sT09:48:31Z","user":"%s","posts":%s}\t\n' % (
+ return '{{"date":"{}T09:48:31Z","user":"{}","posts":{}}}\t\n'.format(
post_date,
username,
posts_json,
@@ -209,11 +208,11 @@ def test_no_bom(self):
It didn't until June 2020 when Pinboard upgraded the server
and something happened to add a BOM to API responses.
"""
- json_file = open("tests/pinboard/fixtures/api/bookmarks_no_bom.json")
- self.add_response(body=json_file.read())
- result = DateBookmarksFetcher().fetch(
- post_date="2015-06-18", username="philgyford"
- )
+ with open("tests/pinboard/fixtures/api/bookmarks_no_bom.json") as f:
+ self.add_response(body=f.read())
+ result = DateBookmarksFetcher().fetch(
+ post_date="2015-06-18", username="philgyford"
+ )
self.assertEqual(result[0]["account"], "philgyford")
self.assertTrue(result[0]["success"])
self.assertEqual(result[0]["fetched"], 1)
@@ -224,11 +223,11 @@ def test_has_bom(self):
It didn't until June 2020 when Pinboard upgraded the server
and something happened to add a BOM to API responses.
"""
- json_file = open("tests/pinboard/fixtures/api/bookmarks_bom.json")
- self.add_response(body=json_file.read())
- result = DateBookmarksFetcher().fetch(
- post_date="2015-06-18", username="philgyford"
- )
+ with open("tests/pinboard/fixtures/api/bookmarks_bom.json") as f:
+ self.add_response(body=f.read())
+ result = DateBookmarksFetcher().fetch(
+ post_date="2015-06-18", username="philgyford"
+ )
self.assertEqual(result[0]["account"], "philgyford")
self.assertTrue(result[0]["success"])
self.assertEqual(result[0]["fetched"], 1)
@@ -262,10 +261,9 @@ def get_bookmarks_from_json(self):
what should be a list of correctly-parsed data about Bookmarks, ready
to make Bookmark objects out of.
"""
- json_file = open(self.api_fixture)
- json_data = json_file.read()
- bookmarks_data = BookmarksFetcher()._parse_response("date", json_data)
- json_file.close()
+ with open(self.api_fixture) as f:
+ json_data = f.read()
+ bookmarks_data = BookmarksFetcher()._parse_response("date", json_data)
return {"json": json_data, "bookmarks": bookmarks_data}
@@ -310,7 +308,7 @@ def test_save_bookmarks(self):
Bookmark objects.
"""
account = Account.objects.get(pk=1)
- fetch_time = datetime.utcnow().replace(tzinfo=timezone.utc)
+ fetch_time = datetime.now(tz=timezone.utc)
bookmarks_from_json = self.get_bookmarks_from_json()
bookmarks_data = bookmarks_from_json["bookmarks"]
@@ -331,8 +329,8 @@ def test_save_bookmarks(self):
self.assertEqual(
bookmarks[1].fetch_time,
- datetime.strptime("2015-07-01 12:00:00", "%Y-%m-%d %H:%M:%S").replace(
- tzinfo=timezone.utc
+ datetime.strptime("2015-07-01 04:00:00", "%Y-%m-%d %H:%M:%S").astimezone(
+ timezone.utc
),
)
self.assertEqual(
@@ -347,8 +345,8 @@ def test_save_bookmarks(self):
self.assertEqual(bookmarks[1].url, "http://fontello.com/")
self.assertEqual(
bookmarks[1].post_time,
- datetime.strptime("2015-06-18T09:48:31Z", "%Y-%m-%dT%H:%M:%SZ").replace(
- tzinfo=timezone.utc
+ datetime.strptime("2015-06-18T09:48:31Z", "%Y-%m-%dT%H:%M:%SZ").astimezone(
+ timezone.utc
),
)
self.assertEqual(
@@ -374,7 +372,7 @@ def test_update_bookmarks(self):
"""Ensure that when saving a Bookmark that already exists, we update
it."""
account = Account.objects.get(pk=1)
- fetch_time = datetime.utcnow().replace(tzinfo=timezone.utc)
+ fetch_time = datetime.now(tz=timezone.utc)
# Add a Bookmark into the DB before we fetch anything.
bookmark = BookmarkFactory(
@@ -411,8 +409,8 @@ def test_update_bookmarks(self):
# This should be updated to now, as we've changed things:
self.assertEqual(
bookmarks[1].fetch_time,
- datetime.strptime("2015-07-01 12:00:00", "%Y-%m-%d %H:%M:%S").replace(
- tzinfo=timezone.utc
+ datetime.strptime("2015-07-01 04:00:00", "%Y-%m-%d %H:%M:%S").astimezone(
+ timezone.utc
),
)
@@ -424,7 +422,7 @@ def test_update_bookmarks(self):
def test_no_update_bookmarks(self):
"""Ensure that if no values have changed, we don't update a bookmark."""
account = Account.objects.get(pk=1)
- fetch_time = datetime.utcnow().replace(tzinfo=timezone.utc)
+ fetch_time = datetime.now(tz=timezone.utc)
# Add a Bookmark into the DB before we fetch anything.
BookmarkFactory(
diff --git a/tests/pinboard/test_management_commands.py b/tests/pinboard/test_management_commands.py
index ff3cace..27cd1d6 100644
--- a/tests/pinboard/test_management_commands.py
+++ b/tests/pinboard/test_management_commands.py
@@ -1,4 +1,3 @@
-# coding: utf-8
from io import StringIO
from unittest.mock import patch
diff --git a/tests/pinboard/test_models.py b/tests/pinboard/test_models.py
index 1e590f5..cf401f1 100644
--- a/tests/pinboard/test_models.py
+++ b/tests/pinboard/test_models.py
@@ -1,4 +1,3 @@
-# coding: utf-8
from datetime import datetime, timedelta, timezone
from django.db import IntegrityError
diff --git a/tests/pinboard/test_templatetags.py b/tests/pinboard/test_templatetags.py
index a5f794d..c927e99 100644
--- a/tests/pinboard/test_templatetags.py
+++ b/tests/pinboard/test_templatetags.py
@@ -1,4 +1,3 @@
-# coding: utf-8
from datetime import date, datetime, timedelta, timezone
from django.test import TestCase
@@ -43,7 +42,7 @@ def setUp(self):
self.bookmarks_1 = BookmarkFactory.create_batch(6, account=account_1)
self.bookmarks_2 = BookmarkFactory.create_batch(6, account=account_2)
- post_time = datetime(2015, 3, 18, 12, 0, 0).replace(tzinfo=timezone.utc)
+ post_time = datetime(2015, 3, 18, 12, 0, 0, tzinfo=timezone.utc)
self.bookmarks_1[3].post_time = post_time
self.bookmarks_1[3].save()
self.bookmarks_1[5].is_private = True
diff --git a/tests/twitter/test_fetch.py b/tests/twitter/test_fetch.py
index 8a4eaff..620c0bc 100644
--- a/tests/twitter/test_fetch.py
+++ b/tests/twitter/test_fetch.py
@@ -17,13 +17,18 @@ class FetchTwitterTestCase(TestCase):
def make_response_body(self):
"Makes the JSON response to a call to the API"
- json_file = open("%s%s" % ("tests/twitter/fixtures/api/", self.api_fixture))
- json_data = json_file.read()
- json_file.close()
- return json_data
+ with open(f"tests/twitter/fixtures/api/{self.api_fixture}") as f:
+ json_data = f.read()
+ return json_data
def add_response(
- self, body, status=200, querystring={}, match_querystring=False, method="GET"
+ self,
+ body,
+ *,
+ status=200,
+ querystring=None,
+ match_querystring=False,
+ method="GET",
):
"""Add a Twitter API response.
@@ -35,13 +40,12 @@ def add_response(
a querystring.
method -- 'GET' or 'POST'.
"""
- url = "%s/%s.json" % (self.api_url, self.api_call)
+ querystring = {} if querystring is None else querystring
+ url = f"{self.api_url}/{self.api_call}.json"
if len(querystring):
- qs = "&".join(
- "%s=%s" % (key, querystring[key]) for key in querystring.keys()
- )
- url = "%s?%s" % (url, qs)
+ qs = "&".join(f"{key}={querystring[key]}" for key in querystring)
+ url = f"{url}?{qs}"
method = responses.POST if method == "POST" else responses.GET
diff --git a/tests/twitter/test_fetch_fetchers.py b/tests/twitter/test_fetch_fetchers.py
index b8a4c08..dd53f7f 100644
--- a/tests/twitter/test_fetch_fetchers.py
+++ b/tests/twitter/test_fetch_fetchers.py
@@ -302,10 +302,9 @@ def test_fetches_multiple_pages_for_count(self):
qs["count"] = 100
self.add_response(body=body, querystring=qs, match_querystring=True)
- with patch.object(FetchTweetsRecent, "_save_results"):
- with patch("time.sleep"):
- RecentTweetsFetcher(screen_name="jill").fetch(count=700)
- self.assertEqual(4, len(responses.calls))
+ with patch.object(FetchTweetsRecent, "_save_results"), patch("time.sleep"):
+ RecentTweetsFetcher(screen_name="jill").fetch(count=700)
+ self.assertEqual(4, len(responses.calls))
class FavoriteTweetsFetcherTestCase(TwitterFetcherTestCase):
@@ -497,14 +496,12 @@ def test_fetches_multiple_pages_for_count(self):
qs["count"] = 100
self.add_response(body=body, querystring=qs, match_querystring=True)
- with patch.object(FetchTweetsFavorite, "_save_results"):
- with patch("time.sleep"):
- FavoriteTweetsFetcher(screen_name="jill").fetch(count=700)
- self.assertEqual(4, len(responses.calls))
+ with patch.object(FetchTweetsFavorite, "_save_results"), patch("time.sleep"):
+ FavoriteTweetsFetcher(screen_name="jill").fetch(count=700)
+ self.assertEqual(4, len(responses.calls))
class UsersFetcherTestCase(TwitterFetcherTestCase):
-
api_fixture = "users_lookup.json"
api_call = "users/lookup"
@@ -587,14 +584,12 @@ def test_fetches_multiple_pages(self):
qs["user_id"] = "%2C".join(map(str, ids[-50:]))
self.add_response(body=body, querystring=qs, match_querystring=True)
- with patch.object(FetchUsers, "_save_results"):
- with patch("time.sleep"):
- UsersFetcher(screen_name="jill").fetch(ids)
- self.assertEqual(4, len(responses.calls))
+ with patch.object(FetchUsers, "_save_results"), patch("time.sleep"):
+ UsersFetcher(screen_name="jill").fetch(ids)
+ self.assertEqual(4, len(responses.calls))
class TweetsFetcherTestCase(TwitterFetcherTestCase):
-
api_fixture = "tweets.json"
api_call = "statuses/lookup"
@@ -689,7 +684,7 @@ def test_fetches_multiple_pages(self):
ids = [id for id in range(1, 351)]
body = json.dumps([{"id": id} for id in range(1, 100)])
- for n in range(3):
+ for _n in range(3):
# First time, add ids 1-100. Then 101-200. Then 201-300.
# start = n * 100
# end = (n + 1) * 100
@@ -704,14 +699,12 @@ def test_fetches_multiple_pages(self):
# qs['id'] = ','.join(map(str, ids[-50:]))
self.add_response(body=body, method="POST")
- with patch.object(FetchTweets, "_save_results"):
- with patch("time.sleep"):
- TweetsFetcher(screen_name="jill").fetch(ids)
- self.assertEqual(4, len(responses.calls))
+ with patch.object(FetchTweets, "_save_results"), patch("time.sleep"):
+ TweetsFetcher(screen_name="jill").fetch(ids)
+ self.assertEqual(4, len(responses.calls))
class VerifyFetcherTestCase(TwitterFetcherTestCase):
-
api_fixture = "verify_credentials.json"
api_call = "account/verify_credentials"
@@ -783,7 +776,6 @@ def test_saves_users(self):
class FetchVerifyTestCase(FetchTwitterTestCase):
-
api_fixture = "verify_credentials.json"
api_call = "account/verify_credentials"
@@ -807,7 +799,7 @@ def test_fetch_for_account_creates(self, fetch_avatar):
self.assertEqual(new_user.screen_name, "philgyford")
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
@@ -831,7 +823,7 @@ def test_fetch_for_account_updates(self, fetch_avatar):
self.assertEqual(updated_user.screen_name, "philgyford")
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
@@ -849,7 +841,7 @@ def test_fetch_for_account_fails(self):
self.assertEqual(result["account"], "Unsaved Account")
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
self.assertTrue("Could not authenticate you" in result["messages"][0])
@@ -876,7 +868,7 @@ def setUp(self):
self.video = VideoFactory(
twitter_id=66666666,
- image_url="https://pbs.twimg.com/ext_tw_video_thumb/740282905369444352/pu/img/zyxwvutsrqponml.jpg", # noqa: E501
+ image_url="https://pbs.twimg.com/ext_tw_video_thumb/740282905369444352/pu/img/zyxwvutsrqponml.jpg",
image_file="",
mp4_file="",
)
@@ -984,8 +976,7 @@ def test_saves_downloaded_image_file(self, download):
FetchFiles()._fetch_and_save_file(self.image, "image")
self.assertEqual(
self.image.image_file.name,
- "twitter/media/%s/%s/%s"
- % (
+ "twitter/media/{}/{}/{}".format(
temp_filepath[-4:-2],
temp_filepath[-2:],
os.path.basename(temp_filepath),
@@ -1003,8 +994,7 @@ def test_saves_downloaded_mp4_file(self, download):
FetchFiles()._fetch_and_save_file(self.animated_gif, "mp4")
self.assertEqual(
self.animated_gif.mp4_file.name,
- "twitter/media/%s/%s/%s"
- % (
+ "twitter/media/{}/{}/{}".format(
temp_filepath[-4:-2],
temp_filepath[-2:],
os.path.basename(temp_filepath),
diff --git a/tests/twitter/test_fetch_savers.py b/tests/twitter/test_fetch_savers.py
index 62b05b8..97a7acb 100644
--- a/tests/twitter/test_fetch_savers.py
+++ b/tests/twitter/test_fetch_savers.py
@@ -23,7 +23,7 @@ class TweetSaverTestCase(FetchTwitterTestCase):
# fixture to something much shorter, and easier to test with.
api_fixture = "tweets.json"
- def make_tweet(self, is_private=False):
+ def make_tweet(self, *, is_private=False):
self.fetch_time = datetime_now()
# Get the JSON for a single tweet.
@@ -325,7 +325,7 @@ def test_saves_videos(self):
self.assertEqual(video.twitter_id, 1234567890)
self.assertEqual(
video.image_url,
- "https://pbs.twimg.com/ext_tw_video_thumb/661601811007188992/pu/img/gcxHGl7EA08a-Gps.jpg", # noqa: E501
+ "https://pbs.twimg.com/ext_tw_video_thumb/661601811007188992/pu/img/gcxHGl7EA08a-Gps.jpg",
)
self.assertEqual(video.large_w, 640)
self.assertEqual(video.large_h, 360)
@@ -340,11 +340,11 @@ def test_saves_videos(self):
self.assertEqual(video.aspect_ratio, "16:9")
self.assertEqual(
video.dash_url,
- "https://video.twimg.com/ext_tw_video/661601811007188992/pu/pl/K0pVjBgnc5BI_4e5.mpd", # noqa: E501
+ "https://video.twimg.com/ext_tw_video/661601811007188992/pu/pl/K0pVjBgnc5BI_4e5.mpd",
)
self.assertEqual(
video.xmpeg_url,
- "https://video.twimg.com/ext_tw_video/661601811007188992/pu/pl/K0pVjBgnc5BI_4e5.m3u8", # noqa: E501
+ "https://video.twimg.com/ext_tw_video/661601811007188992/pu/pl/K0pVjBgnc5BI_4e5.m3u8",
)
@@ -382,14 +382,14 @@ def test_saves_gifs(self):
class UserSaverTestCase(FetchTwitterTestCase):
-
api_fixture = "verify_credentials.json"
- def make_user_data(self, custom={}):
+ def make_user_data(self, custom=None):
"""Get the JSON for a single user.
custom is a dict of attributes to override on the default data.
eg, {'protected': True}
"""
+ custom = {} if custom is None else custom
raw_json = self.make_response_body()
user_data = json.loads(raw_json)
for key, value in custom.items():
@@ -408,7 +408,6 @@ def make_user_object(self, user_data, download):
@freeze_time("2015-08-14 12:00:00", tz_offset=-8)
def test_saves_correct_user_data(self):
-
user_data = self.make_user_data()
user = self.make_user_object(user_data)
diff --git a/tests/twitter/test_ingest_v1.py b/tests/twitter/test_ingest_v1.py
index 6302dbb..d91ff87 100644
--- a/tests/twitter/test_ingest_v1.py
+++ b/tests/twitter/test_ingest_v1.py
@@ -8,28 +8,25 @@
class Version1TweetIngesterTestCase(TestCase):
-
# A sample file of the format we'd get in a Twitter archive.
ingest_fixture = "tests/twitter/fixtures/ingest/v1/2015_08.js"
def get_tweet_data(self):
"Returns the JSON tweet data, as text, from the fixture."
- file = open(self.ingest_fixture)
- tweet_data = file.read()
- file.close()
- return tweet_data
+ with open(self.ingest_fixture) as f:
+ tweet_data = f.read()
+ return tweet_data
def test_raises_error_with_invalid_dir(self):
- with patch("os.path.isdir", return_value=False):
- with self.assertRaises(IngestError):
- Version1TweetIngester().ingest(directory="/bad/dir")
+ with patch("os.path.isdir", return_value=False), self.assertRaises(IngestError):
+ Version1TweetIngester().ingest(directory="/bad/dir")
def test_raises_error_with_empty_dir(self):
"If no .js files are found, raises IngestError"
- with patch("os.path.isdir", return_value=True):
- with patch("ditto.twitter.ingest.Version1TweetIngester", file_count=0):
- with self.assertRaises(IngestError):
- Version1TweetIngester().ingest(directory="/bad/dir")
+ with patch("os.path.isdir", return_value=True), patch(
+ "ditto.twitter.ingest.Version1TweetIngester", file_count=0
+ ), self.assertRaises(IngestError):
+ Version1TweetIngester().ingest(directory="/bad/dir")
# All the below have a similar structure to mock out file-related functions.
# Here's what's happening:
@@ -74,9 +71,9 @@ def test_opens_all_files(self):
ingester.ingest(directory="/good/dir")
m.assert_has_calls(
[
- call("/good/dir/2015_01.js", "r"),
- call("/good/dir/2015_02.js", "r"),
- call("/good/dir/2015_03.js", "r"),
+ call("/good/dir/2015_01.js"),
+ call("/good/dir/2015_02.js"),
+ call("/good/dir/2015_03.js"),
],
any_order=True,
)
diff --git a/tests/twitter/test_ingest_v2.py b/tests/twitter/test_ingest_v2.py
index c9f409c..33b6fae 100644
--- a/tests/twitter/test_ingest_v2.py
+++ b/tests/twitter/test_ingest_v2.py
@@ -49,7 +49,7 @@ def test_saves_user_data(self):
"id_str": "12552",
"screen_name": "philgyford",
"name": "Phil Gyford",
- "profile_image_url_https": "https://pbs.twimg.com/profile_images/1167616130/james_200208_300x300.jpg", # NOQA: E501
+ "profile_image_url_https": "https://pbs.twimg.com/profile_images/1167616130/james_200208_300x300.jpg",
"verified": False,
"ditto_note": (
"This user data was compiled from separate parts of a "
diff --git a/tests/twitter/test_management_commands.py b/tests/twitter/test_management_commands.py
index 952af42..8335424 100644
--- a/tests/twitter/test_management_commands.py
+++ b/tests/twitter/test_management_commands.py
@@ -1,4 +1,3 @@
-# coding: utf-8
from io import StringIO
from unittest.mock import patch
@@ -25,7 +24,6 @@ def tearDown(self):
class FetchTwitterTweetsArgs(FetchTwitterArgs):
-
fetcher_class_path = (
"ditto.twitter.management.commands.fetch_twitter_tweets.RecentTweetsFetcher"
)
@@ -60,7 +58,6 @@ def test_with_number(self):
class FetchTwitterFavoritesArgs(FetchTwitterArgs):
-
fetcher_class_path = "ditto.twitter.management.commands.fetch_twitter_favorites.FavoriteTweetsFetcher" # noqa: E501
def test_fail_with_no_args(self):
@@ -117,7 +114,6 @@ def tearDown(self):
class FetchTwitterTweetsOutput(FetchTwitterOutput):
-
fetch_method_path = "ditto.twitter.management.commands.fetch_twitter_tweets.RecentTweetsFetcher.fetch" # noqa: E501
def test_success_output(self):
@@ -152,7 +148,6 @@ def test_error_output(self):
class FetchTwitterFavoritesOutput(FetchTwitterOutput):
-
fetch_method_path = "ditto.twitter.management.commands.fetch_twitter_favorites.FavoriteTweetsFetcher.fetch" # noqa: E501
def test_success_output(self):
@@ -190,7 +185,6 @@ def test_error_output(self):
class FetchTwitterAccountsOutput(FetchTwitterOutput):
-
fetch_method_path = (
"ditto.twitter.management.commands.fetch_twitter_accounts.VerifyFetcher.fetch"
)
@@ -242,7 +236,7 @@ class ImportTweetsVersion1(TestCase):
def setUp(self):
self.patcher = patch(
- "ditto.twitter.management.commands.import_twitter_tweets.Version1TweetIngester.ingest" # noqa: E501
+ "ditto.twitter.management.commands.import_twitter_tweets.Version1TweetIngester.ingest"
)
self.ingest_mock = self.patcher.start()
self.out = StringIO()
@@ -270,7 +264,7 @@ class ImportTweetsVersion2(TestCase):
def setUp(self):
self.patcher = patch(
- "ditto.twitter.management.commands.import_twitter_tweets.Version2TweetIngester.ingest" # noqa: E501
+ "ditto.twitter.management.commands.import_twitter_tweets.Version2TweetIngester.ingest"
)
self.ingest_mock = self.patcher.start()
self.out = StringIO()
@@ -281,11 +275,12 @@ def tearDown(self):
def test_fails_with_invalid_directory(self):
"Test fails with invalid directory"
- with patch("os.path.isdir", return_value=False):
- with self.assertRaises(CommandError):
- call_command(
- "import_twitter_tweets", path="/wrong/path", archive_version="v2"
- )
+ with patch("os.path.isdir", return_value=False), self.assertRaises(
+ CommandError
+ ):
+ call_command(
+ "import_twitter_tweets", path="/wrong/path", archive_version="v2"
+ )
def test_calls_ingest_method(self):
"Calls correct class and method"
diff --git a/tests/twitter/test_models.py b/tests/twitter/test_models.py
index 08f3a13..ff6ee0b 100644
--- a/tests/twitter/test_models.py
+++ b/tests/twitter/test_models.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import os
from datetime import datetime, timedelta, timezone
from unittest.mock import Mock, patch
@@ -24,17 +23,15 @@
class AccountTestCase(TestCase):
-
api_url = "https://api.twitter.com/1.1"
api_fixture = "tests/twitter/fixtures/api/verify_credentials.json"
def make_verify_credentials_body(self):
"Makes the JSON response to a call to verify_credentials"
- json_file = open(self.api_fixture)
- json_data = json_file.read()
- json_file.close()
- return json_data
+ with open(self.api_fixture) as f:
+ json_data = f.read()
+ return json_data
def add_response(self, body, call, status=200):
"""Add a Twitter API response.
@@ -47,7 +44,7 @@ def add_response(self, body, call, status=200):
"""
responses.add(
responses.GET,
- "%s/%s.json" % (self.api_url, call),
+ f"{self.api_url}/{call}.json",
status=status,
match_querystring=False,
body=body,
@@ -89,7 +86,7 @@ def test_creates_user(self, fetch_avatar):
self.assertEqual(account.user.screen_name, "philgyford")
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
@@ -114,7 +111,7 @@ def test_update_user_does_nothing_with_no_credentials(self):
)
# Not saving (as that generates another request):
account = AccountFactory.build(user=None)
- result = account.updateUserFromTwitter()
+ result = account.update_user_from_twitter()
self.assertEqual(result, False)
self.assertEqual(0, len(responses.calls))
@@ -131,14 +128,14 @@ def test_update_user_updates_user(self, fetch_avatar):
# Not saving (as that generates another request):
account = AccountWithCredentialsFactory.build(user=None)
- result = account.updateUserFromTwitter()
+ result = account.update_user_from_twitter()
self.assertTrue(result["success"])
self.assertIsInstance(result["user"], User)
self.assertIsInstance(account.user, User)
self.assertEqual(account.user.screen_name, "philgyford")
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
@@ -153,10 +150,10 @@ def test_update_user_returns_error_message(self):
# Not saving (as that generates another request):
account = AccountWithCredentialsFactory.build(user=None)
- result = account.updateUserFromTwitter()
+ result = account.update_user_from_twitter()
self.assertEqual(1, len(responses.calls))
self.assertEqual(
- "%s/%s.json" % (self.api_url, "account/verify_credentials"),
+ f"{self.api_url}/account/verify_credentials.json",
responses.calls[0].request.url,
)
self.assertFalse(result["success"])
@@ -195,7 +192,7 @@ def test_media_type(self):
def test_ordering(self):
"""Multiple accounts are sorted by time_created ascending"""
- time_now = datetime.utcnow().replace(tzinfo=timezone.utc)
+ time_now = datetime.now(tz=timezone.utc)
photo_1 = PhotoFactory(time_created=time_now - timedelta(minutes=1))
PhotoFactory(time_created=time_now)
photos = Media.objects.all()
@@ -414,7 +411,7 @@ def test_get_absolute_url(self):
def test_ordering(self):
"""Multiple tweets are sorted by post_time descending"""
- time_now = datetime.utcnow().replace(tzinfo=timezone.utc)
+ time_now = datetime.now(tz=timezone.utc)
TweetFactory(post_time=time_now - timedelta(minutes=1))
tweet_2 = TweetFactory(post_time=time_now)
tweets = Tweet.objects.all()
diff --git a/tests/twitter/test_templatetags.py b/tests/twitter/test_templatetags.py
index 8ec56a8..b39dbae 100644
--- a/tests/twitter/test_templatetags.py
+++ b/tests/twitter/test_templatetags.py
@@ -96,7 +96,7 @@ def setUp(self):
self.tweets_2 = TweetFactory.create_batch(2, user=user_2)
self.tweets_3 = TweetFactory.create_batch(2, user=user_3)
- post_time = datetime(2015, 3, 18, 12, 0, 0).replace(tzinfo=timezone.utc)
+ post_time = datetime(2015, 3, 18, 12, 0, 0, tzinfo=timezone.utc)
self.tweets_1[0].post_time = post_time
self.tweets_1[0].save()
self.tweets_2[1].post_time = post_time + timedelta(hours=1)
@@ -137,7 +137,7 @@ def setUp(self):
self.tweets[0].user.is_private = True
self.tweets[0].user.save()
- post_time = datetime(2015, 3, 18, 12, 0, 0).replace(tzinfo=timezone.utc)
+ post_time = datetime(2015, 3, 18, 12, 0, 0, tzinfo=timezone.utc)
self.tweets[0].post_time = post_time
self.tweets[0].save()
self.tweets[1].post_time = post_time + timedelta(hours=1)
diff --git a/tests/twitter/test_utils.py b/tests/twitter/test_utils.py
index e2e2111..c46a2c5 100644
--- a/tests/twitter/test_utils.py
+++ b/tests/twitter/test_utils.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import json
from django.test import TestCase
@@ -7,15 +6,13 @@
class HtmlifyTestCase(TestCase):
-
# Define in child classes.
api_fixture = None
- def getJson(self, fixture):
- json_file = open("%s%s" % ("tests/twitter/fixtures/api/", fixture))
- json_data = json.loads(json_file.read())
- json_file.close()
- return json_data
+ def get_json(self, fixture):
+ with open(f"tests/twitter/fixtures/api/{fixture}") as f:
+ json_data = json.loads(f.read())
+ return json_data
class HtmlifyDescriptionTestCase(HtmlifyTestCase):
@@ -24,7 +21,7 @@ class HtmlifyDescriptionTestCase(HtmlifyTestCase):
api_fixture = "user_with_description.json"
def test_htmlify_description(self):
- description_html = htmlify_description(self.getJson(self.api_fixture))
+ description_html = htmlify_description(self.get_json(self.api_fixture))
self.assertEqual(
description_html,
(
@@ -42,7 +39,7 @@ class HtmlifyTweetEntitiesTestCase(HtmlifyTestCase):
def test_links_urls(self):
"Makes 'urls' entities into clickable links."
api_fixture = "tweet_with_entities.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertTrue(
(
'with '
@@ -93,7 +90,7 @@ def test_links_users_with_entities(self):
def test_links_hashtags(self):
"Makes 'hashtags' entities into clickable #links."
api_fixture = "tweet_with_entities.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertTrue(
(
' "
@@ -126,7 +123,7 @@ def test_links_substringed_hashtags(self):
def test_links_symbols(self):
api_fixture = "tweet_with_symbols.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertEqual(
tweet_html,
(
@@ -141,11 +138,10 @@ def test_links_symbols(self):
class HtmlifyTweetTestCase(HtmlifyTestCase):
-
api_fixture = "tweet_with_entities.json"
def setUp(self):
- self.json_data = self.getJson(self.api_fixture)
+ self.json_data = self.get_json(self.api_fixture)
def test_linebreaks(self):
"Turns linebreaks into
s"
@@ -171,7 +167,7 @@ def test_handles_display_text_range_str(self):
It should be able to cope with that.
"""
api_fixture = "tweet_with_display_text_range_str.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertEqual(
tweet_html,
(
@@ -191,7 +187,7 @@ def test_handles_entities_indicies_str(self):
It should be able to cope with that.
"""
api_fixture = "tweet_with_entities_indices_str.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertEqual(
tweet_html,
(
@@ -212,7 +208,7 @@ def test_urls_in_archived_tweets(self):
tweets.
"""
api_fixture = "tweet_from_archive_2006.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertEqual(
tweet_html,
(
@@ -226,7 +222,7 @@ def test_urls_in_archived_tweets(self):
def test_urls_with_no_media(self):
"""The 'entities' element has no 'media'."""
api_fixture = "tweet_with_entities_2.json"
- tweet_html = htmlify_tweet(self.getJson(api_fixture))
+ tweet_html = htmlify_tweet(self.get_json(api_fixture))
self.assertTrue(
(
                'created.
diff --git a/tox.ini b/tox.ini
     django32: Django >= 3.2, < 3.3
- django41: Django >= 4.0, < 4.2
- django42: Django >= 4.1, < 4.3
+ django41: Django >= 4.1, < 4.2
+ django42: Django >= 4.2, < 4.3
+ django50: Django >= 5.0, < 5.1
djangomain: https://github.com/django/django/archive/master.tar.gz
extras =
{[base]extras}
@@ -50,8 +50,7 @@ commands =
coverage run --branch --source=ditto --omit=*/migrations/*.py {envbindir}/django-admin test {posargs:}
coverage report -m
-[testenv:flake8]
-basepython = python3
+[testenv:ruff]
skip_install = true
-deps = flake8
-commands = flake8 {posargs:ditto}
+deps = ruff
+commands = ruff check {posargs:--show-source .}