pylint anitya/lib/backends files #1637

Merged: 1 commit merged on Jun 21, 2023

74 changes: 36 additions & 38 deletions anitya/lib/backends/__init__.py
@@ -24,7 +24,7 @@
import socket

# sre_constants contains re exceptions
import sre_constants
import sre_constants # pylint: disable=W4901
import urllib.request as urllib
from datetime import timedelta
from typing import List
@@ -43,8 +43,8 @@

# Default headers for requests
REQUEST_HEADERS = {
"User-Agent": "Anitya %s at release-monitoring.org"
% pkg_resources.get_distribution("anitya").version,
"User-Agent": f"Anitya {pkg_resources.get_distribution('anitya').version} "
"at release-monitoring.org",
"From": anitya_config.get("ADMIN_EMAIL"),
"If-modified-since": arrow.Arrow(1970, 1, 1).format("ddd, DD MMM YYYY HH:mm:ss")
+ " GMT",
@@ -88,7 +88,7 @@ class BaseBackend(object):
check_interval = timedelta(hours=1)

@classmethod
def expand_subdirs(self, url, last_change=None, glob_char="*"):
def expand_subdirs(cls, url, last_change=None, glob_char="*"):
"""Expand dirs containing ``glob_char`` in the given URL with the latest
Example URL: ``https://www.example.com/foo/*/``

@@ -99,7 +99,7 @@ def expand_subdirs(self, url, last_change=None, glob_char="*"):
`cnucnu <https://fedorapeople.org/cgit/till/public_git/cnucnu.git/>`_

"""
glob_pattern = "/([^/]*%s[^/]*)/" % re.escape(glob_char)
glob_pattern = f"/([^/]*{re.escape(glob_char)}[^/]*)/"
glob_match = re.search(glob_pattern, url)
if not glob_match:
return url
@@ -115,7 +115,7 @@ def expand_subdirs(self, url, last_change=None, glob_char="*"):
text_regex = re.compile(r"^d.+\s(\S+)\s*$", re.I | re.M)

if url_prefix != "":
resp = self.call_url(url_prefix, last_change=last_change)
resp = cls.call_url(url_prefix, last_change=last_change)
# When FTP server is called, Response object is not created
# and we get binary string instead
try:
@@ -135,8 +135,8 @@ def expand_subdirs(self, url, last_change=None, glob_char="*"):
sorted_subdirs = sorted([RpmVersion(s) for s in subdirs])
latest = sorted_subdirs[-1].version

url = "%s%s/%s" % (url_prefix, latest, url_suffix)
return self.expand_subdirs(url, glob_char)
url = f"{url_prefix}{latest}/{url_suffix}"
return cls.expand_subdirs(url, glob_char)
return url

@classmethod
@@ -174,7 +174,7 @@ def get_version_url(cls, project): # pragma: no cover
pass

@classmethod
def get_versions(self, project): # pragma: no cover
def get_versions(cls, project): # pragma: no cover
"""Method called to retrieve all the versions (that can be found)
of the projects provided, project that relies on the backend of
this plugin.
@@ -197,7 +197,7 @@ def get_versions(self, project): # pragma: no cover
pass

@classmethod
def check_feed(self):
def check_feed(cls):
"""Method called to retrieve the latest uploads to a given backend,
via, for example, RSS or an API.

@@ -219,7 +219,7 @@ def check_feed(self):
raise NotImplementedError()

@classmethod
def get_ordered_versions(self, project):
def get_ordered_versions(cls, project):
"""Method called to retrieve all the versions (that can be found)
of the projects provided, ordered from the oldest to the newest.

@@ -236,12 +236,12 @@ def get_ordered_versions(self, project):
when the versions cannot be retrieved correctly

"""
vlist = self.get_versions(project)
vlist = cls.get_versions(project)
sorted_versions = project.create_version_objects(vlist)
return [v.version for v in sorted_versions]

@classmethod
def _filter_versions(self, version, filter_list):
def _filter_versions(cls, version, filter_list):
"""
Method used to call as argument of Python filter function.

@@ -258,7 +258,7 @@ def _filter_versions(self, version, filter_list):
return False

@classmethod
def filter_versions(self, versions, filter_string):
def filter_versions(cls, versions, filter_string):
"""Method called to filter versions list by filter_string.
Filter string is first parsed by delimiter and then applied on list of versions.
For example: list of versions ["1.0.0", "1.0.0-alpha", "1.0.0-beta"]
@@ -272,23 +272,21 @@ def filter_versions(self, versions, filter_string):
Returns:
:obj:`list`: A list of filtered versions.
"""
_log.debug(
"Filtering versions '{}' by filter '{}'".format(versions, filter_string)
)
_log.debug("Filtering versions '%s' by filter '%s'", versions, filter_string)
filtered_versions = versions
if filter_string:
filter_list = filter_string.split(";")
filtered_versions = [
version
for version in versions
if not self._filter_versions(version, filter_list)
if not cls._filter_versions(version, filter_list)
]

_log.debug("Filtered versions '{}'".format(filtered_versions))
_log.debug("Filtered versions '%s'", filtered_versions)
return filtered_versions

@classmethod
def call_url(self, url, last_change=None, insecure=False):
def call_url(cls, url, last_change=None, insecure=False):
"""Dedicated method to query a URL.

It is important to use this method as it allows to query them with
@@ -317,7 +315,7 @@ def call_url(self, url, last_change=None, insecure=False):
last_change.format("ddd, DD MMM YYYY HH:mm:ss") + " GMT"
)
if "*" in url:
url = self.expand_subdirs(url, last_change)
url = cls.expand_subdirs(url, last_change) # pragma: no cover

if url.startswith("ftp://") or url.startswith("ftps://"):
socket.setdefaulttimeout(30)
@@ -327,16 +325,16 @@ def call_url(self, url, last_change=None, insecure=False):
req.add_header("From", headers["From"])
try:
# Ignore this bandit issue, the url is checked above
resp = urllib.urlopen(req) # nosec
resp = urllib.urlopen(req) # nosec # pylint: disable=R1732
content = resp.read().decode()
except URLError as e:
raise AnityaPluginException(
'Could not call "%s" with error: %s' % (url, e.reason)
)
except UnicodeDecodeError:
f'Could not call "{url}" with error: {e.reason}'
) from e
except UnicodeDecodeError as e:
raise AnityaPluginException(
"FTP response cannot be decoded with UTF-8: %s" % url
)
f"FTP response cannot be decoded with UTF-8: {url}"
) from e

return content

@@ -371,11 +369,10 @@ def get_versions_by_regex(url, regex, project, insecure=False):
try:
req = BaseBackend.call_url(url, last_change=last_change, insecure=insecure)
except Exception as err:
_log.debug("%s ERROR: %s" % (project.name, str(err)))
_log.debug("%s ERROR: %s", project.name, str(err))
raise AnityaPluginException(
'Could not call : "%s" of "%s", with error: %s'
% (url, project.name, str(err))
)
f'Could not call : "{url}" of "{project.name}", with error: {str(err)}'
) from err

if not isinstance(req, six.string_types):
# Not modified
@@ -394,25 +391,26 @@ def get_versions_by_regex_for_text(text, url, regex, project):

try:
upstream_versions = list(set(re.findall(regex, text)))
except sre_constants.error: # pragma: no cover
raise AnityaPluginException("%s: invalid regular expression" % project.name)
except sre_constants.error as err: # pragma: no cover
raise AnityaPluginException(
f"{project.name}: invalid regular expression"
) from err

for index, version in enumerate(upstream_versions):
# If the version retrieved is a tuple, re-constitute it
if type(version) == tuple:
if isinstance(version, tuple):
version = ".".join([v for v in version if not v == ""])

upstream_versions[index] = version

if " " in version:
raise AnityaPluginException(
"%s: invalid upstream version:>%s< - %s - %s "
% (project.name, version, url, regex)
f"{project.name}: invalid upstream version:>{version}< - {url} "
f"- {regex} "
)
if len(upstream_versions) == 0:
raise AnityaPluginException(
"%(name)s: no upstream version found. - %(url)s - "
"%(regex)s" % {"name": project.name, "url": url, "regex": regex}
f"{project.name}: no upstream version found. - {url} - {regex}"
)
# Filter retrieved versions
filtered_versions = BaseBackend.filter_versions(
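
For readers skimming the hunks above, the __init__.py changes converge on three pylint-driven patterns: lazy %-style arguments for logging calls instead of pre-formatted strings, f-strings for ordinary string building, and explicit exception chaining with raise ... from. The snippet below is a standalone sketch of the logging and chaining patterns, not Anitya code; ExampleError and fetch are made-up names.

import logging

_log = logging.getLogger(__name__)


class ExampleError(Exception):
    """Stand-in for AnityaPluginException."""


def fetch(url):
    # Lazy formatting: the logging module interpolates the arguments only if
    # the record is actually emitted, which is what pylint's logging
    # interpolation checks ask for (compare the _log.debug changes above).
    _log.debug("Calling %s", url)
    try:
        raise OSError("connection refused")  # stand-in for a failed URL call
    except OSError as err:
        # "from err" keeps the original traceback attached as __cause__ and
        # satisfies pylint's raise-missing-from check, mirroring the
        # "from e" additions in the hunks above.
        raise ExampleError(f'Could not call "{url}" with error: {err}') from err

Calling fetch("https://example.com") raises ExampleError with the OSError preserved as its __cause__, so the original failure still shows up in the traceback.
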
23 changes: 20 additions & 3 deletions anitya/lib/backends/bitbucket.py
@@ -39,7 +39,6 @@ def get_version_url(cls, project):
Returns:
str: url used for version checking
"""
url_template = "https://bitbucket.org/%(version_url)s/" "downloads?tab=tags"
url = ""
if project.version_url:
url = project.version_url.replace("https://bitbucket.org/", "")
@@ -50,7 +49,7 @@ def get_version_url(cls, project):
url = url[:-1]

if url:
url = url_template % {"version_url": url}
url = f"https://bitbucket.org/{url}/downloads?tab=tags"

return url

@@ -72,7 +71,25 @@ def get_versions(cls, project):
url = cls.get_version_url(project)
if not url:
raise AnityaPluginException(
"Project %s was incorrectly set up." % project.name
f"Project {project.name} was incorrectly set up."
)

return get_versions_by_regex(url, REGEX, project)

@classmethod
def check_feed(cls): # pragma: no cover
"""Method called to retrieve the latest uploads to a given backend,
via, for example, RSS or an API.

Not Supported

Returns:
:obj:`list`: A list of 4-tuples, containing the project name, homepage, the
backend, and the version.

Raises:
NotImplementedError: If backend does not
support batch updates.

"""
raise NotImplementedError()
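
Below is a simplified, standalone sketch of the two bitbucket.py changes: the f-string URL construction that replaces the "%(version_url)s" template, and the explicit check_feed override for a backend without a feed. SketchBackend and FakeProject are hypothetical names, and the real get_version_url does more URL normalization than shown here.

class FakeProject:
    version_url = "https://bitbucket.org/owner/repo/"


class SketchBackend:
    @classmethod
    def get_version_url(cls, project):
        # Build the URL with an f-string instead of a separate template string.
        path = project.version_url.replace("https://bitbucket.org/", "").rstrip("/")
        return f"https://bitbucket.org/{path}/downloads?tab=tags" if path else ""

    @classmethod
    def check_feed(cls):  # pragma: no cover
        # Overriding the hook explicitly documents that batch updates are
        # unsupported instead of silently inheriting an abstract method.
        raise NotImplementedError()


print(SketchBackend.get_version_url(FakeProject()))
# -> https://bitbucket.org/owner/repo/downloads?tab=tags
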
2 changes: 1 addition & 1 deletion anitya/lib/backends/cgit.py
@@ -70,7 +70,7 @@ def get_versions(cls, project):
return get_versions_by_regex(url, regex, project, url.startswith("http://"))

@classmethod
def check_feed(cls):
def check_feed(cls): # pragma: no cover
"""Method called to retrieve the latest uploads to a given backend,
via, for example, RSS or an API.

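
The cgit.py hunk only adds a coverage pragma. For reference, "# pragma: no cover" is the standard coverage.py marker for excluding lines that are never exercised by tests, sketched here on a hypothetical backend:

class FeedlessBackend:
    @classmethod
    def check_feed(cls):  # pragma: no cover
        """Batch updates are not supported by this hypothetical backend."""
        raise NotImplementedError()

With the marker in place, coverage.py skips the method when measuring coverage, so the intentionally untested raise does not count as a miss.
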
41 changes: 19 additions & 22 deletions anitya/lib/backends/cran.py
@@ -70,31 +70,30 @@ def get_version(cls, project):
format.

"""
url = "https://crandb.r-pkg.org/{name}".format(name=project.name)
url = f"https://crandb.r-pkg.org/{project.name}"

last_change = project.get_time_last_created_version()
try:
response = cls.call_url(url, last_change=last_change)
except requests.RequestException as e: # pragma: no cover
raise AnityaPluginException("Could not contact {}: {}".format(url, str(e)))
raise AnityaPluginException(f"Could not contact {url}: {str(e)}") from e

if response.status_code != 200:
# Not modified
if response.status_code == 304:
return None
raise AnityaPluginException(
"Failed to download from {}: {} {}".format(
url, response.status_code, response.reason
)
f"Failed to download from {url}: {response.status_code} "
f"{response.reason}"
)

try:
data = response.json()
except json.JSONDecodeError: # pragma: no cover
raise AnityaPluginException("No JSON returned by {}".format(url))
except json.JSONDecodeError as e: # pragma: no cover
raise AnityaPluginException(f"No JSON returned by {url}") from e

if "error" in data or "Version" not in data: # pragma: no cover
raise AnityaPluginException("No versions found at {}".format(url))
raise AnityaPluginException(f"No versions found at {url}")

return data["Version"]

@@ -110,7 +109,7 @@ def get_version_url(cls, project):
Returns:
str: url used for version checking
"""
url = "https://crandb.r-pkg.org/{name}/all".format(name=project.name)
url = f"https://crandb.r-pkg.org/{project.name}/all"

return url

@@ -136,25 +135,24 @@ def get_versions(cls, project):
try:
response = cls.call_url(url, last_change=last_change)
except requests.RequestException as e: # pragma: no cover
raise AnityaPluginException("Could not contact {}: {}".format(url, str(e)))
raise AnityaPluginException(f"Could not contact {url}: {str(e)}") from e

if response.status_code != 200:
# Not modified
if response.status_code == 304:
return []
raise AnityaPluginException(
"Failed to download from {}: {} {}".format(
url, response.status_code, response.reason
)
f"Failed to download from {url}: {response.status_code} "
f"{response.reason}"
)

try:
data = response.json()
except json.JSONDecodeError: # pragma: no cover
raise AnityaPluginException("No JSON returned by {}".format(url))
except json.JSONDecodeError as e: # pragma: no cover
raise AnityaPluginException(f"No JSON returned by {url}") from e

if "error" in data or "versions" not in data: # pragma: no cover
raise AnityaPluginException("No versions found at {}".format(url))
raise AnityaPluginException(f"No versions found at {url}")

filtered_versions = cls.filter_versions(
list(data["versions"].keys()), project.version_filter
@@ -184,19 +182,18 @@ def check_feed(cls):
try:
response = cls.call_url(url)
except requests.RequestException as e: # pragma: no cover
raise AnityaPluginException("Could not contact {}: {}".format(url, str(e)))
raise AnityaPluginException(f"Could not contact {url}: {str(e)}") from e

if response.status_code != 200: # pragma: no cover
raise AnityaPluginException(
"Failed to download from {}: {} {}".format(
url, response.status_code, response.reason
)
f"Failed to download from {url}: {response.status_code} "
f"{response.reason}"
)

try:
data = response.json()
except json.JSONDecodeError: # pragma: no cover
raise AnityaPluginException("No JSON returned by {}".format(url))
except json.JSONDecodeError as err: # pragma: no cover
raise AnityaPluginException(f"No JSON returned by {url}") from err

for item in data:
name = item["name"]
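
The cran.py hunks repeat the same request/decode error handling in three methods. The function below is a hedged, standalone condensation of that pattern; PluginError and fetch_json are placeholder names, the real methods go through cls.call_url rather than calling requests directly, and 304 handling is simplified.

import json

import requests


class PluginError(Exception):
    """Stand-in for AnityaPluginException."""


def fetch_json(url):
    try:
        response = requests.get(url, timeout=30)
    except requests.RequestException as err:
        # Chain the network failure instead of swallowing it (raise ... from).
        raise PluginError(f"Could not contact {url}: {err}") from err

    if response.status_code != 200:
        # The real methods return None/[] on 304 (not modified) before this.
        raise PluginError(
            f"Failed to download from {url}: {response.status_code} "
            f"{response.reason}"
        )

    try:
        return response.json()
    except json.JSONDecodeError as err:
        raise PluginError(f"No JSON returned by {url}") from err
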