# Copyright (C) 2019-2024 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import hashlib
from itertools import islice
import json
import logging
import os
import string
import sys
import tempfile
from typing import (
Any,
Dict,
Generic,
Iterator,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
TypeVar,
)
import attr
from requests.exceptions import ContentDecodingError
import sentry_sdk
from swh.core.tarball import uncompress
from swh.loader.core import __version__
from swh.loader.core.loader import BaseLoader
from swh.loader.core.utils import download
from swh.loader.exception import NotFound
from swh.model import discovery, from_disk
from swh.model.hashutil import hash_to_hex
from swh.model.model import (
ExtID,
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
Origin,
OriginVisit,
OriginVisitStatus,
RawExtrinsicMetadata,
Release,
ReleaseTargetType,
Revision,
Sha1Git,
Snapshot,
)
from swh.model.swhids import CoreSWHID, ExtendedObjectType, ExtendedSWHID, ObjectType
from swh.storage.algos import discovery as storage_discovery
from swh.storage.algos.snapshot import snapshot_get_latest
from swh.storage.interface import StorageInterface
from swh.storage.utils import now
logger = logging.getLogger(__name__)
SWH_METADATA_AUTHORITY = MetadataAuthority(
type=MetadataAuthorityType.REGISTRY,
url="https://softwareheritage.org/",
metadata={},
)
"""Metadata authority for extrinsic metadata generated by Software Heritage.
Used for metadata on "original artifacts", i.e. length, filename, and checksums
of downloaded archive files."""
PartialExtID = Tuple[str, int, bytes]
"""The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object."""
@attr.s
class BasePackageInfo:
"""Compute the primary key for a dict using the id_keys as primary key
composite.
Args:
d: A dict entry to compute the primary key on
id_keys: Sequence of keys to use as primary key
Returns:
The identity for that dict entry
"""
url = attr.ib(type=str)
filename = attr.ib(type=Optional[str])
version = attr.ib(type=str)
"""Version name/number."""
MANIFEST_FORMAT: Optional[string.Template] = None
"""If not None, used by the default extid() implementation to format a manifest,
before hashing it to produce an ExtID."""
EXTID_TYPE: str = "package-manifest-sha256"
EXTID_VERSION: int = 0
# The following attribute has kw_only=True in order to allow subclasses
# to add attributes. Without kw_only, attributes without default values cannot
# go after attributes with default values.
# See <https://github.com/python-attrs/attrs/issues/38>
directory_extrinsic_metadata = attr.ib(
type=List[RawExtrinsicMetadataCore],
default=[],
kw_only=True,
)
""":term:`extrinsic metadata` collected by the loader, that will be attached to the
loaded directory and added to the Metadata storage."""
checksums = attr.ib(type=Dict[str, str], default={}, kw_only=True)
"""Dictionary holding package tarball checksums for integrity check after
download, keys are hash algorithm names and values are checksums in
hexadecimal format. The supported algorithms are defined in the
:data:`swh.model.hashutil.ALGORITHMS` set."""
# TODO: add support for metadata for releases and contents
def extid(self) -> Optional[PartialExtID]:
"""Returns a unique intrinsic identifier of this package info,
or None if this package info is not 'deduplicatable' (meaning that
we will always load it, instead of checking the ExtID storage
to see if we already did)"""
if self.MANIFEST_FORMAT is None:
return None
else:
manifest = self.MANIFEST_FORMAT.substitute(
{k: str(v) for (k, v) in attr.asdict(self).items()}
)
return (
self.EXTID_TYPE,
self.EXTID_VERSION,
hashlib.sha256(manifest.encode()).digest(),
)
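# Example (hedged sketch): a subclass opts into ExtID-based deduplication by
# defining MANIFEST_FORMAT over its attrs fields. ``MyPackageInfo`` and its
# ``sha256`` field are hypothetical:
#
#     @attr.s
#     class MyPackageInfo(BasePackageInfo):
#         sha256 = attr.ib(type=str, default="", kw_only=True)
#         MANIFEST_FORMAT = string.Template("url $url version $version sha256 $sha256")
#
#     p_info = MyPackageInfo(
#         url="https://example.org/pkg-1.0.tar.gz",
#         filename="pkg-1.0.tar.gz",
#         version="1.0",
#         sha256="0" * 64,
#     )
#     p_info.extid()  # -> ("package-manifest-sha256", 0, <32-byte sha256 digest>)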
TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)
class PackageLoader(BaseLoader, Generic[TPackageInfo]):
def __init__(self, storage: StorageInterface, url: str, **kwargs: Any):
"""Loader's constructor. This raises exception if the minimal required
configuration is missing (cf. fn:`check` method).
Args:
storage: Storage instance
url: Origin url to load data from
"""
super().__init__(storage=storage, origin_url=url, **kwargs)
self.status_load = ""
self.status_visit = ""
def load_status(self) -> Dict[str, str]:
"""Detailed loading status."""
return {
"status": self.status_load,
}
def visit_status(self) -> str:
"""Detailed visit status."""
return self.status_visit
def get_versions(self) -> Sequence[str]:
"""Return the list of all published package versions.

        Raises:
            :class:`swh.loader.exception.NotFound` when failing to read the
            published package versions.

        Returns:
            Sequence of published versions
"""
return []
def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]:
"""Given a release version of a package, retrieve the associated
package information for such version.
Args:
version: Package version
Returns:
(branch name, package metadata)
"""
yield from {}
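    # Example override (hedged sketch; ``MyPackageInfo`` is a hypothetical
    # BasePackageInfo subclass):
    #
    #     def get_package_info(self, version):
    #         yield f"releases/{version}", MyPackageInfo(
    #             url=f"https://example.org/pkg-{version}.tar.gz",
    #             filename=f"pkg-{version}.tar.gz",
    #             version=version,
    #         )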
def build_release(
self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git
) -> Optional[Release]:
"""Build the release from the archive metadata (extrinsic
artifact metadata) and the intrinsic metadata.

        Args:
            p_info: Package information
            uncompressed_path: Artifact uncompressed path on disk
            directory: The target directory's Sha1Git identifier
"""
raise NotImplementedError("build_release")
def get_default_version(self) -> str:
"""Retrieve the latest release version if any.
Returns:
Latest version
"""
return ""
def last_snapshot(self) -> Optional[Snapshot]:
"""Retrieve the last snapshot out of the last visit."""
return snapshot_get_latest(
self.storage,
self.origin.url,
visit_type=self.visit_type,
)
def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]:
return p_info.extid()
def _get_known_extids(
self, packages_info: List[TPackageInfo]
) -> Dict[PartialExtID, List[CoreSWHID]]:
"""Compute the ExtIDs from new PackageInfo objects, searches which are already
loaded in the archive, and returns them if any."""
# Compute the ExtIDs of all the new packages, grouped by extid type
new_extids: Dict[Tuple[str, int], List[bytes]] = {}
for p_info in packages_info:
res = p_info.extid()
if res is not None:
(extid_type, extid_version, extid_extid) = res
new_extids.setdefault((extid_type, extid_version), []).append(
extid_extid
)
# For each extid type, call extid_get_from_extid() with all the extids of
# that type, and store them in the '(type, extid) -> target' map.
known_extids: Dict[PartialExtID, List[CoreSWHID]] = {}
for (extid_type, extid_version), extids in new_extids.items():
for extid in self.storage.extid_get_from_extid(
extid_type, extids, version=extid_version
):
if extid is not None:
key = (extid.extid_type, extid_version, extid.extid)
known_extids.setdefault(key, []).append(extid.target)
return known_extids
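    # For illustration (hedged), the returned mapping has the shape:
    #
    #     {("package-manifest-sha256", 0, b"<extid bytes>"):
    #      [<release/revision CoreSWHID>, ...]}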
def resolve_object_from_extids(
self,
known_extids: Dict[PartialExtID, List[CoreSWHID]],
p_info: TPackageInfo,
whitelist: Set[Sha1Git],
) -> Optional[CoreSWHID]:
"""Resolve the revision/release from known ExtIDs and a package info object.
If the artifact has already been downloaded, this will return the
existing release (or revision) targeting that uncompressed artifact directory.
Otherwise, this returns None.
Args:
known_extids: Dict built from a list of ExtID, with the target as value
p_info: Package information
whitelist: Any ExtID with target not in this set is filtered out
Returns:
None or release/revision SWHID
"""
new_extid = p_info.extid()
if new_extid is None:
return None
extid_targets = set()
for extid_target in known_extids.get(new_extid, []):
if extid_target.object_id not in whitelist:
# There is a known ExtID for this package, but its target is not
# in the snapshot.
# This can happen for three reasons:
#
# 1. a loader crashed after writing the ExtID, but before writing
# the snapshot
# 2. some other loader loaded the same artifact, but produced
# a different revision, causing an additional ExtID object
# to be written. We will probably find this loader's ExtID
# in a future iteration of this loop.
# Note that for now, this is impossible, as each loader has a
# completely different extid_type, but this is an implementation
# detail of each loader.
# 3. we took a snapshot, then the package disappeared,
# then we took another snapshot, and the package reappeared
#
# In case of 1, we must actually load the package now,
# so let's do it.
# TODO: detect when we are in case 3 using release_missing
# or revision_missing instead of the snapshot.
continue
elif extid_target.object_type in (ObjectType.RELEASE, ObjectType.REVISION):
extid_targets.add(extid_target)
else:
# Note that this case should never be reached unless there is a
# collision between a revision hash and some non-revision object's
# hash, but better safe than sorry.
                logger.warning(
                    "%s is in the whitelist, but is not a revision/release.",
                    hash_to_hex(extid_target.object_id),
                )
if extid_targets:
# This is a known package version, as we have an extid to reference it.
# Let's return one of them.
# If there is a release extid, return it.
release_extid_targets = {
extid_target
for extid_target in extid_targets
if extid_target.object_type == ObjectType.RELEASE
}
# Exclude missing targets
missing_releases = {
CoreSWHID(object_type=ObjectType.RELEASE, object_id=id_)
for id_ in self.storage.release_missing(
[swhid.object_id for swhid in release_extid_targets]
)
}
if missing_releases:
err_message = "Found ExtIDs pointing to missing releases"
logger.error(err_message + ": %s", missing_releases)
with sentry_sdk.push_scope() as scope:
scope.set_extra(
"missing_releases", [str(x) for x in missing_releases]
)
sentry_sdk.capture_message(err_message, "error")
release_extid_targets -= missing_releases
extid_target2 = self.select_extid_target(p_info, release_extid_targets)
if extid_target2:
return extid_target2
        # If there is no release extid (i.e. if the package was only loaded with
        # older versions of this loader, which produced revision objects instead
        # of releases), return a revision extid when possible.
revision_extid_targets = {
extid_target
for extid_target in extid_targets
if extid_target.object_type == ObjectType.REVISION
}
        if revision_extid_targets:
            assert len(revision_extid_targets) == 1, revision_extid_targets
            return list(revision_extid_targets)[0]
# No target found (this is probably a new package version)
return None
def select_extid_target(
self, p_info: TPackageInfo, extid_targets: Set[CoreSWHID]
) -> Optional[CoreSWHID]:
"""Given a list of release extid targets, choses one appropriate for the
given package info.
Package loaders shyould implement this if their ExtIDs may map to multiple
releases, so they can fetch releases from the storage and inspect their fields
to select the right one for this ``p_info``.
"""
if extid_targets:
# The base package loader does not have the domain-specific knowledge
# to select the right release -> crash if there is more than one.
assert len(extid_targets) == 1, extid_targets
return list(extid_targets)[0]
return None
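    # Example override (hedged sketch): a loader whose ExtIDs may map to several
    # releases could pick the one whose name matches the package version:
    #
    #     def select_extid_target(self, p_info, extid_targets):
    #         for release in self.storage.release_get(
    #             [t.object_id for t in extid_targets]
    #         ):
    #             if release is not None and release.name == p_info.version.encode():
    #                 return CoreSWHID(
    #                     object_type=ObjectType.RELEASE, object_id=release.id
    #                 )
    #         return None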
def download_package(
self, p_info: TPackageInfo, tmpdir: str
) -> List[Tuple[str, Mapping]]:
"""Download artifacts for a specific package. All downloads happen in
in the tmpdir folder.
Default implementation expects the artifacts package info to be
about one artifact per package.
Note that most implementation have 1 artifact per package. But some
implementation have multiple artifacts per package (debian), some have
none, the package is the artifact (gnu).
Args:
artifacts_package_info: Information on the package artifacts to
download (url, filename, etc...)
tmpdir: Location to retrieve such artifacts
Returns:
List of (path, computed hashes)
"""
try:
return [
download(
p_info.url,
dest=tmpdir,
filename=p_info.filename,
hashes=p_info.checksums,
)
]
except ContentDecodingError:
            # the package might be erroneously marked as gzip-compressed while it
            # is not; try to download its raw bytes again without attempting to
            # uncompress the input stream
return [
download(
p_info.url,
dest=tmpdir,
filename=p_info.filename,
hashes=p_info.checksums,
extra_request_headers={"Accept-Encoding": "identity"},
)
]
def uncompress(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
) -> str:
"""Uncompress the artifact(s) in the destination folder dest.

        Optionally, this may need to use the p_info dict for some more
        information (e.g. Debian).
"""
uncompressed_path = os.path.join(dest, "src")
for a_path, _ in dl_artifacts:
uncompress(a_path, dest=uncompressed_path)
return uncompressed_path
def finalize_visit(
self,
*,
snapshot: Optional[Snapshot],
visit: OriginVisit,
status_visit: str,
status_load: str,
failed_branches: List[str],
errors: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""Finalize the visit:
- flush eventual unflushed data to storage
- update origin visit's status
- return the task's status
"""
self.status_load = status_load
self.status_visit = status_visit
self.storage.flush()
snapshot_id: Optional[bytes] = None
        if snapshot and snapshot.id:  # prevent snapshot_id from being set to b""
snapshot_id = snapshot.id
assert visit.visit
visit_status = OriginVisitStatus(
origin=self.origin.url,
visit=visit.visit,
type=self.visit_type,
date=now(),
status=status_visit,
snapshot=snapshot_id,
)
self.storage.origin_visit_status_add([visit_status])
result: Dict[str, Any] = {
"status": status_load,
}
if snapshot_id:
result["snapshot_id"] = hash_to_hex(snapshot_id)
if failed_branches:
logger.warning("%d failed branches", len(failed_branches))
for i, urls in enumerate(islice(failed_branches, 50)):
prefix_url = "Failed branches: " if i == 0 else ""
logger.warning("%s%s", prefix_url, urls)
return result
def load(self) -> Dict:
"""Load for a specific origin the associated contents.
1. Get the list of versions in an origin.
2. Get the snapshot from the previous run of the loader,
and filter out versions that were already loaded, if their
:term:`extids <extid>` match
Then, for each remaining version in the origin
3. Fetch the files for one package version By default, this can be
implemented as a simple HTTP request. Loaders with more specific
requirements can override this, e.g.: the PyPI loader checks the
integrity of the downloaded files; the Debian loader has to download
and check several files for one package version.
4. Extract the downloaded files. By default, this would be a universal
archive/tarball extraction.
Loaders for specific formats can override this method (for instance,
the Debian loader uses dpkg-source -x).
5. Convert the extracted directory to a set of Software Heritage
objects Using swh.model.from_disk.
6. Extract the metadata from the unpacked directories This would only
be applicable for "smart" loaders like npm (parsing the
package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
debian/changelog and debian/control).
On "minimal-metadata" sources such as the GNU archive, the lister
should provide the minimal set of metadata needed to populate the
revision/release objects (authors, dates) as an argument to the
task.
7. Generate the revision/release objects for the given version. From
the data generated at steps 3 and 4.
end for each
8. Generate and load the snapshot for the visit
Using the revisions/releases collected at step 7., and the branch
information from step 2., generate a snapshot and load it into the
Software Heritage archive
"""
self.status_load = "uneventful" # either: eventful, uneventful, failed
self.status_visit = "full" # see swh.model.model.OriginVisitStatus
snapshot = None
failed_branches: List[str] = []
# Prepare origin and origin_visit
origin = Origin(url=self.origin.url)
try:
self.storage.origin_add([origin])
visit = list(
self.storage.origin_visit_add(
[
OriginVisit(
origin=self.origin.url,
date=self.visit_date,
type=self.visit_type,
)
]
)
)[0]
except Exception as e:
logger.exception(
"Failed to initialize origin_visit for %s", self.origin.url
)
sentry_sdk.capture_exception(e)
self.status_load = self.status_visit = "failed"
return {"status": "failed"}
# Get the previous snapshot for this origin. It is then used to see which
# of the package's versions are already loaded in the archive.
try:
last_snapshot = self.last_snapshot()
logger.debug("last snapshot: %s", last_snapshot)
except Exception as e:
logger.exception("Failed to get previous state for %s", self.origin.url)
sentry_sdk.capture_exception(e)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=[str(e)],
)
load_exceptions: List[Exception] = []
# Get the list of all version names
try:
versions = self.get_versions()
except NotFound as e:
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="not_found",
status_load="failed",
errors=[str(e)],
)
except Exception as e:
logger.exception("Failed to get list of versions for %s", self.origin.url)
sentry_sdk.capture_exception(e)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=[str(e)],
)
errors = []
# Get the metadata of each version's package
packages_info: List[Tuple[str, TPackageInfo]] = []
for version in versions:
try:
for branch_name, p_info in self.get_package_info(version):
packages_info.append((branch_name, p_info))
except Exception as e:
load_exceptions.append(e)
sentry_sdk.capture_exception(e)
error = f"Failed to get package info for version {version} of {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
# Compute the ExtID of each of these packages
known_extids = self._get_known_extids([p_info for (_, p_info) in packages_info])
if last_snapshot is None:
last_snapshot_targets: Set[Sha1Git] = set()
else:
last_snapshot_targets = {
branch.target
for branch in last_snapshot.branches.values()
if branch is not None
}
new_extids: Set[ExtID] = set()
extid_to_swhid: Dict[Optional[PartialExtID], CoreSWHID] = {}
tmp_releases: Dict[str, List[Tuple[str, Sha1Git]]] = {
version: [] for version in versions
}
for branch_name, p_info in packages_info:
logger.debug("package_info: %s", p_info)
# Check if the package was already loaded, using its ExtID
swhid = self.resolve_object_from_extids(
known_extids, p_info, last_snapshot_targets
) or extid_to_swhid.get(p_info.extid())
if swhid is not None and swhid.object_type == ObjectType.REVISION:
# This package was already loaded, but by an older version
# of this loader, which produced revisions instead of releases.
# Let's fetch the revision's data, and "upgrade" it into a release.
(rev,) = self.storage.revision_get([swhid.object_id])
if not rev:
logger.error(
"Failed to upgrade branch %s from revision to "
"release, %s is missing from the storage. "
"Falling back to re-loading from the origin.",
branch_name,
swhid,
)
else:
rev = None
if swhid is None or (swhid.object_type == ObjectType.REVISION and not rev):
# No matching revision or release found in the last snapshot, load it.
release_id = None
try:
res = self._load_release(p_info, origin)
if res:
(release_id, directory_id) = res
assert release_id
assert directory_id
self._load_extrinsic_directory_metadata(
p_info, release_id, directory_id
)
self.storage.flush()
self.status_load = "eventful"
except Exception as e:
self.storage.clear_buffers()
load_exceptions.append(e)
sentry_sdk.capture_exception(e)
error = f"Failed to load branch {branch_name} for {self.origin.url}"
logger.exception(error)
failed_branches.append(branch_name)
errors.append(f"{error}: {e}")
continue
if release_id is None:
continue
add_extid = True
elif swhid.object_type == ObjectType.REVISION:
# If 'rev' was None, the previous block would have run.
assert rev is not None
rel = rev2rel(rev, p_info.version)
self.storage.release_add([rel])
logger.debug("Upgraded %s to %s", swhid, rel.swhid())
release_id = rel.id
# Create a new extid for this package, so the next run of this loader
# will be able to find the new release, and use it (instead of the
# old revision)
add_extid = True
elif swhid.object_type == ObjectType.RELEASE:
# This package was already loaded, nothing to do.
release_id = swhid.object_id
add_extid = False
else:
assert False, f"Unexpected object type: {swhid}"
assert release_id is not None
if add_extid:
partial_extid = p_info.extid()
if partial_extid is not None:
(extid_type, extid_version, extid) = partial_extid
release_swhid = CoreSWHID(
object_type=ObjectType.RELEASE, object_id=release_id
)
extid_to_swhid[partial_extid] = release_swhid
new_extids.add(
ExtID(
extid_type=extid_type,
extid_version=extid_version,
extid=extid,
target=release_swhid,
)
)
tmp_releases[p_info.version].append((branch_name, release_id))
if load_exceptions:
self.status_visit = "partial"
if not tmp_releases:
# We could not load any releases; fail completely
logger.error("Failed to load any release for %s", self.origin.url)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit="failed",
status_load="failed",
errors=errors,
)
try:
# Retrieve the default release version (the "latest" one)
default_version = self.get_default_version()
logger.debug("default version: %s", default_version)
# Retrieve extra branches
extra_branches = self.extra_branches()
logger.debug("extra branches: %s", extra_branches)
snapshot = self._load_snapshot(
default_version, tmp_releases, extra_branches
)
self.storage.flush()
except Exception as e:
error = f"Failed to build snapshot for origin {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "failed"
self.status_load = "failed"
if snapshot:
try:
metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
self.load_metadata_objects(metadata_objects)
except Exception as e:
error = (
f"Failed to load extrinsic snapshot metadata for {self.origin.url}"
)
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "partial"
self.status_load = "failed"
try:
metadata_objects = self.build_extrinsic_origin_metadata()
self.load_metadata_objects(metadata_objects)
except Exception as e:
error = f"Failed to load extrinsic origin metadata for {self.origin.url}"
logger.exception(error)
errors.append(f"{error}: {e}")
sentry_sdk.capture_exception(e)
self.status_visit = "partial"
self.status_load = "failed"
if self.status_load != "failed":
self._load_extids(new_extids)
return self.finalize_visit(
snapshot=snapshot,
visit=visit,
failed_branches=failed_branches,
status_visit=self.status_visit,
status_load=self.status_load,
errors=errors,
)
def _load_directory(
self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
) -> Tuple[str, from_disk.Directory]:
uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
logger.debug("uncompressed_path: %s", uncompressed_path)
directory = from_disk.Directory.from_disk(
path=uncompressed_path.encode("utf-8"),
max_content_length=self.max_content_size,
)
contents, skipped_contents, directories = from_disk.iter_directory(directory)
# Instead of sending everything from the bottom up to the storage,
# use a Merkle graph discovery algorithm to filter out known objects.
contents, skipped_contents, directories = discovery.filter_known_objects(
storage_discovery.DiscoveryStorageConnection(
contents, skipped_contents, directories, self.storage
),
)
logger.debug("Number of skipped contents: %s", len(skipped_contents))
self.storage.skipped_content_add(skipped_contents)
logger.debug("Number of contents: %s", len(contents))
self.storage.content_add(contents)
logger.debug("Number of directories: %s", len(directories))
self.storage.directory_add(directories)
return (uncompressed_path, directory)
def _load_release(
self, p_info: TPackageInfo, origin
) -> Optional[Tuple[Sha1Git, Sha1Git]]:
"""Does all the loading of a release itself:
* downloads a package and uncompresses it
* loads it from disk
* adds contents, directories, and release to self.storage
* returns (release_id, directory_id)

        Raises:
            Exception when unable to download or uncompress artifacts
"""
if not p_info.url:
raise ValueError("No url for package!")
with tempfile.TemporaryDirectory() as tmpdir:
dl_artifacts = self.download_package(p_info, tmpdir)
(uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)
# FIXME: This should be release. cf. D409
release = self.build_release(
p_info, uncompressed_path, directory=directory.hash
)
if not release:
# Some artifacts are missing intrinsic metadata
# skipping those
return None
metadata = [metadata for (filepath, metadata) in dl_artifacts]
assert release.target is not None, release
assert release.target_type == ReleaseTargetType.DIRECTORY, release
metadata_target = ExtendedSWHID(
object_type=ExtendedObjectType.DIRECTORY, object_id=release.target
)
original_artifact_metadata = RawExtrinsicMetadata(
target=metadata_target,
discovery_date=self.visit_date,
authority=SWH_METADATA_AUTHORITY,
fetcher=self.get_metadata_fetcher(),
format="original-artifacts-json",
metadata=json.dumps(metadata).encode(),
origin=self.origin.url,
release=release.swhid(),
)
self.load_metadata_objects([original_artifact_metadata])
logger.debug("Release: %s", release)
self.storage.release_add([release])
assert directory.hash
return (release.id, directory.hash)
def _load_snapshot(
self,
default_version: str,
releases: Dict[str, List[Tuple[str, bytes]]],
extra_branches: Dict[bytes, Mapping[str, Any]],
) -> Optional[Snapshot]:
"""Build snapshot out of the current releases stored and extra branches.
Then load it in the storage.
"""
logger.debug("releases: %s", releases)
# Build and load the snapshot
branches = {} # type: Dict[bytes, Mapping[str, Any]]
for version, branch_name_releases in releases.items():
if version == default_version and len(branch_name_releases) == 1:
# only 1 branch (no ambiguity), we can create an alias
# branch 'HEAD'
branch_name, _ = branch_name_releases[0]
# except for some corner case (deposit)
if branch_name != "HEAD":
branches[b"HEAD"] = {
"target_type": "alias",
"target": branch_name.encode("utf-8"),
}
for branch_name, target in branch_name_releases:
branches[branch_name.encode("utf-8")] = {
"target_type": "release",
"target": target,
}
# Deal with extra-branches
for name, branch_target in extra_branches.items():
if name in branches:
error_message = f"Extra branch '{name!r}' has been ignored"
logger.error(error_message)
sentry_sdk.capture_message(error_message, "error")
else:
branches[name] = branch_target
snapshot_data = {"branches": branches}
logger.debug("snapshot: %s", snapshot_data)
snapshot = Snapshot.from_dict(snapshot_data)
logger.debug("snapshot: %s", snapshot)
self.storage.snapshot_add([snapshot])
return snapshot
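    # For illustration (hedged): with releases {"1.0": [("releases/1.0", <id>)]}
    # and default_version "1.0", the snapshot ends up with branches:
    #
    #     {
    #         b"HEAD": {"target_type": "alias", "target": b"releases/1.0"},
    #         b"releases/1.0": {"target_type": "release", "target": <id>},
    #     }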
def get_loader_name(self) -> str:
"""Returns a fully qualified name of this loader."""
return f"{self.__class__.__module__}.{self.__class__.__name__}"
def get_loader_version(self) -> str:
"""Returns the version of the current loader."""
module_name = self.__class__.__module__ or ""
module_name_parts = module_name.split(".")
# Iterate rootward through the package hierarchy until we find a parent of this
# loader's module with a __version__ attribute.
for prefix_size in range(len(module_name_parts), 0, -1):
package_name = ".".join(module_name_parts[0:prefix_size])
module = sys.modules[package_name]
if hasattr(module, "__version__"):
return module.__version__
if module_name.startswith("swh.loader"):
            # there is no explicitly defined version for this loader, but it is
            # one of the swh.loader.core ones, so return the known __version__
            # of this package
return __version__
# If this loader's class has no parent package with a __version__,
# it should implement it itself.
raise NotImplementedError(
f"Could not dynamically find the version of {self.get_loader_name()}."
)
def _load_extrinsic_directory_metadata(
self,
p_info: TPackageInfo,
release_id: Sha1Git,
directory_id: Sha1Git,
) -> None:
metadata_objects = self.build_extrinsic_directory_metadata(
p_info, release_id, directory_id
)
self.load_metadata_objects(metadata_objects)
def _load_extids(self, extids: Set[ExtID]) -> None:
if not extids:
return
try:
self.storage.extid_add(list(extids))
except Exception as e:
logger.exception("Failed to load new ExtIDs for %s", self.origin.url)
sentry_sdk.capture_exception(e)
# No big deal, it just means the next visit will load the same versions
# again.
def rev2rel(rev: Revision, version: str) -> Release:
"""Converts a revision to a release."""
message = rev.message
if message and not message.endswith(b"\n"):
message += b"\n"
return Release(
name=version.encode(),
message=message,
target=rev.directory,
target_type=ReleaseTargetType.DIRECTORY,
synthetic=rev.synthetic,
author=rev.author,
date=rev.date,
)
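# Example (hedged sketch): the minimal surface a concrete package loader has to
# implement. ``MyPackageInfo``, ``MyLoader``, and the example URLs are
# hypothetical; error handling and real metadata parsing are omitted.
#
#     @attr.s
#     class MyPackageInfo(BasePackageInfo):
#         MANIFEST_FORMAT = string.Template("url $url version $version")
#
#     class MyLoader(PackageLoader[MyPackageInfo]):
#         visit_type = "my-ecosystem"
#
#         def get_versions(self):
#             return ["1.0", "1.1"]  # usually fetched from the upstream API
#
#         def get_default_version(self):
#             return "1.1"
#
#         def get_package_info(self, version):
#             yield f"releases/{version}", MyPackageInfo(
#                 url=f"https://example.org/pkg-{version}.tar.gz",
#                 filename=f"pkg-{version}.tar.gz",
#                 version=version,
#             )
#
#         def build_release(self, p_info, uncompressed_path, directory):
#             return Release(
#                 name=p_info.version.encode(),
#                 message=f"Synthetic release for version {p_info.version}\n".encode(),
#                 target=directory,
#                 target_type=ReleaseTargetType.DIRECTORY,
#                 synthetic=True,
#                 author=None,
#                 date=None,
#             )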