Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 28 additions & 22 deletions src/launchpad/artifact_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,17 +101,10 @@ def process_message(
logger.info(f"Skipping processing for project {project_id}")
return

artifact_type = "unknown"
with contextlib.ExitStack() as stack:
stack.enter_context(request_context())
stack.enter_context(
statsd.timed(
"artifact.processing.duration",
tags=[
f"project_id:{project_id}",
f"organization_id:{organization_id}",
],
)
)
processing_start = time.monotonic()
scope = stack.enter_context(sentry_sdk.new_scope())
scope.set_tag("launchpad.project_id", project_id)
scope.set_tag("launchpad.organization_id", organization_id)
Expand All @@ -120,7 +113,7 @@ def process_message(
statsd.increment("artifact.processing.started")
logger.info(f"Processing artifact {artifact_id} (project: {project_id}, org: {organization_id})")
try:
artifact_processor.process_artifact(organization_id, project_id, artifact_id)
artifact_type = artifact_processor.process_artifact(organization_id, project_id, artifact_id)
except Exception:
statsd.increment("artifact.processing.failed")
duration = time.time() - start_time
Expand All @@ -133,14 +126,24 @@ def process_message(
logger.info(
f"Processing complete for artifact {artifact_id} (project: {project_id}, org: {organization_id}) in {duration:.2f}s"
)
finally:
statsd.timing(
"artifact.processing.duration",
time.monotonic() - processing_start,
tags=[
f"project_id:{project_id}",
f"organization_id:{organization_id}",
f"artifact_type:{artifact_type}",
],
)

def process_artifact(
self,
organization_id: str,
project_id: str,
artifact_id: str,
) -> None:
"""Process an artifact with the requested features."""
) -> str:
"""Process an artifact with the requested features. Returns the artifact type string."""
dequeued_at = datetime.now()

with contextlib.ExitStack() as stack:
Expand Down Expand Up @@ -170,6 +173,8 @@ def process_artifact(
if PreprodFeature.BUILD_DISTRIBUTION in server_requested_features:
self._do_distribution(organization_id, project_id, artifact_id, artifact, info)

return _get_artifact_type(artifact).name.lower()

@contextlib.contextmanager
def _download_artifact(
self,
Expand Down Expand Up @@ -462,16 +467,6 @@ def _prepare_update_data(
dequeued_at: datetime,
app_icon_id: str | None,
) -> Dict[str, Any]:
def _get_artifact_type(artifact: Artifact) -> ArtifactType:
if isinstance(artifact, ZippedXCArchive):
return ArtifactType.XCARCHIVE
elif isinstance(artifact, (AAB, ZippedAAB)):
return ArtifactType.AAB
elif isinstance(artifact, (APK, ZippedAPK)):
return ArtifactType.APK
else:
raise ValueError(f"Unsupported artifact type: {type(artifact)}")

build_number = int(app_info.build) if app_info.build.isdigit() else None

apple_app_info = None
Expand Down Expand Up @@ -545,6 +540,17 @@ def _upload_results(
logger.info(f"Successfully uploaded analysis results for artifact {artifact_id}")


def _get_artifact_type(artifact: Artifact) -> ArtifactType:
    """Map a concrete artifact instance to its ArtifactType enum member.

    Raises:
        ValueError: if the instance matches none of the known artifact classes.
    """
    # Ordered dispatch table: (class or tuple of classes, resulting type).
    # First isinstance match wins; order mirrors the platform-specific cases.
    dispatch = (
        (ZippedXCArchive, ArtifactType.XCARCHIVE),
        ((AAB, ZippedAAB), ArtifactType.AAB),
        ((APK, ZippedAPK), ArtifactType.APK),
    )
    for classes, artifact_type in dispatch:
        if isinstance(artifact, classes):
            return artifact_type
    raise ValueError(f"Unsupported artifact type: {type(artifact)}")


def _guess_message(code: ProcessingErrorCode, e: Exception) -> ProcessingErrorMessage:
if code == ProcessingErrorCode.ARTIFACT_PROCESSING_ERROR:
if isinstance(e, NotImplementedError):
Expand Down
Loading