fix(server): Enforce size limits on UE4 crash reports (#1099)
UE4 crash reports are compressed archives that are unpacked during
ingestion in Relay. After expansion, the size of the resulting envelope
and its file attachments was never checked, which allowed attachments
far exceeding the maximum size limits to pass through.
jan-auer authored Oct 7, 2021
1 parent 29176f8 commit c2daf43
Showing 6 changed files with 85 additions and 49 deletions.
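
The underlying issue is easiest to see with numbers: a limit enforced on the compressed archive says little about what that archive expands to. A back-of-the-envelope illustration in Rust (all sizes are made-up examples, not Relay defaults):

fn main() {
    // What the endpoint sees on the wire vs. after unpacking (made-up numbers).
    let compressed_archive: usize = 120 * 1024; // 120 KiB UE4 archive
    let unpacked_attachments: [usize; 3] = [48 << 20, 3 << 20, 512 * 1024];
    let max_attachments_size: usize = 20 << 20; // 20 MiB limit

    // A naive check against the compressed size passes...
    assert!(compressed_archive <= max_attachments_size);

    // ...while the unpacked attachments blow far past the same limit.
    let total: usize = unpacked_attachments.iter().sum();
    assert!(total > max_attachments_size);
}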
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@
- Correctly validate timestamps for outcomes and sessions. ([#1086](https://github.com/getsentry/relay/pull/1086))
- Run compression on a thread pool when sending to upstream. ([#1085](https://github.com/getsentry/relay/pull/1085))
- Report proper status codes and error messages when sending invalid JSON payloads to an endpoint with an `X-Sentry-Relay-Signature` header. ([#1090](https://github.com/getsentry/relay/pull/1090))
- Enforce attachment and event size limits on UE4 crash reports. ([#1099](https://github.com/getsentry/relay/pull/1099))

**Internal**:

8 changes: 8 additions & 0 deletions relay-server/src/actors/envelopes.rs
@@ -934,6 +934,10 @@ impl EnvelopeProcessor {
/// envelope contains an `UnrealReport` item, it removes it from the envelope and inserts new
/// items for each of its contents.
///
/// The envelope may be dropped if it exceeds size limits after decompression. In particular,
/// this includes cases where a single attachment file exceeds the maximum file size. This is in
/// line with the behavior of the envelope endpoint.
///
/// After this, the `EnvelopeProcessor` should be able to process the envelope the same way it
/// processes any other envelopes.
#[cfg(feature = "processing")]
@@ -943,6 +947,10 @@ impl EnvelopeProcessor {
if let Some(item) = envelope.take_item_by(|item| item.ty() == ItemType::UnrealReport) {
utils::expand_unreal_envelope(item, envelope)
.map_err(ProcessingError::InvalidUnrealReport)?;

if !utils::check_envelope_size_limits(&self.config, envelope) {
return Err(ProcessingError::PayloadTooLarge);
}
}

Ok(())
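
Condensed, the new control flow is: expand the archive first, then re-validate the envelope against the limits. A minimal sketch with stand-in types (the real `Envelope`, `Config`, and helpers live in relay-server; only the call order and the `PayloadTooLarge` error are taken from the diff above):

#[derive(Debug)]
enum ProcessingError {
    PayloadTooLarge,
}

/// Stand-in for Relay's envelope; tracks only the total attachment size.
struct Envelope {
    attachments_len: usize,
}

fn check_envelope_size_limits(max_attachments_size: usize, envelope: &Envelope) -> bool {
    envelope.attachments_len <= max_attachments_size
}

fn process_unreal(envelope: &Envelope, max_attachments_size: usize) -> Result<(), ProcessingError> {
    // By this point the UE4 archive has been expanded into individual
    // attachment items, so the check sees the true, uncompressed sizes.
    if !check_envelope_size_limits(max_attachments_size, envelope) {
        return Err(ProcessingError::PayloadTooLarge);
    }
    Ok(())
}

fn main() {
    let expanded = Envelope { attachments_len: 50 << 20 }; // 50 MiB after unpacking
    assert!(process_unreal(&expanded, 20 << 20).is_err()); // 20 MiB limit
}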
50 changes: 1 addition & 49 deletions relay-server/src/endpoints/common.rs
@@ -12,7 +12,6 @@ use futures::prelude::*;
use serde::Deserialize;

use relay_common::{clone, tryf};
use relay_config::Config;
use relay_general::protocol::{EventId, EventType};
use relay_log::LogError;
use relay_quotas::RateLimits;
@@ -314,53 +313,6 @@ pub fn cors(app: ServiceApp) -> CorsBuilder<ServiceState> {
builder
}

/// Checks for size limits of items in this envelope.
///
/// Returns `true`, if the envelope adheres to the configured size limits. Otherwise, returns
/// `false`, in which case the envelope should be discarded and a `413 Payload Too Large` response
/// should be given.
///
/// The following limits are checked:
///
/// - `max_event_size`
/// - `max_attachment_size`
/// - `max_attachments_size`
/// - `max_session_count`
fn check_envelope_size_limits(config: &Config, envelope: &Envelope) -> bool {
let mut event_size = 0;
let mut attachments_size = 0;
let mut session_count = 0;
let mut client_reports_size = 0;

for item in envelope.items() {
match item.ty() {
ItemType::Event
| ItemType::Transaction
| ItemType::Security
| ItemType::RawSecurity
| ItemType::FormData => event_size += item.len(),
ItemType::Attachment | ItemType::UnrealReport => {
if item.len() > config.max_attachment_size() {
return false;
}

attachments_size += item.len()
}
ItemType::Session => session_count += 1,
ItemType::Sessions => session_count += 1,
ItemType::UserReport => (),
ItemType::Metrics => (),
ItemType::MetricBuckets => (),
ItemType::ClientReport => client_reports_size += item.len(),
}
}

event_size <= config.max_event_size()
&& attachments_size <= config.max_attachments_size()
&& session_count <= config.max_session_count()
&& client_reports_size <= config.max_client_reports_size()
}

/// Handles Sentry events.
///
Sentry events may come either directly from an HTTP request (the store endpoint calls this
@@ -442,7 +394,7 @@
};

envelope_context.update(&envelope);
if check_envelope_size_limits(&config, &envelope) {
if utils::check_envelope_size_limits(&config, &envelope) {
Ok((envelope, checked.rate_limits))
} else {
envelope_context.send_outcomes(Outcome::Invalid(DiscardReason::TooLarge));
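
Note the asymmetry between the two enforcement points: on the endpoint path above, the check is synchronous, so an oversized envelope is rejected with `413 Payload Too Large` in addition to the recorded outcome; on the processing path, the client has already received its response, so only the outcome is emitted. A sketch reduced to what this diff shows, namely `Outcome::Invalid(DiscardReason::TooLarge)` and the numeric category 3 asserted in the integration test below:

#[derive(Debug)]
enum DiscardReason {
    TooLarge,
}

#[derive(Debug)]
enum Outcome {
    Invalid(DiscardReason),
}

impl Outcome {
    /// Numeric category as consumed from the outcomes topic; the
    /// integration test below expects 3 for data dropped as invalid.
    fn category(&self) -> u8 {
        match self {
            Outcome::Invalid(_) => 3,
        }
    }
}

fn main() {
    let outcome = Outcome::Invalid(DiscardReason::TooLarge);
    assert_eq!(outcome.category(), 3);
}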
2 changes: 2 additions & 0 deletions relay-server/src/utils/mod.rs
@@ -7,6 +7,7 @@ mod param_parser;
mod rate_limits;
mod request;
mod shutdown;
mod sizes;
mod timer;
mod tracked_future;
mod with_outcome;
@@ -27,6 +28,7 @@ pub use self::param_parser::*;
pub use self::rate_limits::*;
pub use self::request::*;
pub use self::shutdown::*;
pub use self::sizes::*;
pub use self::timer::*;
pub use self::tracked_future::*;
pub use self::with_outcome::*;
50 changes: 50 additions & 0 deletions relay-server/src/utils/sizes.rs
@@ -0,0 +1,50 @@
use relay_config::Config;

use crate::envelope::{Envelope, ItemType};

/// Checks for size limits of items in this envelope.
///
/// Returns `true`, if the envelope adheres to the configured size limits. Otherwise, returns
/// `false`, in which case the envelope should be discarded and a `413 Payload Too Large` response
/// should be given.
///
/// The following limits are checked:
///
/// - `max_event_size`
/// - `max_attachment_size`
/// - `max_attachments_size`
/// - `max_session_count`
/// - `max_client_reports_size`
pub fn check_envelope_size_limits(config: &Config, envelope: &Envelope) -> bool {
let mut event_size = 0;
let mut attachments_size = 0;
let mut session_count = 0;
let mut client_reports_size = 0;

for item in envelope.items() {
match item.ty() {
ItemType::Event
| ItemType::Transaction
| ItemType::Security
| ItemType::RawSecurity
| ItemType::FormData => event_size += item.len(),
ItemType::Attachment | ItemType::UnrealReport => {
if item.len() > config.max_attachment_size() {
return false;
}

attachments_size += item.len()
}
ItemType::Session => session_count += 1,
ItemType::Sessions => session_count += 1,
ItemType::UserReport => (),
ItemType::Metrics => (),
ItemType::MetricBuckets => (),
ItemType::ClientReport => client_reports_size += item.len(),
}
}

event_size <= config.max_event_size()
&& attachments_size <= config.max_attachments_size()
&& session_count <= config.max_session_count()
&& client_reports_size <= config.max_client_reports_size()
}
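
For reference, the accounting rules above condensed into a self-contained sketch (`Item`, `Kind`, and `Limits` are stand-ins for Relay's `Envelope`, `ItemType`, and `Config`; the grouping mirrors the `match` arms):

enum Kind {
    Event,        // Event, Transaction, Security, RawSecurity, FormData
    Attachment,   // Attachment, UnrealReport
    Session,      // Session, Sessions
    ClientReport, // ClientReport
    Other,        // UserReport, Metrics, MetricBuckets
}

struct Item {
    kind: Kind,
    len: usize,
}

struct Limits {
    max_event_size: usize,
    max_attachment_size: usize,
    max_attachments_size: usize,
    max_session_count: usize,
    max_client_reports_size: usize,
}

fn check(items: &[Item], l: &Limits) -> bool {
    let (mut event, mut attachments, mut sessions, mut reports) = (0, 0, 0, 0);
    for item in items {
        match item.kind {
            Kind::Event => event += item.len,
            Kind::Attachment => {
                // A single oversized file rejects the whole envelope.
                if item.len > l.max_attachment_size {
                    return false;
                }
                attachments += item.len;
            }
            Kind::Session => sessions += 1,
            Kind::ClientReport => reports += item.len,
            Kind::Other => (),
        }
    }
    event <= l.max_event_size
        && attachments <= l.max_attachments_size
        && sessions <= l.max_session_count
        && reports <= l.max_client_reports_size
}

fn main() {
    let limits = Limits {
        max_event_size: 1 << 20,
        max_attachment_size: 8 << 20,
        max_attachments_size: 20 << 20,
        max_session_count: 100,
        max_client_reports_size: 4 << 10,
    };
    // One 10 MiB attachment trips the per-file limit on its own.
    let items = [Item { kind: Kind::Attachment, len: 10 << 20 }];
    assert!(!check(&items, &limits));
}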
23 changes: 23 additions & 0 deletions tests/integration/test_unreal.py
@@ -1,6 +1,7 @@
import os
import pytest
import json
import time


def _load_dump_file(base_file_name: str):
@@ -313,3 +314,25 @@ def test_unreal_minidump_with_config_and_processing(
mini_dump_process_marker_found = True

assert mini_dump_process_marker_found


def test_unreal_crash_too_large(mini_sentry, relay_with_processing, outcomes_consumer):
PROJECT_ID = 42
unreal_content = _load_dump_file("unreal_crash")
print("UE4 size: %s" % len(unreal_content))

# Configure Relay so that it accepts the compressed UE4 archive. Once uncompressed, the file
# attachments are larger and should be dropped.
relay = relay_with_processing(
{"limits": {"max_attachments_size": len(unreal_content) + 1}}
)
mini_sentry.add_full_project_config(PROJECT_ID)
outcomes_consumer = outcomes_consumer()

# Relay accepts the archive, expands it asynchronously, and then drops it.
response = relay.send_unreal_request(PROJECT_ID, unreal_content)
assert response.ok

outcome = outcomes_consumer.get_outcome()
assert outcome["outcome"] == 3 # dropped as invalid
assert mini_sentry.captured_events.empty()
