 
 import 'package:dio/dio.dart';
 import 'package:sentry/sentry.dart';
+import 'dart:convert';
 
 /// This is an [EventProcessor], which improves crash reports of [DioError]s.
 /// It adds information about [DioError.requestOptions] if present and also about
@@ -49,27 +50,98 @@ class DioEventProcessor implements EventProcessor {
       uri: options.uri,
       method: options.method,
       headers: _options.sendDefaultPii ? headers : null,
-      data: _getRequestData(dioError.requestOptions.data),
+      data: _getRequestData(dioError.requestOptions.data, options),
     );
   }
 
   /// Returns the request data, if possible according to the users settings.
-  Object? _getRequestData(Object? data) {
-    if (!_options.sendDefaultPii) {
+  /// Takes into account the content type to determine proper encoding.
+  ///
+  Object? _getRequestData(Object? data, RequestOptions requestOptions) {
+    if (!_options.sendDefaultPii || data == null) {
       return null;
     }
+
+    // Handle different data types based on Dio's encoding behavior and content type
     if (data is String) {
-      if (_options.maxRequestBodySize.shouldAddBody(data.codeUnits.length)) {
+      // For all strings, use UTF-8 encoding for accurate size validation
+      if (_canEncodeStringWithinLimit(
+        data,
+        // ignore: invalid_use_of_internal_member
+        hardLimit: _options.maxRequestBodySize.getSizeLimit(),
+      )) {
         return data;
       }
-    } else if (data is List<int>) {
+    }
+    // For List<int> data (including Uint8List), we have exact size information
+    else if (data is List<int>) {
       if (_options.maxRequestBodySize.shouldAddBody(data.length)) {
         return data;
       }
+    } else if (data is num || data is bool) {
+      if (_options.maxRequestBodySize != MaxRequestBodySize.never) {
+        return data;
+      }
+    } else if (Transformer.isJsonMimeType(requestOptions.contentType)) {
+      if (_canEncodeJsonWithinLimit(
+        data,
+        // ignore: invalid_use_of_internal_member
+        hardLimit: _options.maxRequestBodySize.getSizeLimit(),
+      )) {
+        return data;
+      }
+    } else if (data is FormData) {
+      // FormData has a built-in length property for size checking
+      if (_options.maxRequestBodySize.shouldAddBody(data.length)) {
+        return _convertFormDataToMap(data);
+      }
+    } else if (data is MultipartFile) {
+      if (_options.maxRequestBodySize.shouldAddBody(data.length)) {
+        return _convertMultipartFileToMap(data);
+      }
     }
+
     return null;
   }
 
+  /// Converts FormData to a map representation that SentryRequest can handle
+  Map<String, dynamic> _convertFormDataToMap(FormData formData) {
+    final result = <String, dynamic>{};
+
+    // Add form fields - ensure proper typing
+    for (final field in formData.fields) {
+      result[field.key] = field.value;
+    }
+
+    // Add file information (metadata only, not the actual file content)
+    for (final file in formData.files) {
+      result['${file.key}_file'] = _convertMultipartFileToMap(file.value);
+    }
+
+    return result;
+  }
+
+  /// Converts a MultipartFile to a map representation that SentryRequest can handle
+  Map<String, dynamic> _convertMultipartFileToMap(MultipartFile file) {
+    final result = <String, dynamic>{
+      'filename': file.filename,
+      'contentType': file.contentType?.toString(),
+      'length': file.length,
+    };
+
+    // Only add headers if they exist and are not empty
+    if (file.headers != null && file.headers!.isNotEmpty) {
+      // Convert headers to a proper Map<String, dynamic>
+      final headersMap = <String, dynamic>{};
+      for (final entry in file.headers!.entries) {
+        headersMap[entry.key] = entry.value;
+      }
+      result['headers'] = headersMap;
+    }
+
+    return result;
+  }
+
   SentryResponse _responseFrom(DioError dioError) {
     final response = dioError.response;
 
@@ -104,3 +176,65 @@ class DioEventProcessor implements EventProcessor {
     return data;
   }
 }
+
+/// Returns true if the data can be encoded as JSON within the given byte limit.
+bool _canEncodeJsonWithinLimit(Object? data, {int? hardLimit}) {
+  if (hardLimit == null) {
+    // No limit means always allow
+    return true;
+  }
+  if (hardLimit == 0) {
+    // Zero limit means never allow
+    return false;
+  }
+
+  // Only proceed with encoding if we have a positive limit
+  final sink = _CountingByteSink(hardLimit);
+  final conv = JsonUtf8Encoder().startChunkedConversion(sink);
+  try {
+    conv.add(data);
+    conv.close();
+    return true;
+  } on _SizeLimitExceeded {
+    return false;
+  } catch (_) {
+    return false;
+  }
+}
+
+/// Returns true if the string can be encoded as UTF-8 within the given byte limit.
+bool _canEncodeStringWithinLimit(String data, {int? hardLimit}) {
+  if (hardLimit == null) {
+    // No limit means always allow
+    return true;
+  }
+  if (hardLimit == 0) {
+    // Zero limit means never allow
+    return false;
+  }
+
+  // Only proceed with encoding if we have a positive limit
+  final utf8Bytes = utf8.encode(data);
+  return utf8Bytes.length <= hardLimit;
+}
+
+/// Exception thrown when the hard limit is exceeded during counting.
+class _SizeLimitExceeded implements Exception {}
+
+/// A sink that counts bytes without storing them, with an optional hard limit.
+class _CountingByteSink implements Sink<List<int>> {
+  int count = 0;
+  final int? hardLimit;
+  _CountingByteSink([this.hardLimit]);
+
+  @override
+  void add(List<int> chunk) {
+    count += chunk.length;
+    if (hardLimit != null && count > hardLimit!) {
+      throw _SizeLimitExceeded();
+    }
+  }
+
+  @override
+  void close() {}
+}
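For context on the gate at the top of _getRequestData: request bodies are only attached when sendDefaultPii is enabled and the body fits the configured maxRequestBodySize. A minimal setup sketch, not part of this diff, assuming the public Sentry.init, SentryOptions.maxRequestBodySize, and sentry_dio addSentry() APIs:

import 'package:dio/dio.dart';
import 'package:sentry/sentry.dart';
import 'package:sentry_dio/sentry_dio.dart';

Future<void> main() async {
  await Sentry.init((options) {
    options.dsn = 'https://public@example.ingest.sentry.io/1'; // placeholder DSN
    options.sendDefaultPii = true; // without this, _getRequestData returns null
    options.maxRequestBodySize = MaxRequestBodySize.small;
  });

  final dio = Dio();
  dio.addSentry(); // wires up the Sentry Dio integration, including this event processor
}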
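Why the string branch now measures UTF-8 bytes instead of data.codeUnits.length: codeUnits counts UTF-16 code units, so multi-byte characters are undercounted relative to what is actually sent over the wire. A standalone illustration using only dart:convert:

import 'dart:convert';

void main() {
  const body = '€uro payload';
  print(body.codeUnits.length);    // 12 UTF-16 code units
  print(utf8.encode(body).length); // 14 bytes ('€' encodes to 3 bytes)
}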
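The JSON branch avoids materializing the encoded body just to measure it: JsonUtf8Encoder().startChunkedConversion streams bytes into _CountingByteSink, which throws as soon as the running count exceeds the limit. A self-contained sketch of the same idea, with hypothetical names rather than the PR's private helpers:

import 'dart:convert';

class LimitExceeded implements Exception {}

/// Counts bytes without storing them; throws once [limit] is exceeded.
class CountingSink implements Sink<List<int>> {
  CountingSink(this.limit);
  final int limit;
  int count = 0;

  @override
  void add(List<int> chunk) {
    count += chunk.length;
    if (count > limit) throw LimitExceeded();
  }

  @override
  void close() {}
}

/// True if [data] serializes to JSON in at most [limit] UTF-8 bytes.
bool fitsWithin(Object? data, int limit) {
  final conversion = JsonUtf8Encoder().startChunkedConversion(CountingSink(limit));
  try {
    conversion
      ..add(data)
      ..close();
    return true;
  } on LimitExceeded {
    return false;
  }
}

void main() {
  print(fitsWithin({'ok': true}, 1000));            // true
  print(fitsWithin(List.filled(10000, 'x'), 1000)); // false, aborts early
}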
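What the new _convertFormDataToMap path reports for a multipart body: field values verbatim plus file metadata only, never file contents. A rough sketch of the captured shape, built with dio's public FormData/MultipartFile API; the exact map is whatever the code above produces:

import 'package:dio/dio.dart';

void main() {
  final formData = FormData.fromMap({
    'username': 'alice',
    'avatar': MultipartFile.fromString('fake image bytes', filename: 'avatar.png'),
  });

  // The captured request data would look roughly like:
  // {
  //   'username': 'alice',
  //   'avatar_file': {
  //     'filename': 'avatar.png',
  //     'contentType': <the file's content type, or null>,
  //     'length': 16,
  //   },
  // }
  print(formData.fields.map((f) => '${f.key}=${f.value}')); // (username=alice)
  print(formData.files.map((f) => f.value.filename));       // (avatar.png)
}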