feat: Support orientation change for session replay (#4194)
Session replay will adapt to screen size changes, which usually happen during orientation changes.
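
A minimal sketch of the idea behind the change, using illustrative stand-in types (`Frame` and `Segment` here are not the SDK's real `SentryReplayFrame`/`SentryVideoInfo`): consecutive frames that share a size are grouped into one segment, and a size change (typically a rotation) closes the current segment and starts a new one, which is why the video maker now returns an array of videos.

```swift
import CoreGraphics
import Foundation

// Illustrative stand-ins for the SDK's SentryReplayFrame / SentryVideoInfo.
struct Frame {
    let time: Date
    let size: CGSize // size of the captured screenshot
}

struct Segment {
    let start: Date
    let end: Date
    let size: CGSize
    let frameCount: Int
}

// Group consecutive frames of the same size into one segment. A size change
// (for example portrait to landscape) closes the running segment and opens a
// new one, so a rotation mid-session simply produces an extra video.
func splitIntoSegments(_ frames: [Frame]) -> [Segment] {
    var segments = [Segment]()
    var current = [Frame]()

    func flush() {
        guard let first = current.first, let last = current.last else { return }
        segments.append(Segment(start: first.time,
                                end: last.time,
                                size: first.size,
                                frameCount: current.count))
        current.removeAll()
    }

    for frame in frames {
        if let previous = current.last, previous.size != frame.size {
            flush()
        }
        current.append(frame)
    }
    flush()
    return segments
}
```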
brustolin committed Aug 5, 2024
1 parent 76c2ac7 commit 817009f
Showing 8 changed files with 234 additions and 180 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@

### Features

- Support orientation change for session replay (#4194)
- Replay for crashes (#4171)
- Redact web view from replay (#4203)
- Add beforeCaptureViewHierarchy callback (#4210)
@@ -32,6 +33,9 @@
### Fixes

- Session replay crash when writing the replay (#4186)

### Features

- Collect only unique UIWindow references (#4159)

### Deprecated
57 changes: 30 additions & 27 deletions Sources/Sentry/SentrySessionReplayIntegration.m
@@ -4,6 +4,7 @@

# import "SentryClient+Private.h"
# import "SentryDependencyContainer.h"
# import "SentryDispatchQueueWrapper.h"
# import "SentryDisplayLinkWrapper.h"
# import "SentryEvent+Private.h"
# import "SentryFileManager.h"
@@ -21,7 +22,6 @@
# import "SentrySwizzle.h"
# import "SentryUIApplication.h"
# import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

static NSString *SENTRY_REPLAY_FOLDER = @"replay";
@@ -77,6 +77,12 @@ - (BOOL)installWithOptions:(nonnull SentryOptions *)options
return YES;
}

/**
 * Send the cached frames from a previous session that ended in a crash.
* This function is called when processing an event created by SentryCrashIntegration,
* which runs in the background. That's why we don't need to dispatch the generation of the
* replay to the background in this function.
*/
- (void)resumePreviousSessionReplay:(SentryEvent *)event
{
NSURL *dir = [self replayDirectory];
@@ -114,39 +120,36 @@ - (void)resumePreviousSessionReplay:(SentryEvent *)event
}
}

_resumeReplayMaker = [[SentryOnDemandReplay alloc] initWithContentFrom:lastReplayURL.path];
_resumeReplayMaker.bitRate = _replayOptions.replayBitRate;
_resumeReplayMaker.videoScale = _replayOptions.sizeScale;
SentryOnDemandReplay *resumeReplayMaker =
[[SentryOnDemandReplay alloc] initWithContentFrom:lastReplayURL.path];
resumeReplayMaker.bitRate = _replayOptions.replayBitRate;
resumeReplayMaker.videoScale = _replayOptions.sizeScale;

NSDate *beginning = hasCrashInfo
? [NSDate dateWithTimeIntervalSinceReferenceDate:crashInfo.lastSegmentEnd]
: [_resumeReplayMaker oldestFrameDate];
: [resumeReplayMaker oldestFrameDate];

if (beginning == nil) {
return; // no frames to send
}

SentryReplayType _type = type;
int _segmentId = segmentId;

NSError *error;
if (![_resumeReplayMaker
createVideoWithBeginning:beginning
end:[beginning dateByAddingTimeInterval:duration]
outputFileURL:[lastReplayURL URLByAppendingPathComponent:@"lastVideo.mp4"]
error:&error
completion:^(SentryVideoInfo *video, NSError *renderError) {
if (renderError != nil) {
SENTRY_LOG_ERROR(
@"Could not create replay video: %@", renderError);
} else {
[self captureVideo:video
replayId:replayId
segmentId:segmentId
type:type];
}
self->_resumeReplayMaker = nil;
}]) {
NSArray<SentryVideoInfo *> *videos =
[resumeReplayMaker createVideoWithBeginning:beginning
end:[beginning dateByAddingTimeInterval:duration]
error:&error];
if (videos == nil) {
SENTRY_LOG_ERROR(@"Could not create replay video: %@", error);
return;
}
for (SentryVideoInfo *video in videos) {
[self captureVideo:video replayId:replayId segmentId:_segmentId++ type:_type];
// The 'buffer' replay type applies only to the first segment; later ones are sent as 'session'.
_type = SentryReplayTypeSession;
}

NSMutableDictionary *eventContext = event.context.mutableCopy;
eventContext[@"replay"] =
@@ -174,13 +177,15 @@ - (void)captureVideo:(SentryVideoInfo *)video

NSError *error = nil;
if (![[NSFileManager defaultManager] removeItemAtURL:video.path error:&error]) {
NSLog(@"[SentrySessionReplay:%d] Could not delete replay segment from disk: %@", __LINE__,
error.localizedDescription);
SENTRY_LOG_DEBUG(
@"Could not delete replay segment from disk: %@", error.localizedDescription);
}
}

- (void)startSession
{
[self.sessionReplay stop];

_startedAsFullSession = [self shouldReplayFullSession:_replayOptions.sessionSampleRate];

if (!_startedAsFullSession && _replayOptions.onErrorSampleRate == 0) {
@@ -247,6 +252,7 @@ - (void)startWithOptions:(SentryReplayOptions *)replayOptions
touchTracker:_touchTracker
dateProvider:SentryDependencyContainer.sharedInstance.dateProvider
delegate:self
dispatchQueue:[[SentryDispatchQueueWrapper alloc] init]
displayLinkWrapper:[[SentryDisplayLinkWrapper alloc] init]];

[self.sessionReplay
@@ -320,9 +326,6 @@ - (void)sentrySessionEnded:(SentrySession *)session

- (void)sentrySessionStarted:(SentrySession *)session
{
if (_sessionReplay) {
return;
}
[self startSession];
}

177 changes: 98 additions & 79 deletions Sources/Swift/Integrations/SessionReplay/SentryOnDemandReplay.swift
@@ -13,23 +13,16 @@ struct SentryReplayFrame {
let screenName: String?
}

private struct VideoFrames {
let framesPaths: [String]
let screens: [String]
let start: Date
let end: Date
}

enum SentryOnDemandReplayError: Error {
case cantReadVideoSize
case assetWriterNotReady
case cantCreatePixelBuffer
case errorRenderingVideo
}

@objcMembers
class SentryOnDemandReplay: NSObject, SentryReplayVideoMaker {

private let _outputPath: String
private var _currentPixelBuffer: SentryPixelBuffer?
private var _totalFrames = 0
private let dateProvider: SentryCurrentDateProvider
private let workingQueue: SentryDispatchQueueWrapper
@@ -42,16 +35,10 @@ class SentryOnDemandReplay: NSObject, SentryReplayVideoMaker {
set { _frames = newValue }
}
#endif // TEST || TESTCI || DEBUG

var videoWidth = 200
var videoHeight = 434
var videoScale: Float = 1
var bitRate = 20_000
var frameRate = 1
var cacheMaxSize = UInt.max

private var actualWidth: Int { Int(Float(videoWidth) * videoScale) }
private var actualHeight: Int { Int(Float(videoHeight) * videoScale) }

init(outputPath: String, workingQueue: SentryDispatchQueueWrapper, dateProvider: SentryCurrentDateProvider) {
self._outputPath = outputPath
@@ -85,10 +72,6 @@ class SentryOnDemandReplay: NSObject, SentryReplayVideoMaker {
self.init(withContentFrom: outputPath,
workingQueue: SentryDispatchQueueWrapper(name: "io.sentry.onDemandReplay", attributes: nil),
dateProvider: SentryCurrentDateProvider())

guard let last = _frames.last, let image = UIImage(contentsOfFile: last.imagePath) else { return }
videoWidth = Int(image.size.width)
videoHeight = Int(image.size.height)
}

func addFrameAsync(image: UIImage, forScreen: String?) {
@@ -140,92 +123,128 @@ class SentryOnDemandReplay: NSObject, SentryReplayVideoMaker {
return _frames.first?.time
}

func createVideoWith(beginning: Date, end: Date, outputFileURL: URL, completion: @escaping (SentryVideoInfo?, Error?) -> Void) throws {
var frameCount = 0
func createVideoWith(beginning: Date, end: Date) throws -> [SentryVideoInfo] {
let videoFrames = filterFrames(beginning: beginning, end: end)
if videoFrames.framesPaths.isEmpty { return }
var frameCount = 0

var videos = [SentryVideoInfo]()

while frameCount < videoFrames.count {
let outputFileURL = URL(fileURLWithPath: _outputPath.appending("/\(videoFrames[frameCount].time.timeIntervalSinceReferenceDate).mp4"))
if let videoInfo = try renderVideo(with: videoFrames, from: &frameCount, at: outputFileURL) {
videos.append(videoInfo)
} else {
                frameCount += 1
}
}
return videos
}

private func renderVideo(with videoFrames: [SentryReplayFrame], from: inout Int, at outputFileURL: URL) throws -> SentryVideoInfo? {
guard from < videoFrames.count, let image = UIImage(contentsOfFile: videoFrames[from].imagePath) else { return nil }
let videoWidth = image.size.width * CGFloat(videoScale)
let videoHeight = image.size.height * CGFloat(videoScale)

let videoWriter = try AVAssetWriter(url: outputFileURL, fileType: .mp4)
let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: createVideoSettings())
let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: createVideoSettings(width: videoWidth, height: videoHeight))

_currentPixelBuffer = SentryPixelBuffer(size: CGSize(width: actualWidth, height: actualHeight), videoWriterInput: videoWriterInput)
if _currentPixelBuffer == nil { return }
guard let currentPixelBuffer = SentryPixelBuffer(size: CGSize(width: videoWidth, height: videoHeight), videoWriterInput: videoWriterInput)
else { throw SentryOnDemandReplayError.cantCreatePixelBuffer }

videoWriter.add(videoWriterInput)
videoWriter.startWriting()
videoWriter.startSession(atSourceTime: .zero)

videoWriterInput.requestMediaDataWhenReady(on: workingQueue.queue) { [weak self] in
guard let self = self, videoWriter.status == .writing else {
var lastImageSize: CGSize = image.size
var usedFrames = [SentryReplayFrame]()
let group = DispatchGroup()

var result: Result<SentryVideoInfo?, Error>?
var frameCount = from

group.enter()
videoWriterInput.requestMediaDataWhenReady(on: workingQueue.queue) {
guard videoWriter.status == .writing else {
videoWriter.cancelWriting()
completion(nil, SentryOnDemandReplayError.assetWriterNotReady)
result = .failure(videoWriter.error ?? SentryOnDemandReplayError.errorRenderingVideo )
group.leave()
return
}

if frameCount < videoFrames.framesPaths.count {
let imagePath = videoFrames.framesPaths[frameCount]
if let image = UIImage(contentsOfFile: imagePath) {
let presentTime = CMTime(seconds: Double(frameCount), preferredTimescale: CMTimeScale(1 / self.frameRate))

guard self._currentPixelBuffer?.append(image: image, presentationTime: presentTime) == true
else {
completion(nil, videoWriter.error)
videoWriterInput.markAsFinished()
return
}
if frameCount >= videoFrames.count {
result = self.finishVideo(outputFileURL: outputFileURL, usedFrames: usedFrames, videoHeight: Int(videoHeight), videoWidth: Int(videoWidth), videoWriter: videoWriter)
group.leave()
return
}
let frame = videoFrames[frameCount]
if let image = UIImage(contentsOfFile: frame.imagePath) {
if lastImageSize != image.size {
result = self.finishVideo(outputFileURL: outputFileURL, usedFrames: usedFrames, videoHeight: Int(videoHeight), videoWidth: Int(videoWidth), videoWriter: videoWriter)
group.leave()
return
}
frameCount += 1
} else {
videoWriterInput.markAsFinished()
videoWriter.finishWriting {
var videoInfo: SentryVideoInfo?
if videoWriter.status == .completed {
do {
let fileAttributes = try FileManager.default.attributesOfItem(atPath: outputFileURL.path)
guard let fileSize = fileAttributes[FileAttributeKey.size] as? Int else {
completion(nil, SentryOnDemandReplayError.cantReadVideoSize)
return
}
videoInfo = SentryVideoInfo(path: outputFileURL, height: self.actualHeight, width: self.actualWidth, duration: TimeInterval(videoFrames.framesPaths.count / self.frameRate), frameCount: videoFrames.framesPaths.count, frameRate: self.frameRate, start: videoFrames.start, end: videoFrames.end, fileSize: fileSize, screens: videoFrames.screens)
} catch {
completion(nil, error)
}
}
completion(videoInfo, videoWriter.error)
lastImageSize = image.size

let presentTime = CMTime(seconds: Double(frameCount), preferredTimescale: CMTimeScale(1 / self.frameRate))
if currentPixelBuffer.append(image: image, presentationTime: presentTime) != true {
videoWriter.cancelWriting()
result = .failure(videoWriter.error ?? SentryOnDemandReplayError.errorRenderingVideo )
group.leave()
return
}
usedFrames.append(frame)
}
frameCount += 1
}
guard group.wait(timeout: .now() + 2) == .success else { throw SentryOnDemandReplayError.errorRenderingVideo }
from = frameCount

return try result?.get()
}

private func filterFrames(beginning: Date, end: Date) -> VideoFrames {
var framesPaths = [String]()

var screens = [String]()
private func finishVideo(outputFileURL: URL, usedFrames: [SentryReplayFrame], videoHeight: Int, videoWidth: Int, videoWriter: AVAssetWriter) -> Result<SentryVideoInfo?, Error> {
let group = DispatchGroup()
var finishError: Error?
var result: SentryVideoInfo?

var start = dateProvider.date()
var actualEnd = start
workingQueue.dispatchSync({
for frame in self._frames {
if frame.time < beginning { continue } else if frame.time > end { break }

if frame.time < start { start = frame.time }

if let screenName = frame.screenName {
screens.append(screenName)
group.enter()
videoWriter.inputs.forEach { $0.markAsFinished() }
videoWriter.finishWriting {
defer { group.leave() }
if videoWriter.status == .completed {
do {
let fileAttributes = try FileManager.default.attributesOfItem(atPath: outputFileURL.path)
guard let fileSize = fileAttributes[FileAttributeKey.size] as? Int else {
finishError = SentryOnDemandReplayError.cantReadVideoSize
return
}
guard let start = usedFrames.min(by: { $0.time < $1.time })?.time else { return }
let duration = TimeInterval(usedFrames.count / self.frameRate)
result = SentryVideoInfo(path: outputFileURL, height: Int(videoHeight), width: Int(videoWidth), duration: duration, frameCount: usedFrames.count, frameRate: self.frameRate, start: start, end: start.addingTimeInterval(duration), fileSize: fileSize, screens: usedFrames.compactMap({ $0.screenName }))
} catch {
finishError = error
}

actualEnd = frame.time
framesPaths.append(frame.imagePath)
}
}
group.wait()

if let finishError = finishError { return .failure(finishError) }
return .success(result)
}

private func filterFrames(beginning: Date, end: Date) -> [SentryReplayFrame] {
var frames = [SentryReplayFrame]()
        // Using the dispatch queue as the sync mechanism, since a queue is already needed to generate the video.
workingQueue.dispatchSync({
frames = self._frames.filter { $0.time >= beginning && $0.time <= end }
})
return VideoFrames(framesPaths: framesPaths, screens: screens, start: start, end: actualEnd + TimeInterval((1 / Double(frameRate))))
return frames
}

private func createVideoSettings() -> [String: Any] {
private func createVideoSettings(width: CGFloat, height: CGFloat) -> [String: Any] {
return [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: actualWidth,
AVVideoHeightKey: actualHeight,
AVVideoWidthKey: width,
AVVideoHeightKey: height,
AVVideoCompressionPropertiesKey: [
AVVideoAverageBitRateKey: bitRate,
AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel
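The `filterFrames` change above keeps the frame list consistent by running the filter on the same serial queue that already drives video generation, rather than adding a separate lock. A small self-contained sketch of that pattern, with names that are stand-ins and not the SDK's wrapper types:

```swift
import Foundation

final class FrameStore {
    // The serial queue that already exists for video work also acts as the
    // lock around the frame buffer.
    private let queue = DispatchQueue(label: "io.sentry.example.frames")
    private var frames = [Date]()

    func addFrameAsync(at time: Date) {
        queue.async { self.frames.append(time) }
    }

    func frames(from beginning: Date, to end: Date) -> [Date] {
        var result = [Date]()
        // A sync hop onto the same serial queue yields a consistent snapshot.
        queue.sync {
            result = self.frames.filter { $0 >= beginning && $0 <= end }
        }
        return result
    }
}
```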
@@ -4,12 +4,9 @@ import UIKit

@objc
protocol SentryReplayVideoMaker: NSObjectProtocol {
var videoWidth: Int { get set }
var videoHeight: Int { get set }

func addFrameAsync(image: UIImage, forScreen: String?)
func releaseFramesUntil(_ date: Date)
func createVideoWith(beginning: Date, end: Date, outputFileURL: URL, completion: @escaping (SentryVideoInfo?, Error?) -> Void) throws
func createVideoWith(beginning: Date, end: Date) throws -> [SentryVideoInfo]
}

extension SentryReplayVideoMaker {
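
For callers of `SentryReplayVideoMaker`, the completion-based `createVideoWith(beginning:end:outputFileURL:completion:)` is replaced by a throwing call that returns one `SentryVideoInfo` per constant-size run of frames. A hedged sketch of the new calling shape, using minimal stand-in types so it compiles on its own (the real protocol and video type live in the SDK):

```swift
import Foundation

// Minimal stand-ins for the SDK's SentryReplayVideoMaker / SentryVideoInfo.
struct VideoInfo {
    let path: URL
    let start: Date
    let end: Date
}

protocol ReplayVideoMaker {
    func createVideoWith(beginning: Date, end: Date) throws -> [VideoInfo]
}

func sendReplay(using maker: ReplayVideoMaker, from start: Date, to end: Date) {
    do {
        // One call may now yield several videos: a new one starts whenever the
        // captured frame size changes, for example after a rotation.
        let videos = try maker.createVideoWith(beginning: start, end: end)
        for (segmentId, video) in videos.enumerated() {
            print("capturing segment \(segmentId): \(video.path.lastPathComponent)")
        }
    } catch {
        print("Could not create replay video: \(error)")
    }
}
```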