diff --git a/LayoutTests/fast/mediastream/image-capture-take-photo-expected.txt b/LayoutTests/fast/mediastream/image-capture-take-photo-expected.txt
new file mode 100644
index 000000000000..d53c7f2f9931
--- /dev/null
+++ b/LayoutTests/fast/mediastream/image-capture-take-photo-expected.txt
@@ -0,0 +1,7 @@
+
+PASS takePhoto() on an 'ended' track should throw "InvalidStateError"
+PASS "OperationError" should be thrown if the track ends before the 'takePhoto' promise resolves
+PASS Image returned by 'takePhoto' should be at least as big as { photoSettings.imageHeight, photoSettings.imageWidth }
+PASS If 'takePhoto' has to reconfigure capture track, 'mute' and 'unmute' should fire and track size should be restored
+PASS applyConstraints should not run until 'takePhoto' has completed
+
diff --git a/LayoutTests/fast/mediastream/image-capture-take-photo.html b/LayoutTests/fast/mediastream/image-capture-take-photo.html
new file mode 100644
index 000000000000..9a4083dffa91
--- /dev/null
+++ b/LayoutTests/fast/mediastream/image-capture-take-photo.html
@@ -0,0 +1,111 @@
+
+
+
+
+ ImageCapture takePhoto
+
+
+
+
+
+
+
diff --git a/Source/WebCore/Modules/mediastream/ImageCapture.cpp b/Source/WebCore/Modules/mediastream/ImageCapture.cpp
index 5b21b76e9458..2a1399178ca8 100644
--- a/Source/WebCore/Modules/mediastream/ImageCapture.cpp
+++ b/Source/WebCore/Modules/mediastream/ImageCapture.cpp
@@ -28,7 +28,9 @@
#if ENABLE(MEDIA_STREAM)
+#include "JSBlob.h"
#include "JSPhotoCapabilities.h"
+#include "TaskSource.h"
#include
namespace WebCore {
@@ -53,6 +55,18 @@ ImageCapture::ImageCapture(Document& document, Ref track)
ImageCapture::~ImageCapture() = default;
+void ImageCapture::takePhoto(PhotoSettings&& settings, DOMPromiseDeferred<IDLInterface<Blob>>&& promise)
+{
+ m_track->takePhoto(WTFMove(settings))->whenSettled(RunLoop::main(), [protectedThis = Ref { *this }, promise = WTFMove(promise)] (auto&& result) mutable {
+ queueTaskKeepingObjectAlive(protectedThis.get(), TaskSource::ImageCapture, [promise = WTFMove(promise), result = WTFMove(result), protectedThis] () mutable {
+ if (!result)
+ promise.reject(WTFMove(result.error()));
+ else
+ promise.resolve(Blob::create(protectedThis->scriptExecutionContext(), WTFMove(get<0>(result.value())), WTFMove(get<1>(result.value()))));
+ });
+ });
+}
+
void ImageCapture::getPhotoCapabilities(PhotoCapabilitiesPromise&& promise)
{
if (m_track->readyState() == MediaStreamTrack::State::Ended) {
diff --git a/Source/WebCore/Modules/mediastream/ImageCapture.h b/Source/WebCore/Modules/mediastream/ImageCapture.h
index ff03eda74dc8..c7f91bb72e85 100644
--- a/Source/WebCore/Modules/mediastream/ImageCapture.h
+++ b/Source/WebCore/Modules/mediastream/ImageCapture.h
@@ -28,6 +28,7 @@
#if ENABLE(MEDIA_STREAM)
#include "ActiveDOMObject.h"
+#include "Blob.h"
#include "Document.h"
#include "JSDOMPromiseDeferred.h"
#include "MediaStreamTrack.h"
@@ -43,6 +44,8 @@ class ImageCapture : public RefCounted, public ActiveDOMObject {
~ImageCapture();
+ void takePhoto(PhotoSettings&&, DOMPromiseDeferred<IDLInterface<Blob>>&&);
+
using PhotoCapabilitiesPromise = DOMPromiseDeferred>;
void getPhotoCapabilities(PhotoCapabilitiesPromise&&);
diff --git a/Source/WebCore/Modules/mediastream/ImageCapture.idl b/Source/WebCore/Modules/mediastream/ImageCapture.idl
index 0dd7ff529f56..16217dbffbe8 100644
--- a/Source/WebCore/Modules/mediastream/ImageCapture.idl
+++ b/Source/WebCore/Modules/mediastream/ImageCapture.idl
@@ -34,8 +34,7 @@
Promise getPhotoCapabilities();
- // FIXME: https://bugs.webkit.org/show_bug.cgi?id=262467
- // Promise takePhoto(optional PhotoSettings photoSettings = {});
+ [NewObject] Promise<Blob> takePhoto(optional PhotoSettings photoSettings = {});
Promise getPhotoSettings();
diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
index eeeac843e9af..80b1f67fd96d 100644
--- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
+++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp
@@ -35,6 +35,7 @@
#include "Event.h"
#include "EventNames.h"
#include "FrameLoader.h"
+#include "JSBlob.h"
#include "JSDOMPromiseDeferred.h"
#include "JSMeteringMode.h"
#include "JSOverconstrainedError.h"
@@ -61,6 +62,7 @@
#include
#include
#include
+#include <wtf/NativePromise.h>
namespace WebCore {
@@ -314,6 +316,47 @@ MediaStreamTrack::TrackCapabilities MediaStreamTrack::getCapabilities() const
return result;
}
+void MediaStreamTrack::queueAndProcessSerialAction(Function<Ref<GenericPromise>()>&& action)
+{
+ ASSERT(isMainThread());
+ m_pendingActions = m_pendingActions->isResolved() ? action() : m_pendingActions->whenSettled(RunLoop::main(), WTFMove(action));
+}
+
+auto MediaStreamTrack::takePhoto(PhotoSettings&& settings) -> Ref<TakePhotoPromise>
+{
+ TakePhotoPromise::Producer producer;
+ Ref<TakePhotoPromise> promise = producer;
+
+ queueAndProcessSerialAction([settings = WTFMove(settings), protectedThis = Ref { *this }, producer = WTFMove(producer)]() mutable -> Ref<GenericPromise> {
+ // https://w3c.github.io/mediacapture-image/#dom-imagecapture-takephoto
+ // If the readyState of track provided in the constructor is not live, return
+ // a promise rejected with a new DOMException whose name is InvalidStateError,
+ // and abort these steps.
+ if (protectedThis->m_ended || protectedThis->m_readyState != State::Live) {
+ producer.reject(Exception { InvalidStateError, "Track has ended"_s });
+ return GenericPromise::createAndResolve();
+ }
+ return protectedThis->m_private->takePhoto(WTFMove(settings))->whenSettled(RunLoop::main(),
+ [protectedThis = WTFMove(protectedThis), producer = WTFMove(producer)] (auto&& result) mutable {
+
+ // https://w3c.github.io/mediacapture-image/#dom-imagecapture-takephoto
+ // If the operation cannot be completed for any reason (for example, upon
+ // invocation of multiple takePhoto() method calls in rapid succession),
+ // then reject p with a new DOMException whose name is UnknownError, and
+ // abort these steps.
+ if (!result)
+ producer.reject(Exception { UnknownError, WTFMove(result.error()) });
+ else if (RefPtr context = protectedThis->scriptExecutionContext(); !context || context->activeDOMObjectsAreStopped() || protectedThis->m_ended)
+ producer.reject(Exception { OperationError, "Track has ended"_s });
+ else
+ producer.resolve(WTFMove(result.value()));
+ return GenericPromise::createAndResolve();
+ });
+ });
+
+ return promise;
+}
+
void MediaStreamTrack::getPhotoCapabilities(DOMPromiseDeferred>&& promise) const
{
m_private->getPhotoCapabilities([protectedThis = Ref { *this }, promise = WTFMove(promise)](auto&& result) mutable {
@@ -367,15 +410,27 @@ static MediaConstraints createMediaConstraints(const std::optional& constraints, DOMPromiseDeferred&& promise)
{
- auto completionHandler = [this, protectedThis = Ref { *this }, constraints, promise = WTFMove(promise)](auto&& error) mutable {
- if (error) {
- promise.rejectType>(OverconstrainedError::create(WTFMove(error->badConstraint), WTFMove(error->message)));
- return;
+ queueAndProcessSerialAction([protectedThis = Ref { *this }, constraints, domPromise = WTFMove(promise)]() mutable {
+ if (protectedThis->m_ended) {
+ domPromise.reject(Exception { InvalidAccessError, "Track has ended"_s });
+ return GenericPromise::createAndResolve();
}
- promise.resolve();
- m_constraints = valueOrDefault(constraints);
- };
- m_private->applyConstraints(createMediaConstraints(constraints), WTFMove(completionHandler));
+ GenericPromise::Producer producer;
+ Ref<GenericPromise> nativePromise = producer;
+
+ protectedThis->m_private->applyConstraints(createMediaConstraints(constraints), [protectedThis = WTFMove(protectedThis), constraints, domPromise = WTFMove(domPromise), producer = WTFMove(producer)](auto&& error) mutable {
+ if (error) {
+ domPromise.rejectType>(OverconstrainedError::create(WTFMove(error->badConstraint), WTFMove(error->message)));
+ producer.resolve();
+ return;
+ }
+
+ protectedThis->m_constraints = valueOrDefault(constraints);
+ domPromise.resolve();
+ producer.resolve();
+ });
+ return nativePromise;
+ });
}
void MediaStreamTrack::addObserver(Observer& observer)
diff --git a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
index 1df27635dba0..7b83bb596561 100644
--- a/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
+++ b/Source/WebCore/Modules/mediastream/MediaStreamTrack.h
@@ -30,10 +30,10 @@
#if ENABLE(MEDIA_STREAM)
#include "ActiveDOMObject.h"
-#include "DoubleRange.h"
+#include "Blob.h"
#include "EventTarget.h"
#include "IDLTypes.h"
-#include "LongRange.h"
+#include "JSDOMPromiseDeferred.h"
#include "MediaProducer.h"
#include "MediaStreamTrackPrivate.h"
#include "MediaTrackCapabilities.h"
@@ -41,6 +41,7 @@
#include "PhotoCapabilities.h"
#include "PhotoSettings.h"
#include "PlatformMediaSession.h"
+#include <wtf/NativePromise.h>
#include
namespace WebCore {
@@ -50,8 +51,6 @@ class Document;
struct MediaTrackConstraints;
-template<typename IDLType> class DOMPromiseDeferred;
-
class MediaStreamTrack
: public RefCounted
, public ActiveDOMObject
@@ -130,11 +129,14 @@ class MediaStreamTrack
using TrackCapabilities = MediaTrackCapabilities;
TrackCapabilities getCapabilities() const;
+ using TakePhotoPromise = NativePromise<std::pair<Vector<uint8_t>, String>, Exception>;
+ Ref<TakePhotoPromise> takePhoto(PhotoSettings&&);
void getPhotoCapabilities(DOMPromiseDeferred>&&) const;
void getPhotoSettings(DOMPromiseDeferred>&&) const;
const MediaTrackConstraints& getConstraints() const { return m_constraints; }
void setConstraints(MediaTrackConstraints&& constraints) { m_constraints = WTFMove(constraints); }
+
void applyConstraints(const std::optional&, DOMPromiseDeferred&&);
RealtimeMediaSource& source() const { return m_private->source(); }
@@ -203,6 +205,10 @@ class MediaStreamTrack
WTFLogChannel& logChannel() const final;
#endif
+ using SerialAction = Function<Ref<GenericPromise>()>;
+ void queueAndProcessSerialAction(SerialAction&&);
+ Ref<GenericPromise> m_pendingActions { GenericPromise::createAndResolve() };
+
Vector m_observers;
MediaTrackConstraints m_constraints;
diff --git a/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h b/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h
index 8ae3575e120b..55a5034c21ed 100644
--- a/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h
+++ b/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h
@@ -95,10 +95,12 @@ SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureDevice)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureDeviceFormat)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureDeviceInput)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureOutput)
+SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCapturePhotoSettings)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureSession)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVFrameRateRange)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCaptureDeviceDiscoverySession)
+SOFT_LINK_CLASS_FOR_HEADER(PAL, AVCapturePhotoOutput)
#endif
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
@@ -204,6 +206,8 @@ SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVFileTypeQuickTimeMovie, NSStr
#define AVFileTypeQuickTimeMovie PAL::get_AVFoundation_AVFileTypeQuickTimeMovie()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoCodecKey, NSString *)
#define AVVideoCodecKey PAL::get_AVFoundation_AVVideoCodecKey()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoCodecTypeJPEG, NSString *)
+#define AVVideoCodecTypeJPEG PAL::get_AVFoundation_AVVideoCodecTypeJPEG()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoCodecH264, NSString *)
#define AVVideoCodecH264 PAL::get_AVFoundation_AVVideoCodecH264()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoWidthKey, NSString *)
@@ -222,6 +226,8 @@ SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoProfileLevelH264MainAuto
#define AVVideoProfileLevelH264MainAutoLevel PAL::get_AVFoundation_AVVideoProfileLevelH264MainAutoLevel()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoCompressionPropertiesKey, NSString *)
#define AVVideoCompressionPropertiesKey PAL::get_AVFoundation_AVVideoCompressionPropertiesKey()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, AVFoundation, AVVideoQualityKey, NSString *)
+#define AVVideoQualityKey PAL::get_AVFoundation_AVVideoQualityKey()
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_HEADER(PAL, AVFoundation, AVEncoderBitRateKey, NSString *)
#define AVEncoderBitRateKey PAL::get_AVFoundation_AVEncoderBitRateKey()
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_HEADER(PAL, AVFoundation, AVFormatIDKey, NSString *)
diff --git a/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm b/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm
index 380a08523796..8f48ac2fea03 100644
--- a/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm
+++ b/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm
@@ -125,10 +125,12 @@ static BOOL justReturnsNO()
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureDeviceFormat, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureDeviceInput, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureOutput, PAL_EXPORT)
+SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCapturePhotoSettings, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureSession, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureVideoDataOutput, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVFrameRateRange, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureDeviceDiscoverySession, PAL_EXPORT)
+SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCapturePhotoOutput, PAL_EXPORT)
#endif
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVAssetExportPresetHighestQuality, NSString *, PAL_EXPORT)
@@ -140,6 +142,7 @@ static BOOL justReturnsNO()
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVCaptureDeviceWasDisconnectedNotification, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVFileTypeMPEG4, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVFileTypeQuickTimeMovie, NSString *, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoCodecTypeJPEG, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVLayerVideoGravityResize, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVLayerVideoGravityResizeAspect, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *, PAL_EXPORT)
@@ -199,6 +202,7 @@ static BOOL justReturnsNO()
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoAverageBitRateKey, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoCodecH264, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoCodecKey, NSString *, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoQualityKey, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoCompressionPropertiesKey, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoExpectedSourceFrameRateKey, NSString *, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVVideoHeightKey, NSString *, PAL_EXPORT)
diff --git a/Source/WebCore/dom/TaskSource.h b/Source/WebCore/dom/TaskSource.h
index fec6804b4135..fdc0c971e5b3 100644
--- a/Source/WebCore/dom/TaskSource.h
+++ b/Source/WebCore/dom/TaskSource.h
@@ -35,6 +35,7 @@ enum class TaskSource : uint8_t {
Gamepad,
Geolocation,
IdleTask,
+ ImageCapture,
IndexedDB,
MediaElement,
Microtask,
diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
index 8ac220429642..af504aa9c78a 100644
--- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
+++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp
@@ -199,6 +199,11 @@ Ref MediaStreamTrackPrivate::ge
return m_source->getPhotoSettings();
}
+Ref<RealtimeMediaSource::TakePhotoNativePromise> MediaStreamTrackPrivate::takePhoto(PhotoSettings&& settings)
+{
+ return m_source->takePhoto(WTFMove(settings));
+}
+
void MediaStreamTrackPrivate::applyConstraints(const MediaConstraints& constraints, RealtimeMediaSource::ApplyConstraintsHandler&& completionHandler)
{
m_source->applyConstraints(constraints, WTFMove(completionHandler));
diff --git a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
index ff31b85bc1e2..e434bd00f62c 100644
--- a/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
+++ b/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h
@@ -111,6 +111,7 @@ class MediaStreamTrackPrivate final
WEBCORE_EXPORT const RealtimeMediaSourceSettings& settings() const;
const RealtimeMediaSourceCapabilities& capabilities() const;
+ Ref<RealtimeMediaSource::TakePhotoNativePromise> takePhoto(PhotoSettings&&);
void getPhotoCapabilities(RealtimeMediaSource::PhotoCapabilitiesHandler&&);
Ref getPhotoSettings();
diff --git a/Source/WebCore/platform/mediastream/PhotoSettings.h b/Source/WebCore/platform/mediastream/PhotoSettings.h
index 3e3a15ef41c8..13132033a266 100644
--- a/Source/WebCore/platform/mediastream/PhotoSettings.h
+++ b/Source/WebCore/platform/mediastream/PhotoSettings.h
@@ -39,6 +39,31 @@ struct PhotoSettings {
std::optional redEyeReduction;
};
+inline bool operator==(const PhotoSettings& a, const PhotoSettings& b)
+{
+ if (!!a.fillLightMode != !!b.fillLightMode)
+ return false;
+ if (a.fillLightMode && *a.fillLightMode != *b.fillLightMode)
+ return false;
+
+ if (!!a.imageHeight != !!b.imageHeight)
+ return false;
+ if (a.imageHeight && *a.imageHeight != *b.imageHeight)
+ return false;
+
+ if (!!a.imageWidth != !!b.imageWidth)
+ return false;
+ if (a.imageWidth && *a.imageWidth != *b.imageWidth)
+ return false;
+
+ if (!!a.redEyeReduction != !!b.redEyeReduction)
+ return false;
+ if (a.redEyeReduction && *a.redEyeReduction != *b.redEyeReduction)
+ return false;
+
+ return true;
+}
+
}
#endif
diff --git a/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp b/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp
index 9ed1e451cc76..d4babce73d72 100644
--- a/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp
+++ b/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp
@@ -1456,6 +1456,11 @@ void RealtimeMediaSource::setType(Type type)
});
}
+auto RealtimeMediaSource::takePhoto(PhotoSettings&&) -> Ref<TakePhotoNativePromise>
+{
+ return TakePhotoNativePromise::createAndReject("Not supported"_s);
+}
+
void RealtimeMediaSource::getPhotoCapabilities(PhotoCapabilitiesHandler&& completion)
{
completion(PhotoCapabilitiesOrError("Not supported"_s));
diff --git a/Source/WebCore/platform/mediastream/RealtimeMediaSource.h b/Source/WebCore/platform/mediastream/RealtimeMediaSource.h
index 15a84653578b..8f23b730899d 100644
--- a/Source/WebCore/platform/mediastream/RealtimeMediaSource.h
+++ b/Source/WebCore/platform/mediastream/RealtimeMediaSource.h
@@ -52,6 +52,7 @@
#include
#include
#include
+#include
#include
#include
#include
@@ -211,6 +212,9 @@ class WEBCORE_EXPORT RealtimeMediaSource
virtual void deref() const = 0;
virtual ThreadSafeWeakPtrControlBlock& controlBlock() const = 0;
+ using TakePhotoNativePromise = NativePromise<std::pair<Vector<uint8_t>, String>, String>;
+ virtual Ref<TakePhotoNativePromise> takePhoto(PhotoSettings&&);
+
using PhotoCapabilitiesHandler = CompletionHandler;
virtual void getPhotoCapabilities(PhotoCapabilitiesHandler&&);
diff --git a/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.cpp b/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.cpp
index 171b24f18518..87e2a6b7fcd4 100644
--- a/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.cpp
+++ b/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2018-2022 Apple Inc. All rights reserved.
+ * Copyright (C) 2018-2023 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -396,7 +396,7 @@ void RealtimeVideoCaptureSource::clientUpdatedSizeFrameRateAndZoom(std::optional
setSizeFrameRateAndZoom(width, height, frameRate, zoom);
}
-void RealtimeVideoCaptureSource::setSizeFrameRateAndZoom(std::optional width, std::optional height, std::optional frameRate, std::optional zoom)
+std::optional RealtimeVideoCaptureSource::bestSupportedSizeFrameRateAndZoomConsideringObservers(std::optional width, std::optional height, std::optional frameRate, std::optional zoom)
{
auto& settings = this->settings();
@@ -412,20 +412,105 @@ void RealtimeVideoCaptureSource::setSizeFrameRateAndZoom(std::optional widt
}
if (!width && !height && !frameRate && !zoom)
- return;
+ return { };
+
+ return bestSupportedSizeFrameRateAndZoom(width, height, frameRate, zoom);
+}
- auto match = bestSupportedSizeFrameRateAndZoom(width, height, frameRate, zoom);
+void RealtimeVideoCaptureSource::setSizeFrameRateAndZoom(std::optional width, std::optional height, std::optional frameRate, std::optional zoom)
+{
+ auto match = bestSupportedSizeFrameRateAndZoomConsideringObservers(width, height, frameRate, zoom);
ERROR_LOG_IF(loggerPtr() && !match, LOGIDENTIFIER, "unable to find a preset that would match the size, frame rate and zoom");
if (!match)
return;
m_currentPreset = match->encodingPreset;
+ auto newSize = match->encodingPreset->size();
setFrameRateAndZoomWithPreset(match->requestedFrameRate, match->requestedZoom, WTFMove(match->encodingPreset));
- setSize(match->encodingPreset->size());
+ setSize(newSize);
setFrameRate(match->requestedFrameRate);
setZoom(match->requestedZoom);
}
+auto RealtimeVideoCaptureSource::takePhotoInternal(PhotoSettings&&) -> Ref<TakePhotoNativePromise>
+{
+ return TakePhotoNativePromise::createAndReject("Not supported"_s);
+}
+
+auto RealtimeVideoCaptureSource::takePhoto(PhotoSettings&& photoSettings) -> Ref<TakePhotoNativePromise>
+{
+ ASSERT(isMainThread());
+
+ if ((photoSettings.imageHeight && !photoSettings.imageWidth) || (!photoSettings.imageHeight && photoSettings.imageWidth)) {
+ IntSize sanitizedSize;
+ if (photoSettings.imageHeight)
+ sanitizedSize.setHeight(*photoSettings.imageHeight);
+ if (photoSettings.imageWidth)
+ sanitizedSize.setWidth(*photoSettings.imageWidth);
+
+ auto intrinsicSize = this->intrinsicSize();
+ if (!sanitizedSize.height())
+ sanitizedSize.setHeight(sanitizedSize.width() * (intrinsicSize.height() / static_cast(intrinsicSize.width())));
+ else if (!sanitizedSize.width())
+ sanitizedSize.setWidth(sanitizedSize.height() * (intrinsicSize.width() / static_cast(intrinsicSize.height())));
+
+ photoSettings.imageHeight = sanitizedSize.height();
+ photoSettings.imageWidth = sanitizedSize.width();
+ }
+
+ std::optional newPresetForPhoto;
+ if (photoSettings.imageHeight || photoSettings.imageWidth) {
+ newPresetForPhoto = bestSupportedSizeFrameRateAndZoomConsideringObservers(photoSettings.imageWidth, photoSettings.imageHeight, { }, { });
+ ERROR_LOG_IF(loggerPtr() && !newPresetForPhoto, LOGIDENTIFIER, "unable to find a preset to match the size of requested photo, using current preset");
+
+ if (newPresetForPhoto && m_currentPreset && m_currentPreset->size() == newPresetForPhoto->encodingPreset->size())
+ newPresetForPhoto = { };
+ }
+
+ std::optional configurationToRestore;
+ if (newPresetForPhoto) {
+ configurationToRestore = {
+ { m_currentPreset },
+ size(),
+ frameRate(),
+ zoom()
+ };
+
+ // 3.2.2 - Devices MAY temporarily stop streaming data, reconfigure themselves with the appropriate photo
+ // settings, take the photo, and then resume streaming. In this case, the stopping and restarting of
+ // streaming SHOULD cause onmute and onunmute events to fire on the track in question.
+ if (!muted()) {
+ setMuted(true);
+ m_mutedForPhotoCapture = true;
+ }
+
+ m_currentPreset = newPresetForPhoto->encodingPreset;
+ auto newSize = newPresetForPhoto->encodingPreset->size();
+ setFrameRateAndZoomWithPreset(newPresetForPhoto->requestedFrameRate, newPresetForPhoto->requestedZoom, WTFMove(newPresetForPhoto->encodingPreset));
+ setSize(newSize);
+ }
+
+ return takePhotoInternal(WTFMove(photoSettings))->whenSettled(RunLoop::main(), [this, protectedThis = Ref { *this }, configurationToRestore = WTFMove(configurationToRestore)] (auto&& result) mutable {
+
+ ASSERT(isMainThread());
+
+ if (configurationToRestore) {
+ m_currentPreset = configurationToRestore->encodingPreset;
+ auto newSize = configurationToRestore->encodingPreset->size();
+ setFrameRateAndZoomWithPreset(configurationToRestore->requestedFrameRate, configurationToRestore->requestedZoom, WTFMove(configurationToRestore->encodingPreset));
+ setSize(newSize);
+ if (m_mutedForPhotoCapture) {
+ m_mutedForPhotoCapture = false;
+ setMuted(false);
+ }
+ }
+
+ // FIXME: Resize image if preset size doesn't match requested size.
+
+ return TakePhotoNativePromise::createAndSettle(WTFMove(result));
+ });
+}
+
void RealtimeVideoCaptureSource::ensureIntrinsicSizeMaintainsAspectRatio()
{
auto intrinsicSize = this->intrinsicSize();
diff --git a/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.h b/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.h
index c10bacdb1c5b..a8b40b3d380e 100644
--- a/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.h
+++ b/Source/WebCore/platform/mediastream/RealtimeVideoCaptureSource.h
@@ -53,8 +53,6 @@ class WEBCORE_EXPORT RealtimeVideoCaptureSource : public RealtimeMediaSource, pu
void ensureIntrinsicSizeMaintainsAspectRatio();
- const std::optional currentPreset() const { return m_currentPreset; }
-
void ref() const final;
void deref() const final;
ThreadSafeWeakPtrControlBlock& controlBlock() const final;
@@ -81,6 +79,9 @@ class WEBCORE_EXPORT RealtimeVideoCaptureSource : public RealtimeMediaSource, pu
static std::span standardVideoSizes();
+ virtual Ref<TakePhotoNativePromise> takePhotoInternal(PhotoSettings&&);
+ bool mutedForPhotoCapture() const { return m_mutedForPhotoCapture; }
+
private:
struct CaptureSizeFrameRateAndZoom {
std::optional encodingPreset;
@@ -92,10 +93,13 @@ class WEBCORE_EXPORT RealtimeVideoCaptureSource : public RealtimeMediaSource, pu
enum class TryPreservingSize { No, Yes };
std::optional bestSupportedSizeFrameRateAndZoom(std::optional width, std::optional height, std::optional, std::optional, TryPreservingSize = TryPreservingSize::Yes);
+ std::optional bestSupportedSizeFrameRateAndZoomConsideringObservers(std::optional width, std::optional height, std::optional, std::optional);
bool presetSupportsFrameRate(const VideoPreset&, double);
bool presetSupportsZoom(const VideoPreset&, double);
+ Ref<TakePhotoNativePromise> takePhoto(PhotoSettings&&) final;
+
#if !RELEASE_LOG_DISABLED
const char* logClassName() const override { return "RealtimeVideoCaptureSource"; }
#endif
@@ -104,6 +108,7 @@ class WEBCORE_EXPORT RealtimeVideoCaptureSource : public RealtimeMediaSource, pu
Vector m_presets;
Deque m_observedFrameTimeStamps;
double m_observedFrameRate { 0 };
+ bool m_mutedForPhotoCapture { false };
};
struct SizeFrameRateAndZoom {
diff --git a/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h b/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h
index 769777f948cc..c93b04034d1f 100644
--- a/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h
+++ b/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h
@@ -31,7 +31,9 @@
#include "OrientationNotifier.h"
#include "RealtimeVideoCaptureSource.h"
#include "Timer.h"
+#include
#include
+#include
#include
typedef struct opaqueCMSampleBuffer* CMSampleBufferRef;
@@ -39,7 +41,11 @@ typedef struct opaqueCMSampleBuffer* CMSampleBufferRef;
OBJC_CLASS AVCaptureConnection;
OBJC_CLASS AVCaptureDevice;
OBJC_CLASS AVCaptureDeviceFormat;
+OBJC_CLASS AVCapturePhoto;
+OBJC_CLASS AVCapturePhotoOutput;
+OBJC_CLASS AVCapturePhotoSettings;
OBJC_CLASS AVCaptureOutput;
+OBJC_CLASS AVCaptureResolvedPhotoSettings;
OBJC_CLASS AVCaptureSession;
OBJC_CLASS AVCaptureVideoDataOutput;
OBJC_CLASS AVFrameRateRange;
@@ -71,6 +77,7 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
void captureSessionRuntimeError(RetainPtr);
void captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef, AVCaptureConnection*);
void captureDeviceSuspendedDidChange();
+ void captureOutputDidFinishProcessingPhoto(RetainPtr, RetainPtr, RetainPtr);
private:
AVVideoCaptureSource(AVCaptureDevice*, const CaptureDevice&, MediaDeviceHashSalts&&, PageIdentifier);
@@ -84,6 +91,7 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
const RealtimeMediaSourceCapabilities& capabilities() final;
const RealtimeMediaSourceSettings& settings() final;
+ Ref<TakePhotoNativePromise> takePhotoInternal(PhotoSettings&&) final;
void getPhotoCapabilities(PhotoCapabilitiesHandler&&) final;
Ref getPhotoSettings() final;
double facingModeFitnessScoreAdjustment() const final;
@@ -107,6 +115,7 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
bool setPreset(NSString*);
void computeVideoFrameRotation();
AVFrameRateRange* frameDurationForFrameRate(double);
+ void stopSession();
// OrientationNotifier::Observer API
void orientationChanged(IntDegrees orientation) final;
@@ -137,9 +146,14 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
void updateWhiteBalanceMode();
void updateTorch();
+ void rejectPendingPhotoRequest(const String&);
+ void resolvePendingPhotoRequest(Vector&&, const String&);
+ RetainPtr photoConfiguration(const PhotoSettings&);
+ IntSize maxPhotoSizeForCurrentPreset(IntSize requestedSize) const;
+ AVCapturePhotoOutput* photoOutput();
+
RefPtr m_buffer;
RetainPtr m_videoOutput;
- std::unique_ptr m_imageTransferSession;
IntDegrees m_sensorOrientation { 0 };
IntDegrees m_deviceOrientation { 0 };
@@ -148,11 +162,14 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
std::optional m_currentSettings;
std::optional m_capabilities;
std::optional m_photoCapabilities;
- std::optional m_photoSettings;
RetainPtr m_objcObserver;
RetainPtr m_session;
RetainPtr m_device;
+ RetainPtr m_photoOutput WTF_GUARDED_BY_CAPABILITY(RunLoop::main());
+ std::unique_ptr m_photoProducer WTF_GUARDED_BY_LOCK(m_photoLock);
+
+ Lock m_photoLock;
std::optional m_currentPreset;
std::optional m_appliedPreset;
RetainPtr m_appliedFrameRateRange;
@@ -172,7 +189,7 @@ class AVVideoCaptureSource : public RealtimeVideoCaptureSource, private Orientat
Timer m_verifyCapturingTimer;
uint64_t m_framesCount { 0 };
uint64_t m_lastFramesCount { 0 };
- int64_t m_defaultTorchMode;
+ int64_t m_defaultTorchMode { 0 };
};
} // namespace WebCore
diff --git a/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm b/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
index 77f7267c5d35..4755a1c42bf8 100644
--- a/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
+++ b/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
@@ -42,6 +42,7 @@
#import
#import
#import
+#import
#import
#import
#import
@@ -49,6 +50,7 @@
#import
#import
#import
+#import
#import "CoreVideoSoftLink.h"
#import
@@ -56,7 +58,23 @@
using namespace WebCore;
-@interface WebCoreAVVideoCaptureSourceObserver : NSObject {
+@interface AVCaptureDeviceFormat (AVCaptureDeviceFormat_New_API)
+@property (nonatomic, readonly) NSArray *supportedMaxPhotoDimensions;
+@end
+
+@interface AVCapturePhotoSettings (AVCapturePhotoSettings_New_API)
+@property (nonatomic) CMVideoDimensions maxPhotoDimensions;
+@end
+
+@interface AVCapturePhotoOutput (AVCapturePhotoOutput_New_API)
+@property (nonatomic) CMVideoDimensions maxPhotoDimensions;
+@end
+
+@interface NSValue (NSValueCMVideoDimensionsExtensions_New_API)
+@property (readonly) CMVideoDimensions CMVideoDimensionsValue;
+@end
+
+@interface WebCoreAVVideoCaptureSourceObserver : NSObject {
AVVideoCaptureSource* m_callback;
}
@@ -72,6 +90,7 @@ -(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
-(void)deviceConnectedDidChange:(NSNotification*)notification;
#endif
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error;
@end
namespace WebCore {
@@ -95,6 +114,44 @@ static dispatch_queue_t globaVideoCaptureSerialQueue()
return globalQueue;
}
+static FillLightMode toFillLightMode(AVCaptureTorchMode mode)
+{
+ switch (mode) {
+ case AVCaptureTorchModeOff:
+ return FillLightMode::Off;
+ break;
+ case AVCaptureTorchModeOn:
+ return FillLightMode::Flash;
+ break;
+ case AVCaptureTorchModeAuto:
+ return FillLightMode::Auto;
+ break;
+ }
+
+ ASSERT_NOT_REACHED();
+ return FillLightMode::Auto;
+}
+
+#if PLATFORM(IOS_FAMILY)
+static AVCaptureFlashMode toAVCaptureFlashMode(FillLightMode mode)
+{
+ switch (mode) {
+ case FillLightMode::Off:
+ return AVCaptureFlashModeOff;
+ break;
+ case FillLightMode::Flash:
+ return AVCaptureFlashModeOn;
+ break;
+ case FillLightMode::Auto:
+ return AVCaptureFlashModeAuto;
+ break;
+ }
+
+ ASSERT_NOT_REACHED();
+ return AVCaptureFlashModeAuto;
+}
+#endif
+
static AVCaptureWhiteBalanceMode whiteBalanceModeFromMeteringMode(MeteringMode mode)
{
switch (mode) {
@@ -153,6 +210,12 @@ static MeteringMode meteringModeFromAVCaptureWhiteBalanceMode(AVCaptureWhiteBala
#endif
}
+static WorkQueue& photoQueue()
+{
+    static NeverDestroyed<Ref<WorkQueue>> queue = WorkQueue::create("WebKit::AVPhotoCapture Queue");
+ return queue.get();
+}
+
CaptureSourceOrError AVVideoCaptureSource::create(const CaptureDevice& device, MediaDeviceHashSalts&& hashSalts, const MediaConstraints* constraints, PageIdentifier pageIdentifier)
{
auto *avDevice = [PAL::getAVCaptureDeviceClass() deviceWithUniqueID:device.persistentId()];
@@ -200,9 +263,7 @@ static double cameraZoomScaleFactor(AVCaptureDeviceType deviceType)
if (!m_session)
return;
- if ([m_session isRunning])
- [m_session stopRunning];
-
+ stopSession();
clearSession();
}
@@ -264,7 +325,7 @@ static double cameraZoomScaleFactor(AVCaptureDeviceType deviceType)
ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, !![m_session isRunning]);
[m_objcObserver removeNotificationObservers];
- [m_session stopRunning];
+ stopSession();
m_interrupted = false;
@@ -273,6 +334,12 @@ static double cameraZoomScaleFactor(AVCaptureDeviceType deviceType)
#endif
}
+void AVVideoCaptureSource::stopSession()
+{
+ [m_session stopRunning];
+ rejectPendingPhotoRequest("Track stopped"_s);
+}
+
void AVVideoCaptureSource::startApplyingConstraints()
{
ASSERT(!m_hasBegunConfigurationForConstraints);
@@ -448,6 +515,146 @@ static bool isZoomSupported(const Vector& presets)
return *m_capabilities;
}
+AVCapturePhotoOutput* AVVideoCaptureSource::photoOutput()
+{
+ assertIsCurrent(RunLoop::main());
+
+ if (!m_photoOutput) {
+ m_photoOutput = adoptNS([PAL::allocAVCapturePhotoOutputInstance() init]);
+
+ if (![session() canAddOutput:m_photoOutput.get()]) {
+ ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add photo output");
+ return nullptr;
+ }
+ [session() addOutput:m_photoOutput.get()];
+ }
+
+ return m_photoOutput.get();
+}
+
+void AVVideoCaptureSource::resolvePendingPhotoRequest(Vector&& data, const String& mimeType)
+{
+ Locker lock { m_photoLock };
+
+ if (!m_photoProducer)
+ return;
+
+ m_photoProducer->resolve(std::make_pair(WTFMove(data), mimeType));
+ m_photoProducer = nullptr;
+}
+
+void AVVideoCaptureSource::rejectPendingPhotoRequest(const String& error)
+{
+ Locker lock { m_photoLock };
+
+ if (!m_photoProducer)
+ return;
+
+ m_photoProducer->reject(error);
+ m_photoProducer = nullptr;
+}
+
+IntSize AVVideoCaptureSource::maxPhotoSizeForCurrentPreset(IntSize requestedSize) const
+{
+ ASSERT(isMainThread());
+
+ CMVideoDimensions bestMaxPhotoSize;
+
+ auto *format = [m_device activeFormat];
+ if ([format respondsToSelector:@selector(supportedMaxPhotoDimensions)]) {
+ NSArray *maxPhotoDimensions = format.supportedMaxPhotoDimensions;
+ if (!maxPhotoDimensions.count)
+ return { };
+
+ bestMaxPhotoSize = maxPhotoDimensions.firstObject.CMVideoDimensionsValue;
+ for (NSValue *value in maxPhotoDimensions) {
+ CMVideoDimensions dimensions = value.CMVideoDimensionsValue;
+ if (dimensions.width >= requestedSize.width() && dimensions.height >= requestedSize.height()) {
+ if (dimensions.width * dimensions.height < bestMaxPhotoSize.width * bestMaxPhotoSize.height)
+ bestMaxPhotoSize = dimensions;
+ }
+ }
+ } else {
+ if (!m_currentPreset)
+ return { };
+
+ return m_currentPreset->size();
+ }
+
+ return { bestMaxPhotoSize.width, bestMaxPhotoSize.height };
+}
+
+RetainPtr AVVideoCaptureSource::photoConfiguration(const PhotoSettings& photoSettings)
+{
+ assertIsCurrent(RunLoop::main());
+
+ IntSize requestedPhotoDimensions = { 0, 0 };
+ if (photoSettings.imageHeight && photoSettings.imageWidth)
+        requestedPhotoDimensions = { static_cast<int>(*photoSettings.imageWidth), static_cast<int>(*photoSettings.imageHeight) };
+
+ AVCapturePhotoSettings* avPhotoSettings = [PAL::getAVCapturePhotoSettingsClass() photoSettingsWithFormat:@{
+ AVVideoCodecKey : AVVideoCodecTypeJPEG,
+ AVVideoCompressionPropertiesKey : @{ AVVideoQualityKey : @(1) }
+ }];
+
+#if PLATFORM(IOS_FAMILY)
+ auto* photoOutput = this->photoOutput();
+ ASSERT(photoOutput);
+
+ if (photoSettings.fillLightMode) {
+ auto flashMode = toAVCaptureFlashMode(*photoSettings.fillLightMode);
+ if ([photoOutput.supportedFlashModes containsObject:@(flashMode)])
+ [avPhotoSettings setFlashMode:flashMode];
+ }
+
+ if (photoSettings.redEyeReduction && photoOutput.isAutoRedEyeReductionSupported)
+ [avPhotoSettings setAutoRedEyeReductionEnabled:!!photoSettings.redEyeReduction.value()];
+#endif
+
+ requestedPhotoDimensions = maxPhotoSizeForCurrentPreset(requestedPhotoDimensions);
+ if (!requestedPhotoDimensions.isEmpty() && [avPhotoSettings respondsToSelector:@selector(setMaxPhotoDimensions:)])
+ [avPhotoSettings setMaxPhotoDimensions: { requestedPhotoDimensions.width(), requestedPhotoDimensions.height() }];
+
+ return avPhotoSettings;
+}
+
+auto AVVideoCaptureSource::takePhotoInternal(PhotoSettings&& photoSettings) -> Ref
+{
+ assertIsCurrent(RunLoop::main());
+
+ RetainPtr photoOutput = this->photoOutput();
+ if (!photoOutput)
+ return TakePhotoNativePromise::createAndReject("Internal error"_s);
+
+ RefPtr promise;
+ {
+ Locker lock { m_photoLock };
+ if (m_photoProducer)
+ return TakePhotoNativePromise::createAndReject("Internal error"_s);
+
+ m_photoProducer = makeUnique();
+        promise = static_cast<Ref<TakePhotoNativePromise>>(*m_photoProducer);
+ }
+
+ auto avPhotoSettings = photoConfiguration(photoSettings);
+ photoQueue().dispatch([this, protectedThis = Ref { *this }, avPhotoSettings = WTFMove(avPhotoSettings), photoOutput = WTFMove(photoOutput)] {
+ ASSERT(!isMainThread());
+
+ if ([avPhotoSettings respondsToSelector:@selector(setMaxPhotoDimensions:)]) {
+ auto requestedPhotoDimensions = [avPhotoSettings maxPhotoDimensions];
+ if (requestedPhotoDimensions.width && requestedPhotoDimensions.height) {
+ auto currentMaxPhotoDimensions = [photoOutput maxPhotoDimensions];
+ if (requestedPhotoDimensions.width > currentMaxPhotoDimensions.width || requestedPhotoDimensions.height > currentMaxPhotoDimensions.height)
+ [photoOutput setMaxPhotoDimensions:requestedPhotoDimensions];
+ }
+ }
+
+ [photoOutput capturePhotoWithSettings:avPhotoSettings.get() delegate:m_objcObserver.get()];
+ });
+
+ return promise.releaseNonNull();
+}
+
void AVVideoCaptureSource::getPhotoCapabilities(PhotoCapabilitiesHandler&& completion)
{
if (m_photoCapabilities) {
@@ -469,36 +676,20 @@ static bool isZoomSupported(const Vector& presets)
completion({ *m_photoCapabilities });
}
-static FillLightMode toFillLightMode(AVCaptureTorchMode mode)
+auto AVVideoCaptureSource::getPhotoSettings() -> Ref
{
- switch (mode) {
- case AVCaptureTorchModeOff:
- return FillLightMode::Off;
- break;
- case AVCaptureTorchModeOn:
- return FillLightMode::Flash;
- break;
- case AVCaptureTorchModeAuto:
- return FillLightMode::Auto;
- break;
- }
+ ASSERT(isMainThread());
- ASSERT_NOT_REACHED();
- return FillLightMode::Auto;
-}
+ PhotoSettings settings;
-auto AVVideoCaptureSource::getPhotoSettings() -> Ref
-{
- if (!m_photoSettings) {
- std::optional fillLightMode;
- if ([device() hasTorch])
- fillLightMode = { toFillLightMode([device() torchMode]) };
+ std::optional fillLightMode;
+ if ([device() hasTorch])
+ fillLightMode = { toFillLightMode([device() torchMode]) };
- auto settings = this->settings();
- m_photoSettings = PhotoSettings { fillLightMode, settings.height(), settings.width(), { } };
- }
+ auto trackSettings = this->settings();
+ settings = { fillLightMode, trackSettings.height(), trackSettings.width(), { } };
- return PhotoSettingsNativePromise::createAndResolve(*m_photoSettings);
+ return PhotoSettingsNativePromise::createAndResolve(settings);
}
NSMutableArray* AVVideoCaptureSource::cameraCaptureDeviceTypes()
@@ -915,6 +1106,22 @@ static inline IntDegrees sensorOrientation(AVCaptureVideoOrientation videoOrient
dispatchVideoFrameToObservers(WTFMove(videoFrame), metadata);
}
+void AVVideoCaptureSource::captureOutputDidFinishProcessingPhoto(RetainPtr, RetainPtr photo, RetainPtr error)
+{
+ if (!error) {
+ NSData* data = [photo fileDataRepresentation];
+        resolvePendingPhotoRequest({ static_cast<const uint8_t*>(data.bytes), data.length }, "image/jpeg"_s);
+ } else
+ rejectPendingPhotoRequest("AVCapturePhotoOutput failed"_s);
+
+ if (error) {
+ RunLoop::main().dispatch([this, protectedThis = Ref { *this }, logIdentifier = LOGIDENTIFIER, error = WTFMove(error) ] {
+ ASSERT(isMainThread());
+ ALWAYS_LOG_IF(loggerPtr(), logIdentifier, "failed: ", [error code], ", ", error.get());
+ });
+ }
+}
+
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
scheduleDeferredTask([this, logIdentifier = LOGIDENTIFIER, state] {
@@ -1065,6 +1272,15 @@ - (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMS
m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}
+- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error
+{
+ if (!m_callback)
+ return;
+
+ m_callback->captureOutputDidFinishProcessingPhoto(captureOutput, photo, error);
+
+}
+
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
UNUSED_PARAM(object);
diff --git a/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm b/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm
index d29188e089ab..9db03ca29691 100644
--- a/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm
+++ b/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm
@@ -84,7 +84,7 @@
void MockRealtimeVideoSourceMac::updateSampleBuffer()
{
- RefPtr imageBuffer = this->imageBuffer();
+ RefPtr imageBuffer = this->imageBufferInternal();
if (!imageBuffer)
return;
diff --git a/Source/WebCore/platform/mock/MockRealtimeVideoSource.cpp b/Source/WebCore/platform/mock/MockRealtimeVideoSource.cpp
index a1ff8a3d3769..92d0b25743f0 100644
--- a/Source/WebCore/platform/mock/MockRealtimeVideoSource.cpp
+++ b/Source/WebCore/platform/mock/MockRealtimeVideoSource.cpp
@@ -46,6 +46,7 @@
#include "VideoFrame.h"
#include
#include
+#include
#include
#include
@@ -77,6 +78,67 @@ static ThreadSafeWeakHashSet& allMockRealtimeVideoSourc
return videoSources;
}
+static RunLoop& takePhotoRunLoop()
+{
+    static NeverDestroyed<Ref<RunLoop>> runLoop = RunLoop::create("WebKit::MockRealtimeVideoSource takePhoto runloop");
+ return runLoop.get();
+}
+
+FontCascadeDescription& MockRealtimeVideoSource::DrawingState::fontDescription()
+{
+ if (!m_fontDescription) {
+ FontCascadeDescription fontDescription;
+ fontDescription.setOneFamily("Courier"_s);
+ fontDescription.setWeight(FontSelectionValue(500));
+ m_fontDescription = { fontDescription };
+ }
+
+ return *m_fontDescription;
+}
+
+const FontCascade& MockRealtimeVideoSource::DrawingState::timeFont()
+{
+ if (m_timeFont)
+ return *m_timeFont;
+
+ auto& description = fontDescription();
+ description.setSpecifiedSize(m_baseFontSize);
+ description.setComputedSize(m_baseFontSize);
+ m_timeFont = { FontCascadeDescription { description }, 0, 0 };
+ m_timeFont->update(nullptr);
+
+ return *m_timeFont;
+}
+
+const FontCascade& MockRealtimeVideoSource::DrawingState::bipBopFont()
+{
+ if (m_bipBopFont)
+ return *m_bipBopFont;
+
+ auto& description = fontDescription();
+ description.setSpecifiedSize(m_bipBopFontSize);
+ description.setComputedSize(m_bipBopFontSize);
+ m_bipBopFont = { FontCascadeDescription { description }, 0, 0 };
+ m_bipBopFont->update(nullptr);
+
+ return *m_bipBopFont;
+}
+
+const FontCascade& MockRealtimeVideoSource::DrawingState::statsFont()
+{
+ if (m_statsFont)
+ return *m_statsFont;
+
+ auto& description = fontDescription();
+ description.setSpecifiedSize(m_statsFontSize);
+ description.setComputedSize(m_statsFontSize);
+ m_statsFont = { FontCascadeDescription { description }, 0, 0 };
+ m_statsFont->update(nullptr);
+
+ return *m_statsFont;
+}
+
+
MockRealtimeVideoSource::MockRealtimeVideoSource(String&& deviceID, AtomString&& name, MediaDeviceHashSalts&& hashSalts, PageIdentifier pageIdentifier)
: RealtimeVideoCaptureSource(CaptureDevice { WTFMove(deviceID), CaptureDevice::DeviceType::Camera, WTFMove(name) }, WTFMove(hashSalts), pageIdentifier)
, m_emitFrameTimer(RunLoop::current(), this, &MockRealtimeVideoSource::generateFrame)
@@ -183,6 +245,20 @@ const RealtimeMediaSourceCapabilities& MockRealtimeVideoSource::capabilities()
return m_capabilities.value();
}
+auto MockRealtimeVideoSource::takePhotoInternal(PhotoSettings&&) -> Ref
+{
+ {
+ Locker lock { m_imageBufferLock };
+ invalidateDrawingState();
+ }
+
+ return invokeAsync(takePhotoRunLoop(), [this, protectedThis = Ref { *this }] () mutable {
+ if (auto currentImage = generatePhoto())
+ return TakePhotoNativePromise::createAndResolve(std::make_pair(ImageBuffer::toData(*currentImage, "image/jpeg"_s), "image/jpeg"_s));
+ return TakePhotoNativePromise::createAndReject("Failed to capture photo"_s);
+ });
+}
+
void MockRealtimeVideoSource::getPhotoCapabilities(PhotoCapabilitiesHandler&& completion)
{
if (m_photoCapabilities) {
@@ -307,15 +383,32 @@ VideoFrameRotation MockRealtimeVideoSource::videoFrameRotation() const
return m_deviceOrientation;
}
+void MockRealtimeVideoSource::invalidateDrawingState()
+{
+ assertIsHeld(m_imageBufferLock);
+
+ m_imageBuffer = nullptr;
+ m_drawingState = { };
+}
+
+MockRealtimeVideoSource::DrawingState& MockRealtimeVideoSource::drawingState()
+{
+ assertIsHeld(m_imageBufferLock);
+
+ if (!m_drawingState)
+ m_drawingState = { DrawingState(captureSize().height() * .08) };
+
+ return *m_drawingState;
+}
+
void MockRealtimeVideoSource::settingsDidChange(OptionSet settings)
{
m_currentSettings = std::nullopt;
if (settings.containsAny({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height })) {
- m_baseFontSize = captureSize().height() * .08;
- m_bipBopFontSize = m_baseFontSize * 2.5;
- m_statsFontSize = m_baseFontSize * .5;
- m_imageBuffer = nullptr;
+ Locker lock { m_imageBufferLock };
+ invalidateDrawingState();
}
+
if (settings.contains(RealtimeMediaSourceSettings::Flag::Torch))
m_photoSettings = std::nullopt;
}
@@ -430,58 +523,42 @@ void MockRealtimeVideoSource::drawBoxes(GraphicsContext& context)
void MockRealtimeVideoSource::drawText(GraphicsContext& context)
{
+ assertIsHeld(m_imageBufferLock);
+
unsigned milliseconds = lround(elapsedTime().milliseconds());
unsigned seconds = milliseconds / 1000 % 60;
unsigned minutes = seconds / 60 % 60;
unsigned hours = minutes / 60 % 60;
- FontCascadeDescription fontDescription;
- fontDescription.setOneFamily("Courier"_s);
- fontDescription.setWeight(FontSelectionValue(500));
-
- fontDescription.setSpecifiedSize(m_baseFontSize);
- fontDescription.setComputedSize(m_baseFontSize);
- FontCascade timeFont { FontCascadeDescription { fontDescription }, 0, 0 };
- timeFont.update(nullptr);
-
- fontDescription.setSpecifiedSize(m_bipBopFontSize);
- fontDescription.setComputedSize(m_bipBopFontSize);
- FontCascade bipBopFont { FontCascadeDescription { fontDescription }, 0, 0 };
- bipBopFont.update(nullptr);
-
- fontDescription.setSpecifiedSize(m_statsFontSize);
- fontDescription.setComputedSize(m_statsFontSize);
- FontCascade statsFont { WTFMove(fontDescription), 0, 0 };
- statsFont.update(nullptr);
-
+    auto& drawingState = this->drawingState();
IntSize captureSize = this->captureSize();
FloatPoint timeLocation(captureSize.width() * .05, captureSize.height() * .15);
context.setFillColor(Color::white);
context.setTextDrawingMode(TextDrawingMode::Fill);
auto string = makeString(pad('0', 2, hours), ':', pad('0', 2, minutes), ':', pad('0', 2, seconds), '.', pad('0', 3, milliseconds % 1000));
- context.drawText(timeFont, TextRun((StringView(string))), timeLocation);
+ context.drawText(drawingState.timeFont(), TextRun((StringView(string))), timeLocation);
string = makeString(pad('0', 6, m_frameNumber++));
- timeLocation.move(0, m_baseFontSize);
- context.drawText(timeFont, TextRun((StringView(string))), timeLocation);
+ timeLocation.move(0, drawingState.baseFontSize());
+ context.drawText(drawingState.timeFont(), TextRun((StringView(string))), timeLocation);
FloatPoint statsLocation(captureSize.width() * .45, captureSize.height() * .75);
string = makeString("Requested frame rate: ", FormattedNumber::fixedWidth(frameRate(), 1), " fps");
- context.drawText(statsFont, TextRun((StringView(string))), statsLocation);
+ context.drawText(drawingState.statsFont(), TextRun((StringView(string))), statsLocation);
- statsLocation.move(0, m_statsFontSize);
+ statsLocation.move(0, drawingState.statsFontSize());
string = makeString("Observed frame rate: ", FormattedNumber::fixedWidth(observedFrameRate(), 1), " fps");
- context.drawText(statsFont, TextRun((StringView(string))), statsLocation);
+ context.drawText(drawingState.statsFont(), TextRun((StringView(string))), statsLocation);
auto size = this->size();
- statsLocation.move(0, m_statsFontSize);
+ statsLocation.move(0, drawingState.statsFontSize());
string = makeString("Size: ", size.width(), " x ", size.height());
- context.drawText(statsFont, TextRun((StringView(string))), statsLocation);
+ context.drawText(drawingState.statsFont(), TextRun((StringView(string))), statsLocation);
if (mockCamera()) {
- statsLocation.move(0, m_statsFontSize);
+ statsLocation.move(0, drawingState.statsFontSize());
string = makeString("Preset size: ", captureSize.width(), " x ", captureSize.height());
- context.drawText(statsFont, TextRun((StringView(string))), statsLocation);
+ context.drawText(drawingState.statsFont(), TextRun((StringView(string))), statsLocation);
const char* camera;
switch (facingMode()) {
@@ -502,11 +579,11 @@ void MockRealtimeVideoSource::drawText(GraphicsContext& context)
break;
}
string = makeString("Camera: ", camera);
- statsLocation.move(0, m_statsFontSize);
- context.drawText(statsFont, TextRun(string), statsLocation);
+ statsLocation.move(0, drawingState.statsFontSize());
+ context.drawText(drawingState.statsFont(), TextRun(string), statsLocation);
} else if (!name().isNull()) {
- statsLocation.move(0, m_statsFontSize);
- context.drawText(statsFont, TextRun { name().string() }, statsLocation);
+ statsLocation.move(0, drawingState.statsFontSize());
+ context.drawText(drawingState.statsFont(), TextRun { name().string() }, statsLocation);
}
FloatPoint bipBopLocation(captureSize.width() * .6, captureSize.height() * .6);
@@ -514,11 +591,11 @@ void MockRealtimeVideoSource::drawText(GraphicsContext& context)
if (frameMod <= 15) {
context.setFillColor(Color::cyan);
String bip("Bip"_s);
- context.drawText(bipBopFont, TextRun(StringView(bip)), bipBopLocation);
+ context.drawText(drawingState.bipBopFont(), TextRun(StringView(bip)), bipBopLocation);
} else if (frameMod > 30 && frameMod <= 45) {
context.setFillColor(Color::yellow);
String bop("Bop"_s);
- context.drawText(bipBopFont, TextRun(StringView(bop)), bipBopLocation);
+ context.drawText(drawingState.bipBopFont(), TextRun(StringView(bop)), bipBopLocation);
}
}
@@ -527,37 +604,64 @@ void MockRealtimeVideoSource::delaySamples(Seconds delta)
m_delayUntil = MonotonicTime::now() + delta;
}
-void MockRealtimeVideoSource::generateFrame()
+RefPtr MockRealtimeVideoSource::generatePhoto()
{
- if (m_delayUntil) {
- if (m_delayUntil < MonotonicTime::now())
- return;
- m_delayUntil = MonotonicTime();
- }
+ ASSERT(!isMainThread());
+ ASSERT(!m_drawingState);
+
+ Locker lock { m_imageBufferLock };
+ auto currentImage = generateFrameInternal();
+ invalidateDrawingState();
+
+ return currentImage;
+}
+
+RefPtr MockRealtimeVideoSource::generateFrameInternal()
+{
+ assertIsHeld(m_imageBufferLock);
- RefPtr buffer = imageBuffer();
+ RefPtr buffer = imageBufferInternal();
if (!buffer)
- return;
+ return nullptr;
GraphicsContext& context = buffer->context();
GraphicsContextStateSaver stateSaver(context);
- auto size = this->captureSize();
- FloatRect frameRect(FloatPoint(), size);
-
- context.fillRect(FloatRect(FloatPoint(), size), zoom() >= 2 ? m_fillColorWithZoom : m_fillColor);
+ context.fillRect(FloatRect(FloatPoint(), captureSize()), zoom() >= 2 ? m_fillColorWithZoom : m_fillColor);
- if (!muted()) {
+ if (!muted() || mutedForPhotoCapture()) {
drawText(context);
drawAnimation(context);
drawBoxes(context);
}
updateSampleBuffer();
+
+ return imageBufferInternal();
+}
+
+void MockRealtimeVideoSource::generateFrame()
+{
+ if (m_delayUntil) {
+ if (m_delayUntil < MonotonicTime::now())
+ return;
+ m_delayUntil = MonotonicTime();
+ }
+
+ Locker lock { m_imageBufferLock };
+ generateFrameInternal();
}
-ImageBuffer* MockRealtimeVideoSource::imageBuffer() const
+ImageBuffer* MockRealtimeVideoSource::imageBuffer()
{
+ Locker lock { m_imageBufferLock };
+ return imageBufferInternal();
+}
+
+ImageBuffer* MockRealtimeVideoSource::imageBufferInternal()
+{
+ assertIsHeld(m_imageBufferLock);
+
if (m_imageBuffer)
return m_imageBuffer.get();
diff --git a/Source/WebCore/platform/mock/MockRealtimeVideoSource.h b/Source/WebCore/platform/mock/MockRealtimeVideoSource.h
index be9ee5765485..db0aba720773 100644
--- a/Source/WebCore/platform/mock/MockRealtimeVideoSource.h
+++ b/Source/WebCore/platform/mock/MockRealtimeVideoSource.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015-2022 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2023 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -38,6 +38,7 @@
#include "OrientationNotifier.h"
#include "RealtimeMediaSourceFactory.h"
#include "RealtimeVideoCaptureSource.h"
+#include
#include
namespace WebCore {
@@ -54,7 +55,7 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
static void setIsInterrupted(bool);
- ImageBuffer* imageBuffer() const;
+ ImageBuffer* imageBuffer();
protected:
MockRealtimeVideoSource(String&& deviceID, AtomString&& name, MediaDeviceHashSalts&&, PageIdentifier);
@@ -68,12 +69,15 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
IntSize captureSize() const;
+ ImageBuffer* imageBufferInternal();
+
private:
friend class MockDisplayCaptureSourceGStreamer;
friend class MockRealtimeVideoSourceGStreamer;
const RealtimeMediaSourceCapabilities& capabilities() final;
const RealtimeMediaSourceSettings& settings() final;
+ Ref takePhotoInternal(PhotoSettings&&) final;
void getPhotoCapabilities(PhotoCapabilitiesHandler&&) final;
Ref getPhotoSettings() final;
@@ -85,7 +89,6 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
void setSizeFrameRateAndZoom(std::optional width, std::optional height, std::optional, std::optional) final;
void setFrameRateAndZoomWithPreset(double, double, std::optional&&) final;
-
bool isMockSource() const final { return true; }
// OrientationNotifier::Observer
@@ -97,7 +100,9 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
void drawBoxes(GraphicsContext&);
void generateFrame();
+ RefPtr generateFrameInternal();
void startCaptureTimer();
+ RefPtr generatePhoto();
void delaySamples(Seconds) final;
@@ -107,11 +112,39 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
bool mockWindow() const { return mockDisplayType(CaptureDevice::DeviceType::Window); }
bool mockDisplayType(CaptureDevice::DeviceType) const;
- float m_baseFontSize { 0 };
- float m_bipBopFontSize { 0 };
- float m_statsFontSize { 0 };
+ class DrawingState {
+ public:
+ DrawingState(float baseFontSize)
+ : m_baseFontSize(baseFontSize)
+ , m_bipBopFontSize(baseFontSize * 2.5)
+ , m_statsFontSize(baseFontSize * .5)
+ {
+ }
+
+ float baseFontSize() const { return m_baseFontSize; }
+ float statsFontSize() const { return m_statsFontSize; }
+
+ const FontCascade& timeFont();
+ const FontCascade& bipBopFont();
+ const FontCascade& statsFont();
+
+ private:
+ FontCascadeDescription& fontDescription();
+
+ float m_baseFontSize { 0 };
+ float m_bipBopFontSize { 0 };
+ float m_statsFontSize { 0 };
+ std::optional m_timeFont;
+ std::optional m_bipBopFont;
+ std::optional m_statsFont;
+ std::optional m_fontDescription;
+ };
- mutable RefPtr m_imageBuffer;
+ DrawingState& drawingState();
+ void invalidateDrawingState();
+
+ std::optional m_drawingState;
+ mutable RefPtr m_imageBuffer WTF_GUARDED_BY_LOCK(m_imageBufferLock);
Path m_path;
DashArray m_dashWidths;
@@ -124,14 +157,16 @@ class MockRealtimeVideoSource : public RealtimeVideoCaptureSource, private Orien
RunLoop::Timer m_emitFrameTimer;
std::optional m_capabilities;
std::optional m_currentSettings;
- std::optional m_photoCapabilities;
- std::optional m_photoSettings;
RealtimeMediaSourceSupportedConstraints m_supportedConstraints;
Color m_fillColor { Color::black };
Color m_fillColorWithZoom { Color::red };
MockMediaDevice m_device;
std::optional m_preset;
VideoFrameRotation m_deviceOrientation;
+
+ Lock m_imageBufferLock;
+ std::optional m_photoCapabilities;
+ std::optional m_photoSettings;
};
} // namespace WebCore
diff --git a/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h b/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h
index f7a24acedd12..8c69c00801a1 100644
--- a/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h
+++ b/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h
@@ -91,7 +91,6 @@ class RemoteSampleBufferDisplayLayer : public WebCore::SampleBufferDisplayLayer:
GPUConnectionToWebProcess& m_gpuConnection WTF_GUARDED_BY_CAPABILITY(m_consumeThread);
SampleBufferDisplayLayerIdentifier m_identifier;
Ref m_connection;
- std::unique_ptr m_imageTransferSession;
RefPtr m_sampleBufferDisplayLayer;
std::unique_ptr m_layerHostingContext;
SharedVideoFrameReader m_sharedVideoFrameReader;
diff --git a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp
index 3e08e9806329..d6a96bc4d778 100644
--- a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp
+++ b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp
@@ -209,6 +209,11 @@ class UserMediaCaptureManagerProxy::SourceProxy
m_frameRateConstraint = proxy.m_frameRateConstraint;
}
+ Ref takePhoto(PhotoSettings&& settings)
+ {
+ return m_source->takePhoto(WTFMove(settings));
+ }
+
void getPhotoCapabilities(GetPhotoCapabilitiesCallback&& handler)
{
m_source->getPhotoCapabilities(WTFMove(handler));
@@ -561,6 +566,21 @@ void UserMediaCaptureManagerProxy::clone(RealtimeMediaSourceIdentifier clonedID,
}
}
+void UserMediaCaptureManagerProxy::takePhoto(RealtimeMediaSourceIdentifier sourceID, WebCore::PhotoSettings&& settings, TakePhotoCallback&& handler)
+{
+ auto* proxy = m_proxies.get(sourceID);
+ if (!proxy) {
+ handler(Unexpected("Device not available"_s));
+ return;
+ }
+
+ proxy->takePhoto(WTFMove(settings))->whenSettled(RunLoop::main(), [handler = WTFMove(handler)] (auto&& result) mutable {
+ handler(WTFMove(result));
+ });
+}
+
+
+
void UserMediaCaptureManagerProxy::getPhotoCapabilities(RealtimeMediaSourceIdentifier sourceID, GetPhotoCapabilitiesCallback&& handler)
{
if (auto* proxy = m_proxies.get(sourceID)) {
diff --git a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h
index e5c67a731ef7..6749e156ccef 100644
--- a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h
+++ b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h
@@ -97,6 +97,9 @@ class UserMediaCaptureManagerProxy : private IPC::MessageReceiver {
void setShouldApplyRotation(WebCore::RealtimeMediaSourceIdentifier, bool shouldApplyRotation);
void setIsInBackground(WebCore::RealtimeMediaSourceIdentifier, bool);
+    using TakePhotoCallback = CompletionHandler<void(Expected<std::pair<Vector<uint8_t>, String>, String>&&)>;
+ void takePhoto(WebCore::RealtimeMediaSourceIdentifier, WebCore::PhotoSettings&&, TakePhotoCallback&&);
+
using GetPhotoCapabilitiesCallback = CompletionHandler;
void getPhotoCapabilities(WebCore::RealtimeMediaSourceIdentifier, GetPhotoCapabilitiesCallback&&);
diff --git a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in
index 0a76b4f88a2c..f46f653e10d7 100644
--- a/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in
+++ b/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in
@@ -29,6 +29,7 @@ messages -> UserMediaCaptureManagerProxy NotRefCounted {
StopProducingData(WebCore::RealtimeMediaSourceIdentifier id)
RemoveSource(WebCore::RealtimeMediaSourceIdentifier id)
ApplyConstraints(WebCore::RealtimeMediaSourceIdentifier id, struct WebCore::MediaConstraints constraints)
+    TakePhoto(WebCore::RealtimeMediaSourceIdentifier sourceID, struct WebCore::PhotoSettings settings) -> (Expected<std::pair<Vector<uint8_t>, String>, String> result) Async
GetPhotoCapabilities(WebCore::RealtimeMediaSourceIdentifier sourceID) -> (struct WebCore::PhotoCapabilitiesOrError result) Async
GetPhotoSettings(WebCore::RealtimeMediaSourceIdentifier sourceID) -> (Expected result) Async
Clone(WebCore::RealtimeMediaSourceIdentifier clonedID, WebCore::RealtimeMediaSourceIdentifier cloneID, WebCore::PageIdentifier pageIdentifier)
diff --git a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.cpp b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.cpp
index 20dc965aac56..ad090d7c76cc 100644
--- a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.cpp
+++ b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.cpp
@@ -80,6 +80,11 @@ void RemoteRealtimeMediaSource::setSettings(RealtimeMediaSourceSettings&& settin
notifySettingsDidChangeObservers(changed);
}
+Ref RemoteRealtimeMediaSource::takePhoto(PhotoSettings&& settings)
+{
+ return m_proxy.takePhoto(WTFMove(settings));
+}
+
void RemoteRealtimeMediaSource::getPhotoCapabilities(PhotoCapabilitiesHandler&& callback)
{
m_proxy.getPhotoCapabilities(WTFMove(callback));
diff --git a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.h b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.h
index 9d4dd7a1098a..b769e1370c91 100644
--- a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.h
+++ b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSource.h
@@ -73,6 +73,8 @@ class RemoteRealtimeMediaSource : public WebCore::RealtimeMediaSource
const WebCore::RealtimeMediaSourceSettings& settings() final { return m_settings; }
const WebCore::RealtimeMediaSourceCapabilities& capabilities() final { return m_capabilities; }
+
+ Ref takePhoto(WebCore::PhotoSettings&&) final;
void getPhotoCapabilities(PhotoCapabilitiesHandler&&) final;
Ref getPhotoSettings() final;
diff --git a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.cpp b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.cpp
index 236f4272fb03..dffa6c500b90 100644
--- a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.cpp
+++ b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.cpp
@@ -113,6 +113,17 @@ void RemoteRealtimeMediaSourceProxy::applyConstraints(const MediaConstraints& co
m_connection->send(Messages::UserMediaCaptureManagerProxy::ApplyConstraints { m_identifier, constraints }, 0);
}
+Ref RemoteRealtimeMediaSourceProxy::takePhoto(PhotoSettings&& settings)
+{
+ return m_connection->sendWithPromisedReply(Messages::UserMediaCaptureManagerProxy::TakePhoto(identifier(), WTFMove(settings)))->whenSettled(RunLoop::main(), [](Messages::UserMediaCaptureManagerProxy::TakePhoto::Promise::Result&& result) {
+
+ if (result)
+ return WebCore::RealtimeMediaSource::TakePhotoNativePromise::createAndSettle(WTFMove(result.value()));
+
+ return WebCore::RealtimeMediaSource::TakePhotoNativePromise::createAndReject(String("IPC Connection closed"_s));
+ });
+}
+
void RemoteRealtimeMediaSourceProxy::getPhotoCapabilities(WebCore::RealtimeMediaSource::PhotoCapabilitiesHandler&& handler)
{
m_connection->sendWithAsyncReply(Messages::UserMediaCaptureManagerProxy::GetPhotoCapabilities(identifier()), WTFMove(handler));
diff --git a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.h b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.h
index 790f708bb29e..9d306d6c72b4 100644
--- a/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.h
+++ b/Source/WebKit/WebProcess/cocoa/RemoteRealtimeMediaSourceProxy.h
@@ -73,6 +73,8 @@ class RemoteRealtimeMediaSourceProxy {
void stopProducingData();
void endProducingData();
void applyConstraints(const WebCore::MediaConstraints&, WebCore::RealtimeMediaSource::ApplyConstraintsHandler&&);
+
+ Ref takePhoto(WebCore::PhotoSettings&&);
void getPhotoCapabilities(WebCore::RealtimeMediaSource::PhotoCapabilitiesHandler&&);
Ref getPhotoSettings();
]