LibMedia+LibWeb: Rewrite PlaybackManager using the provider/sink model

With this commit, all PlaybackManager can do is autoplay a file from
start to finish, with no pausing or seeking functionality.

All audio playback functionality has been removed from HTMLMediaElement
and HTMLAudioElement in anticipation of PlaybackManager taking that
over, for both audio-only and audio/video.
This commit is contained in:
Zaggy1024 2025-10-03 00:39:39 -05:00 committed by Jelle Raaijmakers
parent 0f9fa47352
commit 6caa2f99aa
Notes: github-actions[bot] 2025-10-28 00:35:44 +00:00
15 changed files with 371 additions and 1234 deletions

View file

@@ -1,734 +1,134 @@
/*
* Copyright (c) 2022, Gregory Bertilson <zaggy1024@gmail.com>
* Copyright (c) 2022-2025, Gregory Bertilson <gregory@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/Format.h>
#include <LibCore/MappedFile.h>
#include <LibCore/Timer.h>
#include <LibMedia/FFmpeg/FFmpegDemuxer.h>
#include <LibMedia/FFmpeg/FFmpegVideoDecoder.h>
#include <LibMedia/VideoFrame.h>
#include <LibMedia/MutexedDemuxer.h>
#include <LibMedia/Providers/VideoDataProvider.h>
#include <LibMedia/Sinks/DisplayingVideoSink.h>
#include <LibMedia/Track.h>
#include "PlaybackManager.h"
namespace Media {
#define TRY_OR_FATAL_ERROR(expression) \
({ \
auto&& _fatal_expression = (expression); \
if (_fatal_expression.is_error()) { \
dispatch_fatal_error(_fatal_expression.release_error()); \
return; \
} \
static_assert(!::AK::Detail::IsLvalueReference<decltype(_fatal_expression.release_value())>, \
"Do not return a reference from a fallible expression"); \
_fatal_expression.release_value(); \
})
DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> PlaybackManager::from_data(ReadonlyBytes data)
DecoderErrorOr<NonnullRefPtr<PlaybackManager>> PlaybackManager::try_create(NonnullOwnPtr<SeekableStream>&& stream)
{
auto stream = make<FixedMemoryStream>(data);
return from_stream(move(stream));
auto inner_demuxer = DECODER_TRY_ALLOC(FFmpeg::FFmpegDemuxer::create(move(stream)));
auto demuxer = DECODER_TRY_ALLOC(try_make_ref_counted<MutexedDemuxer>(inner_demuxer));
// Create the weak wrapper.
auto weak_playback_manager = DECODER_TRY_ALLOC(try_make_ref_counted<WeakPlaybackManager>());
// Create the video tracks and their data providers.
auto all_video_tracks = TRY(demuxer->get_tracks_for_type(TrackType::Video));
auto supported_video_tracks = VideoTracks();
auto supported_video_track_datas = VideoTrackDatas();
supported_video_tracks.ensure_capacity(all_video_tracks.size());
supported_video_track_datas.ensure_capacity(all_video_tracks.size());
for (auto const& track : all_video_tracks) {
auto video_data_provider_result = VideoDataProvider::try_create(demuxer, track);
if (video_data_provider_result.is_error())
continue;
supported_video_tracks.append(track);
supported_video_track_datas.empend(VideoTrackData(track, video_data_provider_result.release_value(), nullptr));
}
supported_video_tracks.shrink_to_fit();
supported_video_track_datas.shrink_to_fit();
if (supported_video_tracks.is_empty())
return DecoderError::with_description(DecoderErrorCategory::NotImplemented, "No supported video tracks found"sv);
auto playback_manager = DECODER_TRY_ALLOC(adopt_nonnull_ref_or_enomem(new (nothrow) PlaybackManager(demuxer, weak_playback_manager, move(supported_video_tracks), move(supported_video_track_datas))));
weak_playback_manager->m_manager = playback_manager;
playback_manager->set_up_error_handlers();
return playback_manager;
}
DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> PlaybackManager::from_stream(NonnullOwnPtr<SeekableStream> stream)
{
auto demuxer_or_error = FFmpeg::FFmpegDemuxer::create(move(stream));
if (demuxer_or_error.is_error())
return DecoderError::format(DecoderErrorCategory::Unknown, "{}", demuxer_or_error.error());
return create(demuxer_or_error.release_value());
}
PlaybackManager::PlaybackManager(NonnullRefPtr<Demuxer> const& demuxer, Track video_track, NonnullOwnPtr<VideoDecoder>&& decoder, VideoFrameQueue&& frame_queue)
PlaybackManager::PlaybackManager(NonnullRefPtr<MutexedDemuxer> const& demuxer, NonnullRefPtr<WeakPlaybackManager> const& weak_wrapper, VideoTracks&& video_tracks, VideoTrackDatas&& video_track_datas)
: m_demuxer(demuxer)
, m_selected_video_track(video_track)
, m_frame_queue(move(frame_queue))
, m_decoder(move(decoder))
, m_decode_wait_condition(m_decode_wait_mutex)
, m_weak_wrapper(weak_wrapper)
, m_video_tracks(video_tracks)
, m_video_track_datas(video_track_datas)
, m_real_time_base(MonotonicTime::now())
{
}
PlaybackManager::~PlaybackManager()
{
terminate_playback();
m_weak_wrapper->revoke();
}
void PlaybackManager::resume_playback()
void PlaybackManager::set_up_error_handlers()
{
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Resuming playback.");
TRY_OR_FATAL_ERROR(m_playback_handler->play());
}
void PlaybackManager::pause_playback()
{
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Pausing playback.");
if (!m_playback_handler->is_playing())
warnln("Cannot pause.");
TRY_OR_FATAL_ERROR(m_playback_handler->pause());
}
void PlaybackManager::terminate_playback()
{
m_stop_decoding.exchange(true);
m_decode_wait_condition.broadcast();
if (m_decode_thread->needs_to_be_joined()) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Waiting for decode thread to end...");
(void)m_decode_thread->join();
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Successfully destroyed PlaybackManager.");
for (auto const& video_track_data : m_video_track_datas) {
video_track_data.provider->set_error_handler([weak_self = m_weak_wrapper](DecoderError&& error) {
auto self = weak_self->take_strong();
if (!self)
return;
self->dispatch_error(move(error));
});
}
}
AK::Duration PlaybackManager::current_playback_time()
void PlaybackManager::dispatch_error(DecoderError&& error)
{
return m_playback_handler->current_time();
if (m_is_in_error_state)
return;
m_is_in_error_state = true;
if (on_error)
on_error(move(error));
}
AK::Duration PlaybackManager::duration()
AK::Duration PlaybackManager::current_time() const
{
auto duration_result = ({
auto demuxer_locker = Threading::MutexLocker(m_decoder_mutex);
m_demuxer->duration_of_track(m_selected_video_track);
});
if (duration_result.is_error()) {
dispatch_decoder_error(duration_result.release_error());
// FIXME: We should determine the last sample that the demuxer knows is available and
// use that as the current duration. The duration may change if the demuxer doesn't
// know there is a fixed duration.
return AK::Duration::zero();
}
return duration_result.release_value();
return MonotonicTime::now() - m_real_time_base;
}
void PlaybackManager::dispatch_fatal_error(Error error)
AK::Duration PlaybackManager::duration() const
{
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Encountered fatal error: {}", error.string_literal());
// FIXME: For threading, this will have to use a pre-allocated event to send to the main loop
// to be able to gracefully handle OOM.
if (on_fatal_playback_error)
on_fatal_playback_error(move(error));
return m_demuxer->total_duration().value_or(AK::Duration::zero());
}
void PlaybackManager::dispatch_decoder_error(DecoderError error)
Optional<Track> PlaybackManager::preferred_video_track()
{
switch (error.category()) {
case DecoderErrorCategory::EndOfStream:
dbgln_if(PLAYBACK_MANAGER_DEBUG, "{}", error.string_literal());
TRY_OR_FATAL_ERROR(m_playback_handler->stop());
break;
default:
dbgln("Playback error encountered: {}", error.string_literal());
TRY_OR_FATAL_ERROR(m_playback_handler->stop());
if (on_decoder_error)
on_decoder_error(move(error));
break;
}
}
void PlaybackManager::dispatch_new_frame(RefPtr<Gfx::Bitmap> frame)
{
if (on_video_frame)
on_video_frame(move(frame));
}
bool PlaybackManager::dispatch_frame_queue_item(FrameQueueItem&& item)
{
if (item.is_error()) {
dispatch_decoder_error(item.release_error());
return true;
}
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Sent frame for presentation with timestamp {}ms, late by {}ms", item.timestamp().to_milliseconds(), (current_playback_time() - item.timestamp()).to_milliseconds());
dispatch_new_frame(item.bitmap());
return false;
}
void PlaybackManager::dispatch_state_change()
{
if (on_playback_state_change)
on_playback_state_change();
}
void PlaybackManager::timer_callback()
{
TRY_OR_FATAL_ERROR(m_playback_handler->do_timed_state_update());
}
void PlaybackManager::seek_to_timestamp(AK::Duration target_timestamp, SeekMode seek_mode)
{
TRY_OR_FATAL_ERROR(m_playback_handler->seek(target_timestamp, seek_mode));
}
DecoderErrorOr<Optional<AK::Duration>> PlaybackManager::seek_demuxer_to_most_recent_keyframe(AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample)
{
auto seeked_timestamp = TRY(m_demuxer->seek_to_most_recent_keyframe(m_selected_video_track, timestamp, move(earliest_available_sample)));
if (seeked_timestamp.has_value())
m_decoder->flush();
return seeked_timestamp;
}
Optional<FrameQueueItem> PlaybackManager::dequeue_one_frame()
{
auto result = m_frame_queue.dequeue();
m_decode_wait_condition.broadcast();
if (result.is_error()) {
if (result.error() != VideoFrameQueue::QueueStatus::Empty)
dispatch_fatal_error(Error::from_string_literal("Dequeue failed with an unexpected error"));
auto result = m_demuxer->get_preferred_track_for_type(TrackType::Video).value_or({});
if (result.has_value() && !m_video_tracks.contains_slow(result.value()))
return {};
}
return result.release_value();
return result;
}
void PlaybackManager::set_state_update_timer(int delay_ms)
PlaybackManager::VideoTrackData& PlaybackManager::get_video_data_for_track(Track const& track)
{
m_state_update_timer->start(delay_ms);
for (auto& track_data : m_video_track_datas) {
if (track_data.track == track)
return track_data;
}
VERIFY_NOT_REACHED();
}
void PlaybackManager::restart_playback()
NonnullRefPtr<DisplayingVideoSink> PlaybackManager::get_or_create_the_displaying_video_sink_for_track(Track const& track)
{
seek_to_timestamp(AK::Duration::zero());
auto& track_data = get_video_data_for_track(track);
if (track_data.display == nullptr) {
track_data.display = MUST(Media::DisplayingVideoSink::try_create(m_weak_wrapper));
track_data.display->set_provider(track, track_data.provider);
track_data.provider->seek(current_time());
}
VERIFY(track_data.display->provider(track) == track_data.provider);
return *track_data.display;
}
void PlaybackManager::decode_and_queue_one_sample()
void PlaybackManager::remove_the_displaying_video_sink_for_track(Track const& track)
{
#if PLAYBACK_MANAGER_DEBUG
auto start_time = MonotonicTime::now();
#endif
FrameQueueItem item_to_enqueue;
while (item_to_enqueue.is_empty()) {
OwnPtr<VideoFrame> decoded_frame = nullptr;
CodingIndependentCodePoints container_cicp;
{
Threading::MutexLocker decoder_locker(m_decoder_mutex);
// Get a sample to decode.
auto sample_result = m_demuxer->get_next_sample_for_track(m_selected_video_track);
if (sample_result.is_error()) {
item_to_enqueue = FrameQueueItem::error_marker(sample_result.release_error(), FrameQueueItem::no_timestamp);
break;
}
auto sample = sample_result.release_value();
container_cicp = sample.auxiliary_data().get<CodedVideoFrameData>().container_cicp();
// Submit the sample to the decoder.
auto decode_result = m_decoder->receive_coded_data(sample.timestamp(), sample.data());
if (decode_result.is_error()) {
item_to_enqueue = FrameQueueItem::error_marker(decode_result.release_error(), sample.timestamp());
break;
}
// Retrieve the last available frame to present.
while (true) {
auto frame_result = m_decoder->get_decoded_frame();
if (frame_result.is_error()) {
if (frame_result.error().category() == DecoderErrorCategory::NeedsMoreInput) {
break;
}
item_to_enqueue = FrameQueueItem::error_marker(frame_result.release_error(), sample.timestamp());
break;
}
decoded_frame = frame_result.release_value();
}
}
// Convert the frame for display.
if (decoded_frame != nullptr) {
auto& cicp = decoded_frame->cicp();
cicp.adopt_specified_values(container_cicp);
cicp.default_code_points_if_unspecified({ ColorPrimaries::BT709, TransferCharacteristics::BT709, MatrixCoefficients::BT709, VideoFullRangeFlag::Studio });
// BT.470 M, B/G, BT.601, BT.709 and BT.2020 have a similar transfer function to sRGB, so other applications
// (Chromium, VLC) forgo transfer characteristics conversion. We will emulate that behavior by
// handling those as sRGB instead, which causes no transfer function change in the output,
// unless display color management is later implemented.
switch (cicp.transfer_characteristics()) {
case TransferCharacteristics::BT470BG:
case TransferCharacteristics::BT470M:
case TransferCharacteristics::BT601:
case TransferCharacteristics::BT709:
case TransferCharacteristics::BT2020BitDepth10:
case TransferCharacteristics::BT2020BitDepth12:
cicp.set_transfer_characteristics(TransferCharacteristics::SRGB);
break;
default:
break;
}
auto bitmap_result = decoded_frame->to_bitmap();
if (bitmap_result.is_error())
item_to_enqueue = FrameQueueItem::error_marker(bitmap_result.release_error(), decoded_frame->timestamp());
else
item_to_enqueue = FrameQueueItem::frame(bitmap_result.release_value(), decoded_frame->timestamp());
break;
}
}
VERIFY(!item_to_enqueue.is_empty());
#if PLAYBACK_MANAGER_DEBUG
dbgln("Media Decoder: Sample at {}ms took {}ms to decode, queue contains ~{} items", item_to_enqueue.timestamp().to_milliseconds(), (MonotonicTime::now() - start_time).to_milliseconds(), m_frame_queue.weak_used());
#endif
auto wait = [&] {
auto wait_locker = Threading::MutexLocker(m_decode_wait_mutex);
m_decode_wait_condition.wait();
};
bool had_error = item_to_enqueue.is_error();
while (true) {
if (m_frame_queue.can_enqueue()) {
MUST(m_frame_queue.enqueue(move(item_to_enqueue)));
break;
}
if (m_stop_decoding.load()) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Media Decoder: Received signal to stop, exiting decode function...");
return;
}
m_buffer_is_full.exchange(true);
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Media Decoder: Waiting for a frame to be dequeued...");
wait();
}
if (had_error) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Media Decoder: Encountered {}, waiting...", item_to_enqueue.error().category() == DecoderErrorCategory::EndOfStream ? "end of stream"sv : "error"sv);
m_buffer_is_full.exchange(true);
wait();
}
m_buffer_is_full.exchange(false);
}
// Default implementation for states that do not advance in real time: report
// the media time at which the last frame was presented.
AK::Duration PlaybackManager::PlaybackStateHandler::current_time() const
{
    auto const& last_presented_media_time = m_manager.m_last_present_in_media_time;
    return last_presented_media_time;
}
// Default seek behavior shared by all states: replace the active handler with
// a SeekingStateHandler, carrying over whether playback should resume after.
ErrorOr<void> PlaybackManager::PlaybackStateHandler::seek(AK::Duration target_timestamp, SeekMode seek_mode)
{
    auto const resume_playing_after_seek = is_playing();
    return replace_handler_and_delete_this<SeekingStateHandler>(resume_playing_after_seek, target_timestamp, seek_mode);
}
// Default stop behavior shared by all states: hand control over to the
// StoppedStateHandler (which destroys `this`).
ErrorOr<void> PlaybackManager::PlaybackStateHandler::stop()
{
    return replace_handler_and_delete_this<StoppedStateHandler>();
}
// Replaces the manager's active state handler with a newly constructed T, then
// runs the new state's on_enter() and notifies listeners.
//
// DANGER: after the swap below, `this` is owned by the local `temp_handler`
// and is destroyed when this function returns. The debug-only m_has_exited
// write is safe only because `temp_handler` keeps the object alive until then;
// nothing after the swap may otherwise touch members of `this`.
template<class T, class... Args>
ErrorOr<void> PlaybackManager::PlaybackStateHandler::replace_handler_and_delete_this(Args... args)
{
// Construct the replacement first, so that a failed allocation leaves the
// current state handler untouched.
OwnPtr<PlaybackStateHandler> temp_handler = TRY(try_make<T>(m_manager, args...));
// From here on, temp_handler owns `this` (the outgoing state).
m_manager.m_playback_handler.swap(temp_handler);
#if PLAYBACK_MANAGER_DEBUG
m_has_exited = true;
dbgln("Changing state from {} to {}", temp_handler->name(), m_manager.m_playback_handler->name());
#endif
// Give the new state a chance to initialize (it may even transition again),
// then tell observers that the playback state changed.
TRY(m_manager.m_playback_handler->on_enter());
m_manager.dispatch_state_change();
return {};
}
// Accessor for the owning PlaybackManager. In debug builds, verifies that this
// handler has not already been replaced (see replace_handler_and_delete_this).
PlaybackManager& PlaybackManager::PlaybackStateHandler::manager() const
{
#if PLAYBACK_MANAGER_DEBUG
    VERIFY(!m_has_exited);
#endif
    return m_manager;
}
// Base class for transient states (Buffering, Seeking) that must remember
// whether playback should continue once their transient work completes.
class PlaybackManager::ResumingStateHandler : public PlaybackManager::PlaybackStateHandler {
public:
    ResumingStateHandler(PlaybackManager& manager, bool playing)
        : PlaybackStateHandler(manager)
        , m_playing(playing)
    {
    }
    ~ResumingStateHandler() override = default;

protected:
    // Transition to whichever state matches the remembered play/pause intent.
    ErrorOr<void> assume_next_state()
    {
        if (m_playing)
            return replace_handler_and_delete_this<PlayingStateHandler>();
        return replace_handler_and_delete_this<PausedStateHandler>();
    }

    // While in a transient state, play()/pause() only record the user's intent
    // and notify listeners; the real transition happens in assume_next_state().
    ErrorOr<void> play() override
    {
        return set_playing_state(true);
    }
    ErrorOr<void> pause() override
    {
        return set_playing_state(false);
    }
    bool is_playing() const override { return m_playing; }

    bool m_playing { false };

private:
    ErrorOr<void> set_playing_state(bool playing)
    {
        m_playing = playing;
        manager().dispatch_state_change();
        return {};
    }
};
// Active-playback state. Tracks media time as (last presented media time +
// elapsed real time) and drives frame presentation off a single-shot timer
// armed for the next frame's timestamp.
class PlaybackManager::PlayingStateHandler : public PlaybackManager::PlaybackStateHandler {
public:
PlayingStateHandler(PlaybackManager& manager)
: PlaybackStateHandler(manager)
{
}
~PlayingStateHandler() override = default;
private:
// Anchor the real-time clock to now and immediately run a presentation pass.
ErrorOr<void> on_enter() override
{
m_last_present_in_real_time = MonotonicTime::now();
return do_timed_state_update();
}
StringView name() override { return "Playing"sv; }
bool is_playing() const override { return true; }
PlaybackState get_state() const override { return PlaybackState::Playing; }
// Pausing freezes the media clock at the current time before switching state.
ErrorOr<void> pause() override
{
manager().m_last_present_in_media_time = current_time();
return replace_handler_and_delete_this<PausedStateHandler>();
}
// Entered when the frame queue runs dry; remembers that playback was active
// so BufferingStateHandler resumes into Playing.
ErrorOr<void> buffer() override
{
manager().m_last_present_in_media_time = current_time();
return replace_handler_and_delete_this<BufferingStateHandler>(true);
}
// Media time advances with the monotonic clock while playing.
AK::Duration current_time() const override
{
return manager().m_last_present_in_media_time + (MonotonicTime::now() - m_last_present_in_real_time);
}
// One presentation pass: present the due frame (dropping any stale ones),
// then arm the timer for the next frame, or transition to Buffering/error.
ErrorOr<void> do_timed_state_update() override
{
// Arms the single-shot timer to fire when m_next_frame is due. Requires
// m_next_frame to have a value.
auto set_presentation_timer = [&]() {
auto frame_time_ms = (manager().m_next_frame->timestamp() - current_time()).to_milliseconds();
VERIFY(frame_time_ms <= NumericLimits<int>::max());
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Time until next frame is {}ms", frame_time_ms);
manager().set_state_update_timer(max(static_cast<int>(frame_time_ms), 0));
};
// If the pending frame is still in the future, just re-arm the timer.
if (manager().m_next_frame.has_value() && current_time() < manager().m_next_frame->timestamp()) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Current time {}ms is too early to present the next frame at {}ms, delaying", current_time().to_milliseconds(), manager().m_next_frame->timestamp().to_milliseconds());
set_presentation_timer();
return {};
}
Optional<FrameQueueItem> future_frame_item;
bool should_present_frame = false;
// Skip frames until we find a frame past the current playback time, and keep the one that precedes it to display.
while (true) {
future_frame_item = manager().dequeue_one_frame();
if (!future_frame_item.has_value())
break;
// An error item (no_timestamp) or a frame at/after the current time ends
// the skip loop; the frame held in m_next_frame is the one to present.
if (future_frame_item->timestamp() >= current_time() || future_frame_item->timestamp() == FrameQueueItem::no_timestamp) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Should present frame, future {} is error or after {}ms", future_frame_item->debug_string(), current_time().to_milliseconds());
should_present_frame = true;
break;
}
// The previously held frame is now stale; count it as dropped.
if (manager().m_next_frame.has_value()) {
dbgln_if(PLAYBACK_MANAGER_DEBUG, "At {}ms: Dropped {} in favor of {}", current_time().to_milliseconds(), manager().m_next_frame->debug_string(), future_frame_item->debug_string());
manager().m_skipped_frames++;
}
manager().m_next_frame.emplace(future_frame_item.release_value());
}
// If we don't have both of these items, we can't present, since we need to set a timer for
// the next frame. Check if we need to buffer based on the current state.
if (!manager().m_next_frame.has_value() || !future_frame_item.has_value()) {
#if PLAYBACK_MANAGER_DEBUG
StringBuilder debug_string_builder;
debug_string_builder.append("We don't have "sv);
if (!manager().m_next_frame.has_value()) {
debug_string_builder.append("a frame to present"sv);
if (!future_frame_item.has_value())
debug_string_builder.append(" or a future frame"sv);
} else {
debug_string_builder.append("a future frame"sv);
}
debug_string_builder.append(", checking for error and buffering"sv);
dbgln_if(PLAYBACK_MANAGER_DEBUG, debug_string_builder.to_byte_string());
#endif
// A dequeued error is dispatched now; a dequeued frame is stashed for
// presentation after buffering completes.
if (future_frame_item.has_value()) {
if (future_frame_item->is_error()) {
manager().dispatch_decoder_error(future_frame_item.release_value().release_error());
return {};
}
manager().m_next_frame.emplace(future_frame_item.release_value());
}
TRY(buffer());
return {};
}
// If we have a frame, send it for presentation.
if (should_present_frame) {
// Fold the elapsed real time into the media clock before presenting.
auto now = MonotonicTime::now();
manager().m_last_present_in_media_time += now - m_last_present_in_real_time;
m_last_present_in_real_time = now;
// dispatch_frame_queue_item() returns true when the item was an error.
if (manager().dispatch_frame_queue_item(manager().m_next_frame.release_value()))
return {};
}
// Now that we've presented the current frame, we can throw whatever error is next in queue.
// This way, we always display a frame before the stream ends, and should also show any frames
// we already had when a real error occurs.
if (future_frame_item->is_error()) {
manager().dispatch_decoder_error(future_frame_item.release_value().release_error());
return {};
}
// The future frame item becomes the next one to present.
manager().m_next_frame.emplace(future_frame_item.release_value());
set_presentation_timer();
return {};
}
// Real-time anchor for current_time(); reset in on_enter() and on each
// presentation. NOTE(review): the in-class initializer uses now_coarse()
// while on_enter() uses now() — the initializer value is always overwritten
// before use, so the mismatch appears harmless.
MonotonicTime m_last_present_in_real_time = MonotonicTime::now_coarse();
};
// Playback is halted at the current media time. The only way out of this
// state is play(), which resumes via a PlayingStateHandler.
class PlaybackManager::PausedStateHandler : public PlaybackManager::PlaybackStateHandler {
public:
    PausedStateHandler(PlaybackManager& manager)
        : PlaybackStateHandler(manager)
    {
    }
    ~PausedStateHandler() override = default;

private:
    StringView name() override { return "Paused"sv; }
    PlaybackState get_state() const override { return PlaybackState::Paused; }
    bool is_playing() const override { return false; }

    // Resume playback by handing control to the playing state.
    ErrorOr<void> play() override { return replace_handler_and_delete_this<PlayingStateHandler>(); }
};
// Delay (in milliseconds, see set_state_update_timer's delay_ms parameter)
// between polls of the frame queue while buffering or seeking.
// FIXME: This is a placeholder variable that could be scaled based on how long each frame decode takes to
// avoid triggering the timer to check the queue constantly. However, doing so may reduce the speed
// of seeking due to the decode thread having to wait for a signal to continue decoding.
constexpr int buffering_or_seeking_decode_wait_time = 1;
// Waits for the decode thread to fill the frame queue by polling on a short
// timer, then resumes the play/pause intent recorded by ResumingStateHandler.
class PlaybackManager::BufferingStateHandler : public PlaybackManager::ResumingStateHandler {
    using PlaybackManager::ResumingStateHandler::ResumingStateHandler;

    StringView name() override { return "Buffering"sv; }
    PlaybackState get_state() const override { return PlaybackState::Buffering; }

    ErrorOr<void> on_enter() override
    {
        arm_poll_timer();
        return {};
    }

    ErrorOr<void> do_timed_state_update() override
    {
        auto const buffer_is_full = manager().m_buffer_is_full.load();
        dbgln_if(PLAYBACK_MANAGER_DEBUG, "Buffering timer callback has been called. Buffer is {}.", buffer_is_full ? "full, exiting"sv : "not full, waiting"sv);
        if (!buffer_is_full) {
            arm_poll_timer();
            return {};
        }
        return assume_next_state();
    }

    // Schedule another check of the queue after a short delay.
    void arm_poll_timer()
    {
        manager().set_state_update_timer(buffering_or_seeking_decode_wait_time);
    }
};
// Seeks the demuxer to the most recent keyframe before the target, then
// decodes/discards frames until the target timestamp is reached, finally
// resuming the play/pause intent recorded by ResumingStateHandler.
class PlaybackManager::SeekingStateHandler : public PlaybackManager::ResumingStateHandler {
public:
SeekingStateHandler(PlaybackManager& manager, bool playing, AK::Duration target_timestamp, SeekMode seek_mode)
: ResumingStateHandler(manager, playing)
, m_target_timestamp(target_timestamp)
, m_seek_mode(seek_mode)
{
}
~SeekingStateHandler() override = default;
private:
ErrorOr<void> on_enter() override
{
// The demuxer may keep its position if a sample at/after this timestamp is
// already available; start from the last presented time, or the pending
// frame's time if that is earlier.
auto earliest_available_sample = manager().m_last_present_in_media_time;
if (manager().m_next_frame.has_value() && manager().m_next_frame->timestamp() != FrameQueueItem::no_timestamp) {
earliest_available_sample = min(earliest_available_sample, manager().m_next_frame->timestamp());
}
{
Threading::MutexLocker demuxer_locker(manager().m_decoder_mutex);
auto demuxer_seek_result = manager().seek_demuxer_to_most_recent_keyframe(m_target_timestamp, earliest_available_sample);
if (demuxer_seek_result.is_error()) {
manager().dispatch_decoder_error(demuxer_seek_result.release_error());
return {};
}
// Empty when the demuxer kept its current iterator position.
auto keyframe_timestamp = demuxer_seek_result.release_value();
#if PLAYBACK_MANAGER_DEBUG
auto seek_mode_name = m_seek_mode == SeekMode::Accurate ? "Accurate"sv : "Fast"sv;
if (keyframe_timestamp.has_value())
dbgln("{} seeking to timestamp target {}ms, selected keyframe at {}ms", seek_mode_name, m_target_timestamp.to_milliseconds(), keyframe_timestamp->to_milliseconds());
else
dbgln("{} seeking to timestamp target {}ms, demuxer kept its iterator position after {}ms", seek_mode_name, m_target_timestamp.to_milliseconds(), earliest_available_sample.to_milliseconds());
#endif
// Fast seeks settle for the keyframe itself rather than decoding up to the
// exact target.
if (m_seek_mode == SeekMode::Fast)
m_target_timestamp = keyframe_timestamp.value_or(manager().m_last_present_in_media_time);
if (keyframe_timestamp.has_value()) {
// The demuxer moved: everything queued predates the new position, so
// drain the queue and restart the media clock at the keyframe.
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Keyframe is nearer to the target than the current frames, emptying queue");
while (manager().dequeue_one_frame().has_value()) { }
manager().m_next_frame.clear();
manager().m_last_present_in_media_time = keyframe_timestamp.value();
} else if (m_target_timestamp >= manager().m_last_present_in_media_time && manager().m_next_frame.has_value() && manager().m_next_frame.value().timestamp() > m_target_timestamp) {
// The target falls between the last presented frame and the pending one;
// no decoding is needed, just adjust the clock and resume.
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Target timestamp is between the last presented frame and the next frame, exiting seek at {}ms", m_target_timestamp.to_milliseconds());
manager().m_last_present_in_media_time = m_target_timestamp;
return assume_next_state();
}
}
return skip_samples_until_timestamp();
}
// Dequeues frames until one lands past the target (or an error arrives),
// presents the frame preceding the target, and resumes. Re-arms the poll
// timer if the queue runs dry before the target is reached.
ErrorOr<void> skip_samples_until_timestamp()
{
while (true) {
auto optional_item = manager().dequeue_one_frame();
if (!optional_item.has_value())
break;
auto item = optional_item.release_value();
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Dequeuing frame at {}ms and comparing to seek target {}ms", item.timestamp().to_milliseconds(), m_target_timestamp.to_milliseconds());
if (manager().m_next_frame.has_value() && (item.timestamp() > m_target_timestamp || item.timestamp() == FrameQueueItem::no_timestamp)) {
// If the frame we're presenting is later than the target timestamp, skip the timestamp forward to it.
if (manager().m_next_frame->timestamp() > m_target_timestamp) {
manager().m_last_present_in_media_time = manager().m_next_frame->timestamp();
} else {
manager().m_last_present_in_media_time = m_target_timestamp;
}
// dispatch_frame_queue_item() returns true when the item was an error.
if (manager().dispatch_frame_queue_item(manager().m_next_frame.release_value()))
return {};
manager().m_next_frame.emplace(item);
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Exiting seek to {} state at {}ms", m_playing ? "Playing" : "Paused", manager().m_last_present_in_media_time.to_milliseconds());
return assume_next_state();
}
manager().m_next_frame.emplace(item);
}
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Frame queue is empty while seeking, waiting for buffer to fill.");
manager().set_state_update_timer(buffering_or_seeking_decode_wait_time);
return {};
}
StringView name() override { return "Seeking"sv; }
// A seek issued mid-seek just retargets this handler and restarts it.
ErrorOr<void> seek(AK::Duration target_timestamp, SeekMode seek_mode) override
{
m_target_timestamp = target_timestamp;
m_seek_mode = seek_mode;
return on_enter();
}
// While seeking, report the target as the current time.
AK::Duration current_time() const override
{
return m_target_timestamp;
}
// We won't need this override when threaded, the queue can pause us in on_enter().
ErrorOr<void> do_timed_state_update() override
{
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Seeking wait finished, attempting to dequeue until timestamp.");
return skip_samples_until_timestamp();
}
PlaybackState get_state() const override { return PlaybackState::Seeking; }
AK::Duration m_target_timestamp { AK::Duration::zero() };
SeekMode m_seek_mode { SeekMode::Accurate };
};
// Terminal state reached at end-of-stream or after an error. Playing again
// restarts from the beginning via a fast seek to zero.
class PlaybackManager::StoppedStateHandler : public PlaybackManager::PlaybackStateHandler {
public:
    StoppedStateHandler(PlaybackManager& manager)
        : PlaybackStateHandler(manager)
    {
    }
    ~StoppedStateHandler() override = default;

private:
    StringView name() override { return "Stopped"sv; }
    PlaybackState get_state() const override { return PlaybackState::Stopped; }
    bool is_playing() const override { return false; }

    // Nothing to initialize when entering the stopped state.
    ErrorOr<void> on_enter() override { return {}; }

    ErrorOr<void> play() override
    {
        // When Stopped, the decoder thread will be waiting for a signal to start its loop going again.
        manager().m_decode_wait_condition.broadcast();
        return replace_handler_and_delete_this<SeekingStateHandler>(true, AK::Duration::zero(), SeekMode::Fast);
    }
};
DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> PlaybackManager::create(NonnullRefPtr<Demuxer> const& demuxer)
{
auto optional_track = TRY(demuxer->get_preferred_track_for_type(TrackType::Video));
if (!optional_track.has_value()) {
return DecoderError::with_description(DecoderErrorCategory::Invalid, "No video track is present"sv);
}
auto track = optional_track.release_value();
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Selecting video track number {}", track.identifier());
auto codec_id = TRY(demuxer->get_codec_id_for_track(track));
auto codec_initialization_data = TRY(demuxer->get_codec_initialization_data_for_track(track));
NonnullOwnPtr<VideoDecoder> decoder = TRY(FFmpeg::FFmpegVideoDecoder::try_create(codec_id, codec_initialization_data));
auto frame_queue = DECODER_TRY_ALLOC(VideoFrameQueue::create());
auto playback_manager = DECODER_TRY_ALLOC(try_make<PlaybackManager>(demuxer, track, move(decoder), move(frame_queue)));
playback_manager->m_state_update_timer = Core::Timer::create_single_shot(0, [&self = *playback_manager] { self.timer_callback(); });
playback_manager->m_decode_thread = DECODER_TRY_ALLOC(Threading::Thread::try_create([&self = *playback_manager] {
while (!self.m_stop_decoding.load())
self.decode_and_queue_one_sample();
dbgln_if(PLAYBACK_MANAGER_DEBUG, "Media Decoder thread ended.");
return 0;
},
"Media Decoder"sv));
playback_manager->m_playback_handler = make<SeekingStateHandler>(*playback_manager, false, AK::Duration::zero(), SeekMode::Fast);
DECODER_TRY_ALLOC(playback_manager->m_playback_handler->on_enter());
playback_manager->m_decode_thread->start();
return playback_manager;
auto& track_data = get_video_data_for_track(track);
track_data.display->set_provider(track, nullptr);
track_data.display = nullptr;
}
}

View file

@@ -1,239 +1,113 @@
/*
* Copyright (c) 2022, Gregory Bertilson <zaggy1024@gmail.com>
* Copyright (c) 2022-2025, Gregory Bertilson <gregory@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Atomic.h>
#include <AK/Function.h>
#include <AK/NonnullOwnPtr.h>
#include <AK/Queue.h>
#include <AK/AtomicRefCounted.h>
#include <AK/Forward.h>
#include <AK/NonnullRefPtr.h>
#include <AK/Stream.h>
#include <AK/Time.h>
#include <LibCore/SharedCircularQueue.h>
#include <LibGfx/Bitmap.h>
#include <LibMedia/Demuxer.h>
#include <AK/Vector.h>
#include <LibMedia/DecoderError.h>
#include <LibMedia/Export.h>
#include <LibThreading/ConditionVariable.h>
#include <LibMedia/Forward.h>
#include <LibMedia/Providers/MediaTimeProvider.h>
#include <LibMedia/Track.h>
#include <LibThreading/Mutex.h>
#include <LibThreading/Thread.h>
#include "VideoDecoder.h"
namespace Media {
// One entry in the video frame queue: a decoded frame ready for presentation,
// an error to surface after the frames preceding it, or nothing at all.
class FrameQueueItem {
public:
    // Sentinel for items (errors) that carry no presentation timestamp.
    static constexpr AK::Duration no_timestamp = AK::Duration::min();

    enum class Type {
        Frame,
        Error,
    };

    FrameQueueItem()
        : m_data(Empty())
        , m_timestamp(AK::Duration::zero())
    {
    }

    static FrameQueueItem frame(RefPtr<Gfx::Bitmap> bitmap, AK::Duration timestamp)
    {
        return FrameQueueItem(move(bitmap), timestamp);
    }

    static FrameQueueItem error_marker(DecoderError&& error, AK::Duration timestamp)
    {
        return FrameQueueItem(move(error), timestamp);
    }

    bool is_empty() const { return m_data.has<Empty>(); }
    AK::Duration timestamp() const { return m_timestamp; }

    bool is_frame() const { return m_data.has<RefPtr<Gfx::Bitmap>>(); }
    RefPtr<Gfx::Bitmap> bitmap() const { return m_data.get<RefPtr<Gfx::Bitmap>>(); }

    bool is_error() const { return m_data.has<DecoderError>(); }
    DecoderError const& error() const { return m_data.get<DecoderError>(); }

    // Moves the error out, leaving this item empty.
    DecoderError release_error()
    {
        auto error = move(m_data.get<DecoderError>());
        m_data.set(Empty());
        return error;
    }

    ByteString debug_string() const
    {
        if (is_error())
            return ByteString::formatted("{} at {}ms", error().string_literal(), timestamp().to_milliseconds());
        return ByteString::formatted("frame at {}ms", timestamp().to_milliseconds());
    }

private:
    FrameQueueItem(RefPtr<Gfx::Bitmap> bitmap, AK::Duration timestamp)
        : m_data(move(bitmap))
        , m_timestamp(timestamp)
    {
        // Real frames must carry a valid presentation time.
        VERIFY(m_timestamp != no_timestamp);
    }

    FrameQueueItem(DecoderError&& error, AK::Duration timestamp)
        : m_data(move(error))
        , m_timestamp(timestamp)
    {
    }

    Variant<Empty, RefPtr<Gfx::Bitmap>, DecoderError> m_data { Empty() };
    AK::Duration m_timestamp { no_timestamp };
};
// Capacity of the queue carrying decoded frames from the decode thread to the
// presenting state handlers.
static constexpr size_t frame_buffer_count = 4;
using VideoFrameQueue = Core::SharedSingleProducerCircularQueue<FrameQueueItem, frame_buffer_count>;
enum class PlaybackState {
Playing,
Paused,
Buffering,
Seeking,
Stopped,
};
// NOTE(review): The region below appears to contain two interleaved versions of
// the PlaybackManager declaration (this view looks like a rendered diff): note
// the two class-declaration lines immediately below, the coexisting
// from_data/from_stream and try_create factories, and the interleaved
// PlaybackStateHandler/WeakPlaybackManager members further down. Code is left
// byte-identical; only comments were added. TODO: reconcile against the
// repository's actual header before relying on any single declaration here.
class MEDIA_API PlaybackManager {
class MEDIA_API PlaybackManager final : public AtomicRefCounted<PlaybackManager> {
AK_MAKE_NONCOPYABLE(PlaybackManager);
AK_MAKE_NONMOVABLE(PlaybackManager);
class WeakPlaybackManager;
public:
enum class SeekMode {
Accurate,
Fast,
};
static constexpr size_t EXPECTED_VIDEO_TRACK_COUNT = 1;
static constexpr SeekMode DEFAULT_SEEK_MODE = SeekMode::Accurate;
using VideoTracks = Vector<Track, EXPECTED_VIDEO_TRACK_COUNT>;
// NOTE(review): from_data/from_stream and try_create appear to be factory
// declarations from different versions of this class — confirm which set the
// implementation actually defines.
static DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> from_data(ReadonlyBytes data);
static DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> from_stream(NonnullOwnPtr<SeekableStream> stream);
PlaybackManager(NonnullRefPtr<Demuxer> const& demuxer, Track video_track, NonnullOwnPtr<VideoDecoder>&& decoder, VideoFrameQueue&& frame_queue);
static DecoderErrorOr<NonnullRefPtr<PlaybackManager>> try_create(NonnullOwnPtr<SeekableStream>&& stream);
~PlaybackManager();
// Playback control entry points.
void resume_playback();
void pause_playback();
void restart_playback();
void terminate_playback();
void seek_to_timestamp(AK::Duration, SeekMode = DEFAULT_SEEK_MODE);
// State queries delegated to the current playback state handler.
bool is_playing() const
{
return m_playback_handler->is_playing();
}
PlaybackState get_state() const
{
return m_playback_handler->get_state();
}
AK::Duration current_time() const;
AK::Duration duration() const;
u64 number_of_skipped_frames() const { return m_skipped_frames; }
VideoTracks const& video_tracks() const { return m_video_tracks; }
Optional<Track> preferred_video_track();
// NOTE(review): current_playback_time()/duration() overlap with
// current_time()/duration() const above — presumably from the other version
// of this class; verify.
AK::Duration current_playback_time();
AK::Duration duration();
// Creates a DisplayingVideoSink for the specified track.
//
// Note that in order for the current frame to change based on the media time, users must call
// DisplayingVideoSink::update(). It is recommended to drive this off of vertical sync.
NonnullRefPtr<DisplayingVideoSink> get_or_create_the_displaying_video_sink_for_track(Track const& track);
// Removes the DisplayingVideoSink for the specified track. This will prevent the sink from
// retrieving any subsequent frames from the decoder.
void remove_the_displaying_video_sink_for_track(Track const& track);
// Caller-installed callbacks for frames, state changes, and errors.
Function<void(RefPtr<Gfx::Bitmap>)> on_video_frame;
Function<void()> on_playback_state_change;
Function<void(DecoderError)> on_decoder_error;
Function<void(Error)> on_fatal_playback_error;
Track const& selected_video_track() const { return m_selected_video_track; }
Function<void(DecoderError&&)> on_error;
private:
class PlaybackStateHandler;
// Abstract class to allow resuming play/pause after the state is completed.
class ResumingStateHandler;
class PlayingStateHandler;
class PausedStateHandler;
class BufferingStateHandler;
class SeekingStateHandler;
class StoppedStateHandler;
// Weak back-reference wrapper implementing MediaTimeProvider; revoked when the
// PlaybackManager dies so providers/sinks stop querying it.
class WeakPlaybackManager final : public MediaTimeProvider {
friend class PlaybackManager;
static DecoderErrorOr<NonnullOwnPtr<PlaybackManager>> create(NonnullRefPtr<Demuxer> const& demuxer);
void timer_callback();
// This must be called with m_demuxer_mutex locked!
DecoderErrorOr<Optional<AK::Duration>> seek_demuxer_to_most_recent_keyframe(AK::Duration timestamp, Optional<AK::Duration> earliest_available_sample = OptionalNone());
Optional<FrameQueueItem> dequeue_one_frame();
void set_state_update_timer(int delay_ms);
void decode_and_queue_one_sample();
void dispatch_decoder_error(DecoderError error);
void dispatch_new_frame(RefPtr<Gfx::Bitmap> frame);
// Returns whether we changed playback states. If so, any PlaybackStateHandler processing must cease.
[[nodiscard]] bool dispatch_frame_queue_item(FrameQueueItem&&);
void dispatch_state_change();
void dispatch_fatal_error(Error);
AK::Duration m_last_present_in_media_time = AK::Duration::zero();
NonnullRefPtr<Demuxer> m_demuxer;
Threading::Mutex m_decoder_mutex;
Track m_selected_video_track;
VideoFrameQueue m_frame_queue;
RefPtr<Core::Timer> m_state_update_timer;
RefPtr<Threading::Thread> m_decode_thread;
NonnullOwnPtr<VideoDecoder> m_decoder;
Atomic<bool> m_stop_decoding { false };
Threading::Mutex m_decode_wait_mutex;
Threading::ConditionVariable m_decode_wait_condition;
Atomic<bool> m_buffer_is_full { false };
OwnPtr<PlaybackStateHandler> m_playback_handler;
Optional<FrameQueueItem> m_next_frame;
u64 m_skipped_frames { 0 };
// This is a nested class to allow private access.
// NOTE(review): from here the PlaybackStateHandler and WeakPlaybackManager
// member definitions appear interleaved (e.g. a constructor initializer list
// immediately followed by `WeakPlaybackManager() = default;`) — do not read
// this span as a single coherent class body.
class PlaybackStateHandler {
public:
PlaybackStateHandler(PlaybackManager& manager)
: m_manager(manager)
WeakPlaybackManager() = default;
RefPtr<PlaybackManager> take_strong() const
{
Threading::MutexLocker locker { m_mutex };
return m_manager;
}
virtual ~PlaybackStateHandler() = default;
virtual StringView name() = 0;
virtual ErrorOr<void> on_enter() { return {}; }
virtual ErrorOr<void> play() { return {}; }
virtual bool is_playing() const = 0;
virtual PlaybackState get_state() const = 0;
virtual ErrorOr<void> pause() { return {}; }
virtual ErrorOr<void> buffer() { return {}; }
virtual ErrorOr<void> seek(AK::Duration target_timestamp, SeekMode);
virtual ErrorOr<void> stop();
virtual AK::Duration current_time() const;
virtual ErrorOr<void> do_timed_state_update() { return {}; }
protected:
template<class T, class... Args>
ErrorOr<void> replace_handler_and_delete_this(Args... args);
PlaybackManager& manager() const;
PlaybackManager& manager()
virtual AK::Duration current_time() const override
{
return const_cast<PlaybackManager&>(const_cast<PlaybackStateHandler const*>(this)->manager());
Threading::MutexLocker locker { m_mutex };
if (m_manager)
return m_manager->current_time();
return AK::Duration::zero();
}
private:
PlaybackManager& m_manager;
#if PLAYBACK_MANAGER_DEBUG
bool m_has_exited { false };
#endif
void revoke()
{
Threading::MutexLocker locker { m_mutex };
m_manager = nullptr;
}
mutable Threading::Mutex m_mutex;
PlaybackManager* m_manager { nullptr };
};
// Per-track decoding state: the track, its data provider, and (optionally)
// the sink currently displaying it.
struct VideoTrackData {
Track track;
NonnullRefPtr<VideoDataProvider> provider;
RefPtr<DisplayingVideoSink> display;
};
using VideoTrackDatas = Vector<VideoTrackData, EXPECTED_VIDEO_TRACK_COUNT>;
PlaybackManager(NonnullRefPtr<MutexedDemuxer> const&, NonnullRefPtr<WeakPlaybackManager> const&, VideoTracks&&, VideoTrackDatas&&);
void set_up_error_handlers();
void dispatch_error(DecoderError&&);
VideoTrackData& get_video_data_for_track(Track const& track);
NonnullRefPtr<MutexedDemuxer> m_demuxer;
NonnullRefPtr<WeakPlaybackManager> m_weak_wrapper;
VideoTracks m_video_tracks;
VideoTrackDatas m_video_track_datas;
MonotonicTime m_real_time_base;
bool m_is_in_error_state { false };
};
}

View file

@ -14,6 +14,7 @@
#include <LibWeb/DOM/Element.h>
#include <LibWeb/HTML/BrowsingContext.h>
#include <LibWeb/HTML/EventLoop/EventLoop.h>
#include <LibWeb/HTML/HTMLMediaElement.h>
#include <LibWeb/HTML/Scripting/Agent.h>
#include <LibWeb/HTML/Scripting/Environments.h>
#include <LibWeb/HTML/Scripting/TemporaryExecutionContext.h>
@ -362,6 +363,10 @@ void EventLoop::update_the_rendering()
return true;
});
// AD-HOC: Update all the displayed video frames on HTMLMediaElements in documents' pages.
for (auto& document : docs)
document->page().update_all_media_element_video_sinks();
// FIXME: 4. Unnecessary rendering: Remove from docs any Document object doc for which all of the following are true:
// FIXME: 5. Remove from docs all Document objects for which the user agent believes that it's preferable to skip updating the rendering for other reasons.

View file

@ -7,8 +7,6 @@
#include <LibWeb/Bindings/HTMLAudioElementPrototype.h>
#include <LibWeb/CSS/ComputedProperties.h>
#include <LibWeb/CSS/StyleValues/DisplayStyleValue.h>
#include <LibWeb/HTML/AudioTrack.h>
#include <LibWeb/HTML/AudioTrackList.h>
#include <LibWeb/HTML/HTMLAudioElement.h>
#include <LibWeb/HTML/Window.h>
#include <LibWeb/Layout/AudioBox.h>
@ -57,32 +55,4 @@ Layout::AudioBox const* HTMLAudioElement::layout_node() const
return static_cast<Layout::AudioBox const*>(Node::layout_node());
}
// Starts playback on every currently-enabled audio track.
void HTMLAudioElement::on_playing()
{
    audio_tracks()->for_each_enabled_track([](auto& track) {
        track.play();
    });
}
// Pauses every currently-enabled audio track.
void HTMLAudioElement::on_paused()
{
    audio_tracks()->for_each_enabled_track([](auto& track) {
        track.pause();
    });
}
// Forwards a seek request to every currently-enabled audio track.
void HTMLAudioElement::on_seek(double position, MediaSeekMode seek_mode)
{
    audio_tracks()->for_each_enabled_track([&](auto& track) {
        track.seek(position, seek_mode);
    });
}
// Propagates the element's volume to every currently-enabled audio track.
void HTMLAudioElement::on_volume_change()
{
    audio_tracks()->for_each_enabled_track([&](auto& track) {
        track.update_volume();
    });
}
}

View file

@ -29,11 +29,6 @@ private:
virtual GC::Ptr<Layout::Node> create_layout_node(GC::Ref<CSS::ComputedProperties>) override;
virtual void adjust_computed_style(CSS::ComputedProperties&) override;
virtual void on_playing() override;
virtual void on_paused() override;
virtual void on_seek(double, MediaSeekMode) override;
virtual void on_volume_change() override;
};
}

View file

@ -1,13 +1,15 @@
/*
* Copyright (c) 2020, the SerenityOS developers.
* Copyright (c) 2023-2024, Tim Flynn <trflynn89@serenityos.org>
* Copyright (c) 2025, Gregory Bertilson <gregory@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibJS/Runtime/Promise.h>
#include <LibMedia/Audio/Loader.h>
#include <LibMedia/PlaybackManager.h>
#include <LibMedia/Sinks/DisplayingVideoSink.h>
#include <LibMedia/Track.h>
#include <LibWeb/Bindings/HTMLMediaElementPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/DOM/Document.h>
@ -75,6 +77,13 @@ void HTMLMediaElement::initialize(JS::Realm& realm)
// Tears down playback state when the element is finalized by the GC.
void HTMLMediaElement::finalize()
{
Base::finalize();
// Detach the displaying video sink from the playback manager so it stops
// receiving frames. A selected track implies its sink exists.
if (m_selected_video_track) {
VERIFY(m_selected_video_track_sink);
m_playback_manager->remove_the_displaying_video_sink_for_track(m_selected_video_track->track_in_playback_manager());
m_selected_video_track_sink = nullptr;
}
document().page().unregister_media_element({}, unique_id());
}
@ -97,6 +106,7 @@ void HTMLMediaElement::visit_edges(Cell::Visitor& visitor)
visitor.visit(m_source_element_selector);
visitor.visit(m_fetch_controller);
visitor.visit(m_pending_play_promises);
visitor.visit(m_selected_video_track);
}
void HTMLMediaElement::attribute_changed(FlyString const& name, Optional<String> const& old_value, Optional<String> const& value, Optional<FlyString> const& namespace_)
@ -300,9 +310,6 @@ void HTMLMediaElement::set_current_playback_position(double playback_position)
time_marches_on();
// NOTE: This notifies blocked seek_element() invocations that we have finished seeking.
m_seek_in_progress = false;
// NOTE: Invoking the following steps is not listed in the spec. Rather, the spec just describes the scenario in
// which these steps should be invoked, which is when we've reached the end of the media playback.
if (m_current_playback_position == m_duration)
@ -447,12 +454,12 @@ void HTMLMediaElement::volume_or_muted_attribute_changed()
if (auto* paintable = this->paintable())
paintable->set_needs_display();
on_volume_change();
// FIXME: Set the volume on the PlaybackManager.
}
void HTMLMediaElement::page_mute_state_changed(Badge<Page>)
{
on_volume_change();
// FIXME: Set the volume on the PlaybackManager.
}
// https://html.spec.whatwg.org/multipage/media.html#effective-media-volume
@ -1119,65 +1126,93 @@ bool HTMLMediaElement::verify_response(GC::Ref<Fetch::Infrastructure::Response>
TODO();
}
// Switches the element's selected video track, moving the displaying video
// sink from the previously-selected track (if any) to the new one. A null
// track deselects video entirely.
void HTMLMediaElement::set_selected_video_track(Badge<VideoTrack>, GC::Ptr<HTML::VideoTrack> video_track)
{
// Track selection affects rendering, so invalidate style and layout.
set_needs_style_update(true);
if (auto layout_node = this->layout_node())
layout_node->set_needs_layout_update(DOM::SetNeedsLayoutReason::HTMLVideoElementSetVideoTrack);
// Remove the old track's sink first so it stops retrieving frames.
if (m_selected_video_track) {
VERIFY(m_selected_video_track_sink);
m_playback_manager->remove_the_displaying_video_sink_for_track(m_selected_video_track->track_in_playback_manager());
m_selected_video_track_sink = nullptr;
}
m_selected_video_track = video_track;
// Only create a sink when a track is actually selected.
if (video_track)
m_selected_video_track_sink = m_playback_manager->get_or_create_the_displaying_video_sink_for_track(video_track->track_in_playback_manager());
}
// Pulls the newest decoded frame from the selected track's sink and keeps the
// element's official playback position in sync with the playback manager,
// invalidating the paintable when either of them changed.
void HTMLMediaElement::update_video_frame_and_timeline()
{
    // Nothing to do without a paintable to invalidate or a playback manager to query.
    if (!paintable() || !m_playback_manager)
        return;

    bool should_repaint = false;

    if (m_selected_video_track_sink)
        should_repaint = m_selected_video_track_sink->update() == Media::DisplayingVideoSinkUpdateResult::NewFrameAvailable;

    // FIXME: It might not be the best idea to send time updates at 60Hz, but this is a convenient place for this
    //        for now.
    auto const position = m_playback_manager->current_time().to_seconds_f64();
    if (position != m_official_playback_position) {
        set_current_playback_position(position);
        should_repaint = true;
    }

    if (should_repaint)
        paintable()->set_needs_display();
}
// https://html.spec.whatwg.org/multipage/media.html#media-data-processing-steps-list
WebIDL::ExceptionOr<void> HTMLMediaElement::process_media_data(Function<void(String)> failure_callback)
{
auto& realm = this->realm();
auto audio_loader = Audio::Loader::create(m_media_data.bytes());
auto playback_manager = Media::PlaybackManager::from_data(m_media_data);
auto playback_manager_result = Media::PlaybackManager::try_create(make<FixedMemoryStream>(m_media_data.bytes()));
// -> If the media data cannot be fetched at all, due to network errors, causing the user agent to give up trying to fetch the resource
// -> If the media data can be fetched but is found by inspection to be in an unsupported format, or can otherwise not be rendered at all
if (audio_loader.is_error() && playback_manager.is_error()) {
if (playback_manager_result.is_error()) {
// 1. The user agent should cancel the fetching process.
m_fetch_controller->stop_fetch();
// 2. Abort this subalgorithm, returning to the resource selection algorithm.
failure_callback(MUST(String::from_utf8(playback_manager.error().description())));
failure_callback(MUST(String::from_utf8(playback_manager_result.error().description())));
return {};
}
GC::Ptr<AudioTrack> audio_track;
GC::Ptr<VideoTrack> video_track;
// NOTE: The spec is unclear on whether the following media resource track conditions should trigger multiple
// times on one media resource, but it is implied to be possible by the start of the "Media elements"
// section, where it says that a "media resource can have multiple audio and video tracks."
// https://html.spec.whatwg.org/multipage/media.html#media-elements
// Therefore, we enumerate all the available tracks into our VideoTrackList and AudioTrackList.
// -> If the media resource is found to have an audio track
if (!audio_loader.is_error()) {
// 1. Create an AudioTrack object to represent the audio track.
audio_track = realm.create<AudioTrack>(realm, *this, audio_loader.release_value());
m_playback_manager = playback_manager_result.release_value();
// 2. Update the media element's audioTracks attribute's AudioTrackList object with the new AudioTrack object.
m_audio_tracks->add_track({}, *audio_track);
// 3. Let enable be unknown.
auto enable = TriState::Unknown;
// FIXME: 4. If either the media resource or the URL of the current media resource indicate a particular set of audio tracks to enable, or if
// the user agent has information that would facilitate the selection of specific audio tracks to improve the user's experience, then:
// if this audio track is one of the ones to enable, then set enable to true, otherwise, set enable to false.
// 5. If enable is still unknown, then, if the media element does not yet have an enabled audio track, then set enable to true, otherwise,
// set enable to false.
if (enable == TriState::Unknown)
enable = m_audio_tracks->has_enabled_track() ? TriState::False : TriState::True;
// 6. If enable is true, then enable this audio track, otherwise, do not enable this audio track.
if (enable == TriState::True)
audio_track->set_enabled(true);
// 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent, with the track attribute initialized to the new AudioTrack object.
TrackEventInit event_init {};
event_init.track = GC::make_root(audio_track);
auto event = TrackEvent::create(realm, HTML::EventNames::addtrack, move(event_init));
m_audio_tracks->dispatch_event(event);
}
// FIXME: -> If the media resource is found to have an audio track
// 1. Create an AudioTrack object to represent the audio track.
// 2. Update the media element's audioTracks attribute's AudioTrackList object with the new AudioTrack object.
// 3. Let enable be unknown.
// 4. If either the media resource or the URL of the current media resource indicate a particular set of audio tracks to enable, or if
// the user agent has information that would facilitate the selection of specific audio tracks to improve the user's experience, then:
// if this audio track is one of the ones to enable, then set enable to true, otherwise, set enable to false.
// 5. If enable is still unknown, then, if the media element does not yet have an enabled audio track, then set enable to true, otherwise,
// set enable to false.
// 6. If enable is true, then enable this audio track, otherwise, do not enable this audio track.
// 7. Fire an event named addtrack at this AudioTrackList object, using TrackEvent, with the track attribute initialized to the new AudioTrack object.
// -> If the media resource is found to have a video track
if (!playback_manager.is_error()) {
auto preferred_video_track = m_playback_manager->preferred_video_track();
auto has_selected_preferred_video_track = false;
for (auto const& track : m_playback_manager->video_tracks()) {
// 1. Create a VideoTrack object to represent the video track.
video_track = realm.create<VideoTrack>(realm, *this, playback_manager.release_value());
auto video_track = realm.create<VideoTrack>(realm, *this, track);
// 2. Update the media element's videoTracks attribute's VideoTrackList object with the new VideoTrack object.
m_video_tracks->add_track({}, *video_track);
@ -1185,9 +1220,17 @@ WebIDL::ExceptionOr<void> HTMLMediaElement::process_media_data(Function<void(Str
// 3. Let enable be unknown.
auto enable = TriState::Unknown;
// FIXME: 4. If either the media resource or the URL of the current media resource indicate a particular set of video tracks to enable, or if
// the user agent has information that would facilitate the selection of specific video tracks to improve the user's experience, then:
// if this video track is the first such video track, then set enable to true, otherwise, set enable to false.
// 4. If either the media resource or the URL of the current media resource indicate a particular set of video tracks to enable, or if
// the user agent has information that would facilitate the selection of specific video tracks to improve the user's experience, then:
// if this video track is the first such video track, then set enable to true, otherwise, set enable to false.
if (preferred_video_track.has_value()) {
if (track == preferred_video_track && !has_selected_preferred_video_track) {
enable = TriState::True;
has_selected_preferred_video_track = true;
} else {
enable = TriState::False;
}
}
// 5. If enable is still unknown, then, if the media element does not yet have a selected video track, then set enable to true, otherwise, set
// enable to false.
@ -1207,80 +1250,77 @@ WebIDL::ExceptionOr<void> HTMLMediaElement::process_media_data(Function<void(Str
m_video_tracks->dispatch_event(event);
}
if (preferred_video_track.has_value())
VERIFY(has_selected_preferred_video_track);
// -> Once enough of the media data has been fetched to determine the duration of the media resource, its dimensions, and other metadata
if (audio_track != nullptr || video_track != nullptr) {
// AD-HOC: After selecting a track, we do not need the source element selector anymore.
m_source_element_selector = nullptr;
// AD-HOC: After selecting a track, we do not need the source element selector anymore.
m_source_element_selector = nullptr;
// FIXME: 1. Establish the media timeline for the purposes of the current playback position and the earliest possible position, based on the media data.
// FIXME: 2. Update the timeline offset to the date and time that corresponds to the zero time in the media timeline established in the previous step,
// if any. If no explicit time and date is given by the media resource, the timeline offset must be set to Not-a-Number (NaN).
// FIXME: 1. Establish the media timeline for the purposes of the current playback position and the earliest possible position, based on the media data.
// FIXME: 2. Update the timeline offset to the date and time that corresponds to the zero time in the media timeline established in the previous step,
// if any. If no explicit time and date is given by the media resource, the timeline offset must be set to Not-a-Number (NaN).
// 3. Set the current playback position and the official playback position to the earliest possible position.
m_current_playback_position = 0;
m_official_playback_position = 0;
// 3. Set the current playback position and the official playback position to the earliest possible position.
m_current_playback_position = 0;
m_official_playback_position = 0;
// 4. Update the duration attribute with the time of the last frame of the resource, if known, on the media timeline established above. If it is
// not known (e.g. a stream that is in principle infinite), update the duration attribute to the value positive Infinity.
// FIXME: Handle unbounded media resources.
// 5. For video elements, set the videoWidth and videoHeight attributes, and queue a media element task given the media element to fire an event
// named resize at the media element.
if (video_track && is<HTMLVideoElement>(*this)) {
auto duration = video_track ? video_track->duration() : audio_track->duration();
set_duration(duration.to_seconds_f64());
// 4. Update the duration attribute with the time of the last frame of the resource, if known, on the media timeline established above. If it is
// not known (e.g. a stream that is in principle infinite), update the duration attribute to the value positive Infinity.
// FIXME: Handle unbounded media resources.
set_duration(m_playback_manager->duration().to_seconds_f64());
auto& video_element = as<HTMLVideoElement>(*this);
video_element.set_video_width(video_track->pixel_width());
video_element.set_video_height(video_track->pixel_height());
// 5. For video elements, set the videoWidth and videoHeight attributes, and queue a media element task given the media element to fire an event
// named resize at the media element.
auto* video_element = as_if<HTMLVideoElement>(*this);
if (m_selected_video_track && video_element) {
video_element->set_video_height(m_selected_video_track->track_in_playback_manager().video_data().pixel_height);
video_element->set_video_width(m_selected_video_track->track_in_playback_manager().video_data().pixel_width);
queue_a_media_element_task([this] {
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::resize));
});
} else {
auto duration = audio_track ? audio_track->duration() : video_track->duration();
set_duration(duration.to_seconds_f64());
}
queue_a_media_element_task([this] {
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::resize));
});
}
// 6. Set the readyState attribute to HAVE_METADATA.
set_ready_state(ReadyState::HaveMetadata);
// 6. Set the readyState attribute to HAVE_METADATA.
set_ready_state(ReadyState::HaveMetadata);
// 7. Let jumped be false.
[[maybe_unused]] auto jumped = false;
// 7. Let jumped be false.
[[maybe_unused]] auto jumped = false;
// 8. If the media element's default playback start position is greater than zero, then seek to that time, and let jumped be true.
if (m_default_playback_start_position > 0) {
seek_element(m_default_playback_start_position);
jumped = true;
}
// 8. If the media element's default playback start position is greater than zero, then seek to that time, and let jumped be true.
if (m_default_playback_start_position > 0) {
seek_element(m_default_playback_start_position);
jumped = true;
}
// 9. Set the media element's default playback start position to zero.
m_default_playback_start_position = 0;
// 9. Set the media element's default playback start position to zero.
m_default_playback_start_position = 0;
// FIXME: 10. Let the initial playback position be 0.
// FIXME: 11. If either the media resource or the URL of the current media resource indicate a particular start time, then set the initial playback
// position to that time and, if jumped is still false, seek to that time.
// FIXME: 10. Let the initial playback position be 0.
// FIXME: 11. If either the media resource or the URL of the current media resource indicate a particular start time, then set the initial playback
// position to that time and, if jumped is still false, seek to that time.
// 12. If there is no enabled audio track, then enable an audio track. This will cause a change event to be fired.
if (audio_track && !m_audio_tracks->has_enabled_track())
audio_track->set_enabled(true);
// FIXME: 12. If there is no enabled audio track, then enable an audio track. This will cause a change event to be fired.
// 13. If there is no selected video track, then select a video track. This will cause a change event to be fired.
if (video_track && m_video_tracks->selected_index() == -1)
video_track->set_selected(true);
// 13. If there is no selected video track, then select a video track. This will cause a change event to be fired.
if (m_video_tracks->selected_index() == -1) {
m_video_tracks->for_each_track([](auto& track) {
track.set_selected(true);
return IterationDecision::Break;
});
}
// -> Once the entire media resource has been fetched (but potentially before any of it has been decoded)
if (audio_track != nullptr || video_track != nullptr) {
// Fire an event named progress at the media element.
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::progress));
// Fire an event named progress at the media element.
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::progress));
// Set the networkState to NETWORK_IDLE and fire an event named suspend at the media element.
m_network_state = NetworkState::Idle;
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::suspend));
// Set the networkState to NETWORK_IDLE and fire an event named suspend at the media element.
m_network_state = NetworkState::Idle;
dispatch_event(DOM::Event::create(this->realm(), HTML::EventNames::suspend));
// If the user agent ever discards any media data and then needs to resume the network activity to obtain it again, then it must queue a media
// element task given the media element to set the networkState to NETWORK_LOADING.
}
// FIXME: If the user agent ever discards any media data and then needs to resume the network activity to obtain it again, then it must queue a media
// element task given the media element to set the networkState to NETWORK_LOADING.
// FIXME: -> If the connection is interrupted after some media data has been received, causing the user agent to give up trying to fetch the resource
// FIXME: -> If the media data fetching process is aborted by the user
@ -1571,12 +1611,10 @@ void HTMLMediaElement::seek_element(double playback_position, MediaSeekMode seek
// aborted at any time by another instance of this algorithm being invoked.
// 6. If the new playback position is later than the end of the media resource, then let it be the end of the media resource instead.
if (playback_position > m_duration)
playback_position = m_duration;
playback_position = min(playback_position, m_duration);
// 7. If the new playback position is less than the earliest possible position, let it be that position instead.
if (playback_position < 0)
playback_position = 0;
playback_position = max(playback_position, 0);
// 8. If the (possibly now changed) new playback position is not in one of the ranges given in the seekable attribute,
auto time_ranges = seekable();
@ -1635,12 +1673,12 @@ void HTMLMediaElement::seek_element(double playback_position, MediaSeekMode seek
// 12. Wait until the user agent has established whether or not the media data for the new playback position is
// available, and, if it is, until it has decoded enough data to play back that position.
m_seek_in_progress = true;
on_seek(playback_position, seek_mode);
HTML::main_thread_event_loop().spin_until(GC::create_function(heap(), [&]() { return !m_seek_in_progress; }));
// FIXME: Implement seeking in PlaybackManager.
(void)seek_mode;
// FIXME: 13. Await a stable state. The synchronous section consists of all the remaining steps of this algorithm. (Steps in the
// synchronous section are marked with ⌛.)
// The following steps should be executed within the success callback of a seek promise from PlaybackManager.
// 14. ⌛ Set the seeking IDL attribute to false.
set_seeking(false);
@ -1674,7 +1712,7 @@ void HTMLMediaElement::notify_about_playing()
resolve_pending_play_promises(promises);
});
on_playing();
// FIXME: Implement resuming in PlaybackManager.
if (m_audio_tracks->has_enabled_track())
document().page().client().page_did_change_audio_play_state(AudioPlayState::Playing);
@ -1699,7 +1737,7 @@ void HTMLMediaElement::set_paused(bool paused)
m_paused = paused;
if (m_paused) {
on_paused();
// FIXME: Implement pausing in PlaybackManager.
if (m_audio_tracks->has_enabled_track())
document().page().client().page_did_change_audio_play_state(AudioPlayState::Paused);
@ -1840,14 +1878,7 @@ bool HTMLMediaElement::has_ended_playback() const
void HTMLMediaElement::reached_end_of_media_playback()
{
// 1. If the media element has a loop attribute specified, then seek to the earliest possible position of the media resource and return.
if (has_attribute(HTML::AttributeNames::loop)) {
seek_element(0);
// AD-HOC: LibVideo internally sets itself to a paused state when it reaches the end of a video. We must resume
// playing manually to actually loop. Note that we don't need to update any HTMLMediaElement state as
// it hasn't left the playing state by this point.
on_playing();
}
// FIXME: Implement this within PlaybackManager and its related classes.
// 2. As defined above, the ended IDL attribute starts returning true once the event loop returns to step 1.
@ -2068,12 +2099,12 @@ void HTMLMediaElement::set_layout_display_time(Badge<Painting::MediaPaintable>,
if (display_time.has_value() && !m_display_time.has_value()) {
if (potentially_playing()) {
m_tracking_mouse_position_while_playing = true;
on_paused();
// FIXME: Pause the PlaybackManager.
}
} else if (!display_time.has_value() && m_display_time.has_value()) {
if (m_tracking_mouse_position_while_playing) {
m_tracking_mouse_position_while_playing = false;
on_playing();
// FIXME: Resume the PlaybackManager.
}
}

View file

@ -1,6 +1,7 @@
/*
* Copyright (c) 2020, the SerenityOS developers.
* Copyright (c) 2023, Tim Flynn <trflynn89@serenityos.org>
* Copyright (c) 2025, Gregory Bertilson <gregory@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
@ -13,6 +14,7 @@
#include <AK/Variant.h>
#include <LibGC/RootVector.h>
#include <LibGfx/Rect.h>
#include <LibMedia/Forward.h>
#include <LibWeb/DOM/DocumentLoadEventDelayer.h>
#include <LibWeb/HTML/CORSSettingAttribute.h>
#include <LibWeb/HTML/EventLoop/Task.h>
@ -128,6 +130,10 @@ public:
GC::Ref<VideoTrackList> video_tracks() const { return *m_video_tracks; }
GC::Ref<TextTrackList> text_tracks() const { return *m_text_tracks; }
void set_selected_video_track(Badge<VideoTrack>, GC::Ptr<HTML::VideoTrack> video_track);
void update_video_frame_and_timeline();
GC::Ref<TextTrack> add_text_track(Bindings::TextTrackKind kind, String const& label, String const& language);
WebIDL::ExceptionOr<bool> handle_keydown(Badge<Web::EventHandler>, UIEvents::KeyCode, u32 modifiers);
@ -163,6 +169,8 @@ public:
CORSSettingAttribute crossorigin() const { return m_crossorigin; }
RefPtr<Media::DisplayingVideoSink> const& selected_video_track_sink() const { return m_selected_video_track_sink; }
protected:
HTMLMediaElement(DOM::Document&, DOM::QualifiedName);
@ -174,17 +182,6 @@ protected:
virtual void removed_from(DOM::Node* old_parent, DOM::Node& old_root) override;
virtual void children_changed(ChildrenChangedMetadata const* metadata) override;
// Override in subclasses to handle implementation-specific behavior when the element state changes
// to playing or paused, e.g. to start/stop play timers.
virtual void on_playing() { }
virtual void on_paused() { }
// Override in subclasses to handle implementation-specific seeking behavior. When seeking is complete,
// subclasses must invoke set_current_playback_position() to unblock the user agent.
virtual void on_seek(double, MediaSeekMode) { m_seek_in_progress = false; }
virtual void on_volume_change() { }
private:
friend SourceElementSelector;
@ -321,7 +318,9 @@ private:
GC::Ptr<Fetch::Infrastructure::FetchController> m_fetch_controller;
bool m_seek_in_progress = false;
RefPtr<Media::PlaybackManager> m_playback_manager;
GC::Ptr<VideoTrack> m_selected_video_track;
RefPtr<Media::DisplayingVideoSink> m_selected_video_track_sink;
// Cached state for layout.
Optional<MediaComponent> m_mouse_tracking_component;

View file

@ -6,6 +6,7 @@
*/
#include <LibGfx/Bitmap.h>
#include <LibMedia/Sinks/DisplayingVideoSink.h>
#include <LibWeb/Bindings/HTMLVideoElementPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/CSS/ComputedProperties.h>
@ -44,9 +45,6 @@ void HTMLVideoElement::initialize(JS::Realm& realm)
// GC finalizer: runs when the element is collected.
void HTMLVideoElement::finalize()
{
Base::finalize();
// Stop every video track so no playback/decode work outlives the element.
for (auto video_track : video_tracks()->video_tracks())
video_track->stop_video({});
}
void HTMLVideoElement::visit_edges(Cell::Visitor& visitor)
@ -109,62 +107,6 @@ u32 HTMLVideoElement::video_height() const
return m_video_height;
}
// Selects (or clears, when null) the video track whose frames this element renders.
// Invalidates style and layout since the element's intrinsic size may change, and
// pauses the previously selected track before replacing it.
void HTMLVideoElement::set_video_track(GC::Ptr<HTML::VideoTrack> video_track)
{
// The frame source changed, so style and layout must be recomputed.
set_needs_style_update(true);
if (auto layout_node = this->layout_node())
layout_node->set_needs_layout_update(DOM::SetNeedsLayoutReason::HTMLVideoElementSetVideoTrack);
// Only one track drives the element at a time; pause the outgoing one.
if (m_video_track)
m_video_track->pause_video({});
m_video_track = video_track;
}
// Stores the latest decoded frame together with its playback position
// (seconds, per the caller in VideoTrack), then invalidates the paintable
// so the new frame is drawn on the next paint.
void HTMLVideoElement::set_current_frame(Badge<VideoTrack>, RefPtr<Gfx::Bitmap> frame, double position)
{
m_current_frame = { move(frame), position };
if (paintable())
paintable()->set_needs_display();
}
// Called when the media element transitions to the playing state:
// resumes the selected video track and every enabled audio track.
void HTMLVideoElement::on_playing()
{
if (m_video_track)
m_video_track->play_video({});
audio_tracks()->for_each_enabled_track([](auto& audio_track) {
audio_track.play();
});
}
// Called when the media element transitions to the paused state:
// pauses the selected video track and every enabled audio track.
void HTMLVideoElement::on_paused()
{
if (m_video_track)
m_video_track->pause_video({});
audio_tracks()->for_each_enabled_track([](auto& audio_track) {
audio_track.pause();
});
}
// Called when the media element requests a seek. `position` is in seconds;
// it is converted to a millisecond AK::Duration for the video track, while
// audio tracks receive the position in seconds unchanged.
void HTMLVideoElement::on_seek(double position, MediaSeekMode seek_mode)
{
if (m_video_track)
m_video_track->seek(AK::Duration::from_milliseconds(position * 1000.0), seek_mode);
audio_tracks()->for_each_enabled_track([&](auto& audio_track) {
audio_track.seek(position, seek_mode);
});
}
// Called when the element's volume (or mute state) changes: pushes the new
// volume to every enabled audio track.
void HTMLVideoElement::on_volume_change()
{
audio_tracks()->for_each_enabled_track([&](auto& audio_track) {
audio_track.update_volume();
});
}
// https://html.spec.whatwg.org/multipage/media.html#attr-video-poster
WebIDL::ExceptionOr<void> HTMLVideoElement::determine_element_poster_frame(Optional<String> const& poster)
{
@ -245,4 +187,12 @@ WebIDL::ExceptionOr<void> HTMLVideoElement::determine_element_poster_frame(Optio
return {};
}
// Exposes the currently displayed frame as a bitmap (used when the element
// acts as a CanvasImageSource). Returns null when no video sink is selected.
RefPtr<Gfx::Bitmap> HTMLVideoElement::bitmap() const
{
auto const& sink = selected_video_track_sink();
if (sink == nullptr)
return nullptr;
return sink->current_frame();
}
}

View file

@ -36,17 +36,10 @@ public:
void set_video_height(u32 video_height) { m_video_height = video_height; }
u32 video_height() const;
void set_video_track(GC::Ptr<VideoTrack>);
void set_current_frame(Badge<VideoTrack>, RefPtr<Gfx::Bitmap> frame, double position);
VideoFrame const& current_frame() const { return m_current_frame; }
RefPtr<Gfx::Bitmap> const& poster_frame() const { return m_poster_frame; }
// FIXME: This is a hack for images used as CanvasImageSource. Do something more elegant.
RefPtr<Gfx::Bitmap> bitmap() const
{
return current_frame().frame;
}
RefPtr<Gfx::Bitmap> bitmap() const;
private:
HTMLVideoElement(DOM::Document&, DOM::QualifiedName);
@ -63,11 +56,6 @@ private:
virtual GC::Ptr<Layout::Node> create_layout_node(GC::Ref<CSS::ComputedProperties>) override;
virtual void adjust_computed_style(CSS::ComputedProperties&) override;
virtual void on_playing() override;
virtual void on_paused() override;
virtual void on_seek(double, MediaSeekMode) override;
virtual void on_volume_change() override;
WebIDL::ExceptionOr<void> determine_element_poster_frame(Optional<String> const& poster);
GC::Ptr<HTML::VideoTrack> m_video_track;

View file

@ -6,17 +6,13 @@
#include <AK/IDAllocator.h>
#include <AK/Time.h>
#include <LibGfx/Bitmap.h>
#include <LibJS/Runtime/Realm.h>
#include <LibJS/Runtime/VM.h>
#include <LibMedia/PlaybackManager.h>
#include <LibMedia/Track.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/Bindings/VideoTrackPrototype.h>
#include <LibWeb/DOM/Event.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/HTML/HTMLMediaElement.h>
#include <LibWeb/HTML/HTMLVideoElement.h>
#include <LibWeb/HTML/VideoTrack.h>
#include <LibWeb/HTML/VideoTrackList.h>
@ -26,42 +22,11 @@ GC_DEFINE_ALLOCATOR(VideoTrack);
static IDAllocator s_video_track_id_allocator;
VideoTrack::VideoTrack(JS::Realm& realm, GC::Ref<HTMLMediaElement> media_element, NonnullOwnPtr<Media::PlaybackManager> playback_manager)
VideoTrack::VideoTrack(JS::Realm& realm, GC::Ref<HTMLMediaElement> media_element, Media::Track const& track)
: PlatformObject(realm)
, m_media_element(media_element)
, m_playback_manager(move(playback_manager))
, m_track_in_playback_manager(track)
{
m_playback_manager->on_video_frame = [this](auto frame) {
auto playback_position = static_cast<double>(position().to_milliseconds()) / 1000.0;
if (is<HTMLVideoElement>(*m_media_element))
as<HTMLVideoElement>(*m_media_element).set_current_frame({}, move(frame), playback_position);
m_media_element->set_current_playback_position(playback_position);
};
m_playback_manager->on_playback_state_change = [this]() {
switch (m_playback_manager->get_state()) {
case Media::PlaybackState::Stopped: {
auto playback_position_ms = static_cast<double>(duration().to_milliseconds());
m_media_element->set_current_playback_position(playback_position_ms / 1000.0);
break;
}
default:
break;
}
};
m_playback_manager->on_decoder_error = [this](auto error) {
auto error_message = MUST(String::from_utf8(error.description()));
m_media_element->set_decoder_error(move(error_message));
};
m_playback_manager->on_fatal_playback_error = [this](auto error) {
auto error_message = MUST(String::from_utf8(error.string_literal()));
m_media_element->set_decoder_error(move(error_message));
};
}
VideoTrack::~VideoTrack()
@ -88,53 +53,6 @@ void VideoTrack::visit_edges(Cell::Visitor& visitor)
visitor.visit(m_video_track_list);
}
// Resumes playback on the underlying PlaybackManager. Badge restricts the
// caller to HTMLVideoElement.
void VideoTrack::play_video(Badge<HTMLVideoElement>)
{
m_playback_manager->resume_playback();
}
// Pauses playback on the underlying PlaybackManager.
void VideoTrack::pause_video(Badge<HTMLVideoElement>)
{
m_playback_manager->pause_playback();
}
// Terminates playback entirely (used on teardown, e.g. element finalization).
void VideoTrack::stop_video(Badge<HTMLVideoElement>)
{
m_playback_manager->terminate_playback();
}
// Current playback position, as reported by the PlaybackManager.
AK::Duration VideoTrack::position() const
{
return m_playback_manager->current_playback_time();
}
// Total duration of the media, as reported by the PlaybackManager.
AK::Duration VideoTrack::duration() const
{
return m_playback_manager->duration();
}
// Maps the HTML-layer MediaSeekMode onto the PlaybackManager's seek modes
// (Accurate -> Accurate, ApproximateForSpeed -> Fast) and forwards the seek.
void VideoTrack::seek(AK::Duration position, MediaSeekMode seek_mode)
{
switch (seek_mode) {
case MediaSeekMode::Accurate:
m_playback_manager->seek_to_timestamp(position, Media::PlaybackManager::SeekMode::Accurate);
break;
case MediaSeekMode::ApproximateForSpeed:
m_playback_manager->seek_to_timestamp(position, Media::PlaybackManager::SeekMode::Fast);
break;
}
}
// Width in pixels of the selected video track's frames.
u64 VideoTrack::pixel_width() const
{
return m_playback_manager->selected_video_track().video_data().pixel_width;
}
// Height in pixels of the selected video track's frames.
u64 VideoTrack::pixel_height() const
{
return m_playback_manager->selected_video_track().video_data().pixel_height;
}
// https://html.spec.whatwg.org/multipage/media.html#dom-videotrack-selected
void VideoTrack::set_selected(bool selected)
{
@ -167,11 +85,8 @@ void VideoTrack::set_selected(bool selected)
m_selected = selected;
// AD-HOC: Inform the video element node that we have (un)selected a video track for layout.
if (is<HTMLVideoElement>(*m_media_element)) {
auto& video_element = as<HTMLVideoElement>(*m_media_element);
video_element.set_video_track(m_selected ? this : nullptr);
}
// AD-HOC: Inform the element node that we have (un)selected a video track for layout.
m_media_element->set_selected_video_track({}, m_selected ? this : nullptr);
}
}

View file

@ -8,8 +8,7 @@
#include <AK/String.h>
#include <AK/Time.h>
#include <LibGfx/Forward.h>
#include <LibMedia/Forward.h>
#include <LibMedia/Track.h>
#include <LibWeb/Bindings/PlatformObject.h>
namespace Web::HTML {
@ -23,17 +22,6 @@ public:
void set_video_track_list(Badge<VideoTrackList>, GC::Ptr<VideoTrackList> video_track_list) { m_video_track_list = video_track_list; }
void play_video(Badge<HTMLVideoElement>);
void pause_video(Badge<HTMLVideoElement>);
void stop_video(Badge<HTMLVideoElement>);
AK::Duration position() const;
AK::Duration duration() const;
void seek(AK::Duration, MediaSeekMode);
u64 pixel_width() const;
u64 pixel_height() const;
String const& id() const { return m_id; }
String const& kind() const { return m_kind; }
String const& label() const { return m_label; }
@ -42,8 +30,10 @@ public:
bool selected() const { return m_selected; }
void set_selected(bool selected);
Media::Track const& track_in_playback_manager() const { return m_track_in_playback_manager; }
private:
VideoTrack(JS::Realm&, GC::Ref<HTMLMediaElement>, NonnullOwnPtr<Media::PlaybackManager>);
VideoTrack(JS::Realm&, GC::Ref<HTMLMediaElement>, Media::Track const& track);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
@ -66,7 +56,7 @@ private:
GC::Ref<HTMLMediaElement> m_media_element;
GC::Ptr<VideoTrackList> m_video_track_list;
NonnullOwnPtr<Media::PlaybackManager> m_playback_manager;
Media::Track m_track_in_playback_manager;
};
}

View file

@ -30,6 +30,16 @@ public:
GC::Ptr<VideoTrack> get_track_by_id(StringView id) const;
i32 selected_index() const;
// Invokes `callback` for each video track in list order. The callback must
// return an IterationDecision; returning IterationDecision::Break stops
// iteration early.
template<typename Callback>
void for_each_track(Callback&& callback)
{
for (auto& video_track : m_video_tracks) {
auto iteration_decision = callback(*video_track);
if (iteration_decision == IterationDecision::Break)
break;
}
}
void set_onchange(WebIDL::CallbackType*);
WebIDL::CallbackType* onchange();

View file

@ -502,6 +502,13 @@ void Page::unregister_media_element(Badge<HTML::HTMLMediaElement>, UniqueNodeID
});
}
// Asks every registered media element on the page to pull the latest frame
// from its video sink and refresh its timeline state.
void Page::update_all_media_element_video_sinks()
{
for_each_media_element([](auto& media_element) {
media_element.update_video_frame_and_timeline();
});
}
void Page::did_request_media_context_menu(UniqueNodeID media_id, CSSPixelPoint position, ByteString const& target, unsigned modifiers, MediaContextMenu const& menu)
{
m_media_context_menu_element_id = media_id;

View file

@ -183,6 +183,8 @@ public:
void register_media_element(Badge<HTML::HTMLMediaElement>, UniqueNodeID media_id);
void unregister_media_element(Badge<HTML::HTMLMediaElement>, UniqueNodeID media_id);
void update_all_media_element_video_sinks();
struct MediaContextMenu {
URL::URL media_url;
bool is_video { false };

View file

@ -5,6 +5,7 @@
*/
#include <AK/Array.h>
#include <LibMedia/Sinks/DisplayingVideoSink.h>
#include <LibWeb/DOM/Document.h>
#include <LibWeb/HTML/HTMLMediaElement.h>
#include <LibWeb/HTML/HTMLVideoElement.h>
@ -58,7 +59,7 @@ void VideoPaintable::paint(DisplayListRecordingContext& context, PaintPhase phas
auto const& video_element = as<HTML::HTMLVideoElement>(*dom_node());
auto mouse_position = MediaPaintable::mouse_position(context, video_element);
auto const& current_frame = video_element.current_frame();
auto const& current_frame = video_element.selected_video_track_sink() != nullptr ? video_element.selected_video_track_sink()->current_frame() : nullptr;
auto const& poster_frame = video_element.poster_frame();
auto current_playback_position = video_element.current_playback_position();
@ -148,8 +149,8 @@ void VideoPaintable::paint(DisplayListRecordingContext& context, PaintPhase phas
switch (representation) {
case Representation::VideoFrame:
if (current_frame.frame)
paint_frame(current_frame.frame);
if (current_frame)
paint_frame(current_frame);
if (paint_user_agent_controls)
paint_loaded_video_controls();
break;