Make GIFs and videos with captions larger.

This commit is contained in:
John Preston 2022-09-05 19:03:52 +04:00
parent 9ef2f370ac
commit d6ba092697
21 changed files with 410 additions and 253 deletions

View file

@ -45,6 +45,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "platform/platform_specific.h"
#include "base/platform/base_platform_info.h"
#include "base/power_save_blocker.h"
#include "media/streaming/media_streaming_utility.h"
#include "window/main_window.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h"
@ -335,16 +336,10 @@ void Panel::refreshIncomingGeometry() {
return;
}
const auto to = widget()->size();
const auto small = _incomingFrameSize.scaled(to, Qt::KeepAspectRatio);
const auto big = _incomingFrameSize.scaled(
const auto use = ::Media::Streaming::DecideFrameResize(
to,
Qt::KeepAspectRatioByExpanding);
// If we cut out no more than 0.25 of the original, let's use expanding.
const auto use = ((big.width() * 3 <= to.width() * 4)
&& (big.height() * 3 <= to.height() * 4))
? big
: small;
_incomingFrameSize
).result;
const auto pos = QPoint(
(to.width() - use.width()) / 2,
(to.height() - use.height()) / 2);

View file

@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/group/calls_group_viewport_tile.h"
#include "webrtc/webrtc_video_track.h"
#include "media/view/media_view_pip.h"
#include "media/streaming/media_streaming_utility.h"
#include "calls/group/calls_group_members_row.h"
#include "lang/lang_keys.h"
#include "ui/gl/gl_shader.h"
@ -31,7 +32,6 @@ constexpr auto kNoiseTextureSize = 256;
constexpr auto kBlurTextureSizeFactor = 4.;
constexpr auto kBlurOpacity = 0.65;
constexpr auto kDitherNoiseAmount = 0.002;
constexpr auto kMinCameraVisiblePart = 0.75;
constexpr auto kQuads = 9;
constexpr auto kQuadVertices = kQuads * 4;
@ -224,13 +224,8 @@ vec4 background() {
}
[[nodiscard]] bool UseExpandForCamera(QSize original, QSize viewport) {
const auto big = original.scaled(
viewport,
Qt::KeepAspectRatioByExpanding);
// If we cut out no more than 0.25 of the original, let's use expanding.
return (big.width() * kMinCameraVisiblePart <= viewport.width())
&& (big.height() * kMinCameraVisiblePart <= viewport.height());
using namespace ::Media::Streaming;
return DecideFrameResize(viewport, original).expanding;
}
[[nodiscard]] QSize NonEmpty(QSize size) {

View file

@ -482,6 +482,10 @@ bool RotationSwapWidthHeight(int rotation) {
return (rotation == 90 || rotation == 270);
}
// Swaps width and height for rotations that turn the frame sideways.
QSize TransposeSizeByRotation(QSize size, int rotation) {
	if (RotationSwapWidthHeight(rotation)) {
		return size.transposed();
	}
	return size;
}
bool GoodStorageForFrame(const QImage &storage, QSize size) {
return !storage.isNull()
&& (storage.format() == kImageFormat)

View file

@ -180,6 +180,7 @@ void LogError(QLatin1String method, FFmpeg::AvErrorWrap error);
[[nodiscard]] int ReadRotationFromMetadata(not_null<AVStream*> stream);
[[nodiscard]] AVRational ValidateAspectRatio(AVRational aspect);
[[nodiscard]] bool RotationSwapWidthHeight(int rotation);
[[nodiscard]] QSize TransposeSizeByRotation(QSize size, int rotation);
[[nodiscard]] QSize CorrectByAspect(QSize size, AVRational aspect);
[[nodiscard]] bool GoodStorageForFrame(const QImage &storage, QSize size);

View file

@ -142,6 +142,28 @@ void PaintWaveform(
}
}
// Widest possible status line for this document ("3.1 MB",
// "1:20, 3.1 MB", ...) measured in the normal font, used to reserve
// enough horizontal space for the status text.
[[nodiscard]] int MaxStatusWidth(not_null<DocumentData*> document) {
	using namespace Ui;

	auto result = 0;
	const auto add = [&](const QString &text) {
		accumulate_max(result, st::normalFont->width(text));
	};
	add(FormatDownloadText(document->size, document->size));
	const auto duration = document->getDuration();
	if (document->song() || document->voice()) {
		// The song and voice branches were identical — merged to remove
		// the duplication and the unused 'song' / 'voice' bindings.
		add(FormatPlayedText(duration, duration));
		add(FormatDurationAndSizeText(duration, document->size));
	} else if (document->isVideoFile()) {
		add(FormatDurationAndSizeText(duration, document->size));
	} else {
		add(FormatSizeText(document->size));
	}
	return result;
}
} // namespace
Document::Document(
@ -317,10 +339,10 @@ QSize Document::countOptimalSize() {
const auto tleft = st.padding.left() + st.thumbSize + st.padding.right();
const auto tright = st.padding.left();
if (thumbed) {
accumulate_max(maxWidth, tleft + documentMaxStatusWidth(_data) + tright);
accumulate_max(maxWidth, tleft + MaxStatusWidth(_data) + tright);
} else {
auto unread = _data->isVoiceMessage() ? (st::mediaUnreadSkip + st::mediaUnreadSize) : 0;
accumulate_max(maxWidth, tleft + documentMaxStatusWidth(_data) + unread + _parent->skipBlockWidth() + st::msgPadding.right());
accumulate_max(maxWidth, tleft + MaxStatusWidth(_data) + unread + _parent->skipBlockWidth() + st::msgPadding.right());
}
if (auto named = Get<HistoryDocumentNamed>()) {

View file

@ -48,7 +48,7 @@ Game::Game(
}
QSize Game::countOptimalSize() {
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
const auto item = _parent->data();
if (!_openl && item->isRegular()) {
@ -149,7 +149,7 @@ QSize Game::countCurrentSize(int newWidth) {
// enable any count of lines in game description / message
auto linesMax = 4096;
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
auto newHeight = 0;
if (_title.isEmpty()) {
_titleLines = 0;
@ -225,7 +225,7 @@ void Game::draw(Painter &p, const PaintContext &context) const {
QRect bar(style::rtlrect(st::msgPadding.left(), tshift, st::webPageBar, height() - tshift - bshift, width()));
p.fillRect(bar, barfg);
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (_titleLines) {
p.setPen(semibold);
p.setTextPalette(stm->semiboldPalette);
@ -301,7 +301,7 @@ TextState Game::textState(QPoint point, StateRequest request) const {
auto inThumb = false;
auto symbolAdd = 0;
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (_titleLines) {
if (point.y() >= tshift && point.y() < tshift + _titleLines * lineHeight) {
Ui::Text::StateRequestElided titleRequest = request.forText();

View file

@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/player/media_player_instance.h"
#include "media/streaming/media_streaming_instance.h"
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_utility.h"
#include "media/view/media_view_playback_progress.h"
#include "ui/boxes/confirm_box.h"
#include "history/history_item_components.h"
@ -147,21 +148,23 @@ QSize Gif::countOptimalSize() {
auto thumbMaxWidth = st::msgMaxWidth;
const auto scaled = countThumbSize(thumbMaxWidth);
_thumbw = scaled.width();
_thumbh = scaled.height();
const auto minWidthByInfo = _parent->infoWidth()
+ 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x());
auto maxWidth = std::clamp(
std::max(_thumbw, _parent->infoWidth() + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x())),
std::max(scaled.width(), minWidthByInfo),
st::minPhotoSize,
thumbMaxWidth);
auto minHeight = qMax(_thumbh, st::minPhotoSize);
auto minHeight = qMax(scaled.height(), st::minPhotoSize);
if (!activeCurrentStreamed()) {
accumulate_max(maxWidth, gifMaxStatusWidth(_data) + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x()));
}
if (_parent->hasBubble()) {
accumulate_max(maxWidth, _parent->minWidthForMedia());
if (!_caption.isEmpty()) {
auto captionw = maxWidth - st::msgPadding.left() - st::msgPadding.right();
minHeight += st::mediaCaptionSkip + _caption.countHeight(captionw);
maxWidth = qMax(maxWidth, st::msgPadding.left()
+ _caption.maxWidth()
+ st::msgPadding.right());
minHeight += st::mediaCaptionSkip + _caption.minHeight();
if (isBubbleBottom()) {
minHeight += st::msgPadding.bottom();
}
@ -185,21 +188,28 @@ QSize Gif::countCurrentSize(int newWidth) {
auto thumbMaxWidth = newWidth;
const auto scaled = countThumbSize(thumbMaxWidth);
_thumbw = scaled.width();
_thumbh = scaled.height();
const auto minWidthByInfo = _parent->infoWidth()
+ 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x());
newWidth = std::clamp(
std::max(_thumbw, _parent->infoWidth() + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x())),
std::max(scaled.width(), minWidthByInfo),
st::minPhotoSize,
thumbMaxWidth);
auto newHeight = qMax(_thumbh, st::minPhotoSize);
auto newHeight = qMax(scaled.height(), st::minPhotoSize);
if (!activeCurrentStreamed()) {
accumulate_max(newWidth, gifMaxStatusWidth(_data) + 2 * (st::msgDateImgDelta + st::msgDateImgPadding.x()));
}
if (_parent->hasBubble()) {
accumulate_max(newWidth, _parent->minWidthForMedia());
if (!_caption.isEmpty()) {
auto captionw = newWidth - st::msgPadding.left() - st::msgPadding.right();
const auto maxWithCaption = qMin(
st::msgMaxWidth,
(st::msgPadding.left()
+ _caption.maxWidth()
+ st::msgPadding.right()));
newWidth = qMax(newWidth, maxWithCaption);
const auto captionw = newWidth
- st::msgPadding.left()
- st::msgPadding.right();
newHeight += st::mediaCaptionSkip + _caption.countHeight(captionw);
if (isBubbleBottom()) {
newHeight += st::msgPadding.bottom();
@ -380,11 +390,12 @@ void Gif::draw(Painter &p, const PaintContext &context) const {
displayMute = true;
}
}
auto request = ::Media::Streaming::FrameRequest();
request.outer = QSize(usew, painth) * cIntRetinaFactor();
request.resize = QSize(_thumbw, _thumbh) * cIntRetinaFactor();
request.corners = roundCorners;
request.radius = roundRadius;
auto request = ::Media::Streaming::FrameRequest{
.outer = QSize(usew, painth) * cIntRetinaFactor(),
.radius = roundRadius,
.corners = roundCorners,
.blurredBackground = true,
};
if (!activeRoundPlaying && activeOwnPlaying->instance.playerLocked()) {
if (activeOwnPlaying->frozenFrame.isNull()) {
activeOwnPlaying->frozenRequest = request;
@ -433,49 +444,8 @@ void Gif::draw(Painter &p, const PaintContext &context) const {
}
} else if (!skipDrawingContent) {
ensureDataMediaCreated();
const auto size = QSize(_thumbw, _thumbh);
const auto args = Images::PrepareArgs{
.options = Images::RoundOptions(roundRadius, roundCorners),
.outer = QSize(usew, painth),
};
if (const auto good = _dataMedia->goodThumbnail()) {
p.drawPixmap(rthumb.topLeft(), good->pixSingle(size, args));
} else {
const auto normal = _dataMedia->thumbnail();
if (normal) {
const auto blurred = (normal->width() < kUseNonBlurredThreshold)
&& (normal->height() < kUseNonBlurredThreshold);
p.drawPixmap(
rthumb.topLeft(),
normal->pixSingle(size, blurred ? args.blurred() : args));
} else {
_data->loadThumbnail(_realParent->fullId());
validateVideoThumbnail();
if (_videoThumbnailFrame) {
p.drawPixmap(rthumb.topLeft(), _videoThumbnailFrame->pixSingle(size, args));
} else if (const auto blurred = _dataMedia->thumbnailInline()) {
p.drawPixmap(rthumb.topLeft(), blurred->pixSingle(size, args.blurred()));
} else if (!unwrapped) {
if (roundRadius == ImageRoundRadius::Ellipse) {
PainterHighQualityEnabler hq(p);
p.setPen(Qt::NoPen);
p.setBrush(st->imageBg());
p.drawEllipse(rthumb);
} else {
const auto roundTop = (roundCorners & RectPart::TopLeft);
const auto roundBottom = (roundCorners & RectPart::BottomLeft);
const auto margin = inWebPage
? st::roundRadiusSmall
: st::historyMessageRadius;
const auto parts = roundCorners
| RectPart::NoTopBottom
| (roundTop ? RectPart::Top : RectPart::None)
| (roundBottom ? RectPart::Bottom : RectPart::None);
Ui::FillRoundRect(p, rthumb.marginsAdded({ 0, roundTop ? 0 : margin, 0, roundBottom ? 0 : margin }), st->imageBg(), roundRadius, parts);
}
}
}
}
validateThumbCache({ usew, painth }, roundRadius, roundCorners);
p.drawImage(rthumb, _thumbCache);
}
if (context.selected()) {
@ -688,6 +658,74 @@ void Gif::validateVideoThumbnail() const {
: info.thumbnail);
}
// Regenerates _thumbCache when the requested geometry, rounding or
// blur state differs from what is currently cached.
void Gif::validateThumbCache(
		QSize outer,
		ImageRoundRadius radius,
		RectParts corners) const {
	const auto intRadius = static_cast<int>(radius);
	const auto intCorners = static_cast<int>(corners);
	const auto good = _dataMedia->goodThumbnail();
	const auto normal = good ? good : _dataMedia->thumbnail();
	if (!normal) {
		// No thumbnail yet: request one and refresh the video frame
		// so we have something to show meanwhile.
		_data->loadThumbnail(_realParent->fullId());
		validateVideoThumbnail();
	}
	const auto videothumb = normal ? nullptr : _videoThumbnailFrame.get();
	// A small non-"good" thumbnail (below kUseNonBlurredThreshold) is
	// shown blurred; no image at all also counts as blurred.
	const auto blurred = normal
		? (!good
			&& (normal->width() < kUseNonBlurredThreshold)
			&& (normal->height() < kUseNonBlurredThreshold))
		: !videothumb;
	const auto ratio = style::DevicePixelRatio();
	const auto shouldBeBlurred = blurred ? 1 : 0;
	if (_thumbCache.size() == (outer * ratio)
		&& _thumbCacheRoundRadius == intRadius
		&& _thumbCacheRoundCorners == intCorners
		&& _thumbCacheBlurred == shouldBeBlurred) {
		return; // Cache is already up to date.
	}
	_thumbCache = prepareThumbCache(outer, radius, corners);
	_thumbCacheRoundRadius = intRadius;
	_thumbCacheRoundCorners = intCorners;
	_thumbCacheBlurred = shouldBeBlurred;
}
// Renders the thumbnail and applies the requested corner rounding.
QImage Gif::prepareThumbCache(
		QSize outer,
		ImageRoundRadius radius,
		RectParts corners) const {
	auto cache = prepareThumbCache(outer);
	return Images::Round(std::move(cache), radius, corners);
}
// Renders the static thumbnail image: a sharp layer centered over a
// blurred background, or only the background while no sharp image is
// available yet.
QImage Gif::prepareThumbCache(QSize outer) const {
	const auto good = _dataMedia->goodThumbnail();
	const auto normal = good ? good : _dataMedia->thumbnail();
	const auto videothumb = normal ? nullptr : _videoThumbnailFrame.get();
	// A small non-"good" thumbnail is used only as the blur source.
	auto blurred = (!good
		&& normal
		&& (normal->width() < kUseNonBlurredThreshold)
		&& (normal->height() < kUseNonBlurredThreshold))
		? normal
		: nullptr;
	const auto blurFromLarge = good || (normal && !blurred);
	const auto large = blurFromLarge ? normal : videothumb;
	if (videothumb) {
		// With a video frame available the inline thumbnail is not used.
	} else if (const auto embedded = _dataMedia->thumbnailInline()) {
		blurred = embedded;
	}
	// Removed an unused local 'ratio' (style::DevicePixelRatio()) that
	// was never read in this method.
	const auto resize = large
		? ::Media::Streaming::DecideVideoFrameResize(
			outer,
			good ? large->size() : _data->dimensions)
		: ::Media::Streaming::ExpandDecision();
	return PrepareWithBlurredBackground(
		outer,
		resize,
		large,
		blurFromLarge ? large : blurred);
}
void Gif::drawCornerStatus(
Painter &p,
const PaintContext &context,
@ -996,17 +1034,18 @@ void Gif::drawGrouped(
if (streamed) {
const auto paused = autoPaused;
auto request = ::Media::Streaming::FrameRequest();
const auto original = sizeForAspectRatio();
const auto originalWidth = style::ConvertScale(original.width());
const auto originalHeight = style::ConvertScale(original.height());
const auto pixSize = Ui::GetImageScaleSizeForGeometry(
{ originalWidth, originalHeight },
{ geometry.width(), geometry.height() });
request.outer = geometry.size() * cIntRetinaFactor();
request.resize = pixSize * cIntRetinaFactor();
request.corners = corners;
request.radius = roundRadius;
auto request = ::Media::Streaming::FrameRequest{
.resize = pixSize * cIntRetinaFactor(),
.outer = geometry.size() * cIntRetinaFactor(),
.radius = roundRadius,
.corners = corners,
};
if (activeOwnPlaying->instance.playerLocked()) {
if (activeOwnPlaying->frozenFrame.isNull()) {
activeOwnPlaying->frozenRequest = request;
@ -1408,6 +1447,7 @@ bool Gif::hasHeavyPart() const {
void Gif::unloadHeavyPart() {
stopAnimation();
_dataMedia = nullptr;
_thumbCache = QImage();
_videoThumbnailFrame = nullptr;
_caption.unloadCustomEmoji();
}

View file

@ -158,6 +158,16 @@ private:
[[nodiscard]] int additionalWidth() const;
[[nodiscard]] bool isUnwrapped() const;
void validateThumbCache(
QSize outer,
ImageRoundRadius radius,
RectParts corners) const;
[[nodiscard]] QImage prepareThumbCache(
QSize outer,
ImageRoundRadius radius,
RectParts corners) const;
[[nodiscard]] QImage prepareThumbCache(QSize outer) const;
void validateGroupedCache(
const QRect &geometry,
RectParts corners,
@ -178,13 +188,15 @@ private:
QPoint position) const;
const not_null<DocumentData*> _data;
int _thumbw = 1;
int _thumbh = 1;
Ui::Text::String _caption;
std::unique_ptr<Streamed> _streamed;
mutable std::shared_ptr<Data::DocumentMedia> _dataMedia;
mutable std::unique_ptr<Image> _videoThumbnailFrame;
QString _downloadSize;
mutable QImage _thumbCache;
mutable int _thumbCacheRoundRadius : 4 = 0;
mutable int _thumbCacheRoundCorners : 12 = 0;
mutable int _thumbCacheBlurred : 1 = 0;
};

View file

@ -83,7 +83,7 @@ void Invoice::fillFromData(not_null<Data::Invoice*> invoice) {
}
QSize Invoice::countOptimalSize() {
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (_attach) {
if (_status.hasSkipBlock()) {
@ -139,7 +139,7 @@ QSize Invoice::countCurrentSize(int newWidth) {
accumulate_min(newWidth, maxWidth());
auto innerWidth = newWidth - st::msgPadding.left() - st::msgPadding.right();
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
auto newHeight = 0;
if (_title.isEmpty()) {
@ -211,7 +211,7 @@ void Invoice::draw(Painter &p, const PaintContext &context) const {
auto tshift = padding.top();
paintw -= padding.left() + padding.right();
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (_titleHeight) {
p.setPen(semibold);
p.setTextPalette(stm->semiboldPalette);
@ -283,7 +283,7 @@ TextState Invoice::textState(QPoint point, StateRequest request) const {
}
paintw -= padding.left() + padding.right();
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
auto symbolAdd = 0;
if (_titleHeight) {
if (point.y() >= tshift && point.y() < tshift + _titleHeight) {

View file

@ -18,27 +18,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "history/view/media/history_view_document.h"
#include "history/view/media/history_view_sticker.h"
#include "history/view/media/history_view_theme_document.h"
#include "media/streaming/media_streaming_utility.h"
#include "styles/style_chat.h"
namespace HistoryView {
int documentMaxStatusWidth(DocumentData *document) {
auto result = st::normalFont->width(Ui::FormatDownloadText(document->size, document->size));
const auto duration = document->getDuration();
if (const auto song = document->song()) {
accumulate_max(result, st::normalFont->width(Ui::FormatPlayedText(duration, duration)));
accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
} else if (const auto voice = document->voice()) {
accumulate_max(result, st::normalFont->width(Ui::FormatPlayedText(duration, duration)));
accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
} else if (document->isVideoFile()) {
accumulate_max(result, st::normalFont->width(Ui::FormatDurationAndSizeText(duration, document->size)));
} else {
accumulate_max(result, st::normalFont->width(Ui::FormatSizeText(document->size)));
}
return result;
}
void PaintInterpolatedIcon(
Painter &p,
const style::icon &a,
@ -105,8 +89,49 @@ std::unique_ptr<Media> CreateAttach(
return nullptr;
}
int unitedLineHeight() {
return qMax(st::webPageTitleFont->height, st::webPageDescriptionFont->height);
// Common line height used for title / description text: the larger of
// the semibold and normal font heights.
int UnitedLineHeight() {
	const auto semibold = st::semiboldFont->height;
	const auto normal = st::normalFont->height;
	return (semibold > normal) ? semibold : normal;
}
// Composes the final image: either the expanded 'large' image filling
// the whole 'outer' area, or the 'large' image centered over a blurred
// (or black) background.
//
// NOTE(review): the expanding branch dereferences 'large' without a
// null check — callers in this commit pass resize.expanding == true
// only when 'large' is non-null; confirm for any new callers.
QImage PrepareWithBlurredBackground(
		QSize outer,
		::Media::Streaming::ExpandDecision resize,
		Image *large,
		Image *blurred) {
	const auto ratio = style::DevicePixelRatio();
	if (resize.expanding) {
		// The image covers the whole area — no background needed.
		return Images::Prepare(large->original(), resize.result * ratio, {
			.outer = outer,
		});
	}
	auto background = QImage(
		outer * ratio,
		QImage::Format_ARGB32_Premultiplied);
	background.setDevicePixelRatio(ratio);
	if (!blurred) {
		// No blur source: fall back to a plain black background.
		background.fill(Qt::black);
		if (!large) {
			return background;
		}
	}
	auto p = QPainter(&background);
	if (blurred) {
		using namespace ::Media::Streaming;
		FillBlurredBackground(p, outer, blurred->original());
	}
	if (large) {
		// Scale the sharp image to the decided size and center it.
		auto image = large->original().scaled(
			resize.result * ratio,
			Qt::IgnoreAspectRatio,
			Qt::SmoothTransformation);
		image.setDevicePixelRatio(ratio);
		p.drawImage(
			(outer.width() - resize.result.width()) / 2,
			(outer.height() - resize.result.height()) / 2,
			image);
	}
	p.end();
	return background;
}
} // namespace HistoryView

View file

@ -7,6 +7,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
class DocumentData;
class PhotoData;
class Image;
namespace HistoryView {
class Element;
} // namespace HistoryView
@ -15,15 +19,14 @@ namespace Data {
class Media;
} // namespace Data
class DocumentData;
class PhotoData;
namespace Media::Streaming {
struct ExpandDecision;
} // namespace Media::Streaming
namespace HistoryView {
class Media;
int documentMaxStatusWidth(DocumentData *document);
void PaintInterpolatedIcon(
Painter &p,
const style::icon &a,
@ -41,7 +44,7 @@ void PaintInterpolatedIcon(
PhotoData *photo,
const std::vector<std::unique_ptr<Data::Media>> &collage,
const QString &webpageUrl);
int unitedLineHeight();
[[nodiscard]] int UnitedLineHeight();
[[nodiscard]] inline QSize NonEmptySize(QSize size) {
return QSize(std::max(size.width(), 1), std::max(size.height(), 1));
@ -54,4 +57,10 @@ int unitedLineHeight();
: size));
}
[[nodiscard]] QImage PrepareWithBlurredBackground(
QSize outer,
::Media::Streaming::ExpandDecision resize,
Image *large,
Image *blurred);
} // namespace HistoryView

View file

@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_instance.h"
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_document.h"
#include "media/streaming/media_streaming_utility.h"
#include "main/main_session.h"
#include "main/main_session_settings.h"
#include "ui/image/image.h"
@ -185,7 +186,7 @@ QSize Photo::countOptimalSize() {
return { maxWidth, minHeight };
}
QSize Photo::countCurrentSize(int newWidth) {
QSize Photo::pixmapSizeFromData(int newWidth) const {
auto tw = style::ConvertScale(_data->width());
auto th = style::ConvertScale(_data->height());
if (tw > st::maxMediaSize) {
@ -197,33 +198,38 @@ QSize Photo::countCurrentSize(int newWidth) {
th = st::maxMediaSize;
}
_pixw = qMin(newWidth, maxWidth());
_pixh = th;
if (tw > _pixw) {
_pixh = (_pixw * _pixh / tw);
auto pixw = qMin(newWidth, maxWidth());
auto pixh = th;
if (tw > pixw) {
pixh = (pixw * pixh / tw);
} else {
_pixw = tw;
pixw = tw;
}
if (_pixh > newWidth) {
_pixw = (_pixw * newWidth) / _pixh;
_pixh = newWidth;
if (pixh > newWidth) {
pixw = (pixw * newWidth) / pixh;
pixh = newWidth;
}
if (_pixw < 1) _pixw = 1;
if (_pixh < 1) _pixh = 1;
return { pixw, pixh };
}
QSize Photo::countCurrentSize(int newWidth) {
if (_serviceWidth) {
return { _serviceWidth, _serviceWidth };
}
const auto minWidth = std::clamp(
_parent->minWidthForMedia(),
(_parent->hasBubble() ? st::historyPhotoBubbleMinWidth : st::minPhotoSize),
std::min(newWidth, st::maxMediaSize));
newWidth = qMax(_pixw, minWidth);
auto newHeight = qMax(_pixh, st::minPhotoSize);
auto pix = pixmapSizeFromData(newWidth);
newWidth = qMax(pix.width(), minWidth);
auto newHeight = qMax(pix.height(), st::minPhotoSize);
if (_parent->hasBubble() && !_caption.isEmpty()) {
const auto maxWithCaption = qMin(
st::msgMaxWidth,
(st::msgPadding.left()
+ _caption.maxWidth()
+ st::msgPadding.right()));
newWidth = qMin(maxWidth(), maxWithCaption);
newWidth = qMax(newWidth, maxWithCaption);
const auto captionw = newWidth
- st::msgPadding.left()
- st::msgPadding.right();
@ -390,68 +396,10 @@ QImage Photo::prepareImageCache(QSize outer) const {
} else {
blurred = large;
}
if (large) {
const auto from = large->size();
// If we cut out no more than 0.25 of the original, let's expand.
const auto big = from.scaled(outer, Qt::KeepAspectRatioByExpanding);
if ((big.width() * 3 <= outer.width() * 4)
&& (big.height() * 3 <= outer.height() * 4)) {
return Images::Prepare(large->original(), big * ratio, {
.outer = outer,
});
}
}
auto background = QImage(
outer * ratio,
QImage::Format_ARGB32_Premultiplied);
background.setDevicePixelRatio(ratio);
if (!blurred) {
background.fill(Qt::black);
return background;
}
const auto bsize = blurred->size();
const auto copyw = std::min(
bsize.width(),
outer.width() * bsize.height() / outer.height());
const auto copyh = std::min(
bsize.height(),
outer.height() * bsize.width() / outer.width());
auto copy = (bsize == QSize(copyw, copyh))
? blurred->original()
: blurred->original().copy(
(bsize.width() - copyw) / 2,
(bsize.height() - copyh) / 2,
copyw,
copyh);
auto scaled = Images::Blur((outer.width() < 10
|| outer.height() < 10
|| (copy.width() * 5 < background.width()
&& copy.height() * 5 < background.height()))
? std::move(copy)
: copy.scaled(
std::min(copy.width(), background.width() / 5),
std::min(copy.height(), background.height() / 5),
Qt::KeepAspectRatio,
Qt::FastTransformation));
auto p = QPainter(&background);
{
auto hq = PainterHighQualityEnabler(p);
p.drawImage(QRect(QPoint(), outer), scaled);
}
if (large) {
auto image = large->original().scaled(
background.size(),
Qt::KeepAspectRatio,
Qt::SmoothTransformation);
image.setDevicePixelRatio(ratio);
const auto size = image.size() / ratio;
p.drawImage(
(outer.width() - size.width()) / 2,
(outer.height() - size.height()) / 2,
image);
}
p.end();
return background;
const auto resize = large
? ::Media::Streaming::DecideFrameResize(outer, large->size())
: ::Media::Streaming::ExpandDecision();
return PrepareWithBlurredBackground(outer, resize, large, blurred);
}
void Photo::paintUserpicFrame(
@ -466,7 +414,7 @@ void Photo::paintUserpicFrame(
checkStreamedIsStarted();
}
const auto size = QSize(_pixw, _pixh);
const auto size = QSize(width(), height());
const auto rect = QRect(photoPosition, size);
const auto st = context.st;
const auto sti = context.imageStyle();

View file

@ -116,6 +116,7 @@ private:
QSize countOptimalSize() override;
QSize countCurrentSize(int newWidth) override;
[[nodiscard]] QSize pixmapSizeFromData(int newWidth) const;
bool needInfoDisplay() const;
void validateGroupedCache(
@ -152,8 +153,6 @@ private:
mutable std::unique_ptr<Streamed> _streamed;
mutable QImage _imageCache;
int _serviceWidth = 0;
int _pixw = 1;
int _pixh = 1;
mutable int _imageCacheRoundRadius : 4 = 0;
mutable int _imageCacheRoundCorners : 12 = 0;
mutable int _imageCacheBlurred : 1 = 0;

View file

@ -106,7 +106,7 @@ QSize WebPage::countOptimalSize() {
_title = Ui::Text::String(st::msgMinWidth - st::webPageLeft);
_description = Ui::Text::String(st::msgMinWidth - st::webPageLeft);
}
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (!_openl && !_data->url.isEmpty()) {
const auto previewOfHiddenUrl = [&] {
@ -328,7 +328,7 @@ QSize WebPage::countCurrentSize(int newWidth) {
auto innerWidth = newWidth - st::msgPadding.left() - st::webPageLeft - st::msgPadding.right();
auto newHeight = 0;
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
auto linesMax = isLogEntryOriginal() ? kMaxOriginalEntryLines : 5;
auto siteNameHeight = _siteNameLines ? lineHeight : 0;
if (asArticle()) {
@ -498,7 +498,7 @@ void WebPage::draw(Painter &p, const PaintContext &context) const {
QRect bar(style::rtlrect(st::msgPadding.left(), tshift, st::webPageBar, height() - tshift - bshift, width()));
p.fillRect(bar, barfg);
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
if (asArticle()) {
ensurePhotoMediaCreated();
@ -650,7 +650,7 @@ TextState WebPage::textState(QPoint point, StateRequest request) const {
}
paintw -= padding.left() + padding.right();
auto lineHeight = unitedLineHeight();
auto lineHeight = UnitedLineHeight();
auto inThumb = false;
if (asArticle()) {
auto pw = qMax(_pixw, lineHeight);

View file

@ -123,6 +123,7 @@ struct FrameRequest {
ImageRoundRadius radius = ImageRoundRadius();
RectParts corners = RectPart::AllCorners;
QColor colored = QColor(0, 0, 0, 0);
bool blurredBackground = false;
bool requireARGB32 = true;
bool keepAlpha = false;
bool strict = true;
@ -134,7 +135,7 @@ struct FrameRequest {
}
[[nodiscard]] bool empty() const {
return resize.isEmpty();
return blurredBackground ? outer.isEmpty() : resize.isEmpty();
}
[[nodiscard]] bool operator==(const FrameRequest &other) const {
@ -144,14 +145,16 @@ struct FrameRequest {
&& (corners == other.corners)
&& (colored == other.colored)
&& (keepAlpha == other.keepAlpha)
&& (requireARGB32 == other.requireARGB32);
&& (requireARGB32 == other.requireARGB32)
&& (blurredBackground == other.blurredBackground);
}
[[nodiscard]] bool operator!=(const FrameRequest &other) const {
return !(*this == other);
}
[[nodiscard]] bool goodFor(const FrameRequest &other) const {
return (requireARGB32 == other.requireARGB32)
return (blurredBackground == other.blurredBackground)
&& (requireARGB32 == other.requireARGB32)
&& (keepAlpha == other.keepAlpha)
&& (colored == other.colored)
&& ((strict && !other.strict) || (*this == other));

View file

@ -177,7 +177,8 @@ Stream File::Context::initStream(
return result;
}
result.rotation = FFmpeg::ReadRotationFromMetadata(info);
result.aspect = FFmpeg::ValidateAspectRatio(info->sample_aspect_ratio);
result.aspect = FFmpeg::ValidateAspectRatio(
info->sample_aspect_ratio);
} else if (type == AVMEDIA_TYPE_AUDIO) {
result.frequency = info->codecpar->sample_rate;
if (!result.frequency) {

View file

@ -96,7 +96,7 @@ bool GoodForRequest(
|| (hasAlpha && !request.keepAlpha)
|| request.colored.alpha() != 0) {
return false;
} else if (request.resize.isEmpty()) {
} else if (!request.blurredBackground && request.resize.isEmpty()) {
return true;
} else if (rotation != 0) {
return false;
@ -104,8 +104,10 @@ bool GoodForRequest(
&& ((request.corners & RectPart::AllCorners) != 0)) {
return false;
}
return (request.resize == request.outer)
&& (request.resize == image.size());
const auto size = request.blurredBackground
? request.outer
: request.resize;
return (size == request.outer) && (size == image.size());
}
bool TransferFrame(
@ -279,28 +281,69 @@ void PaintFrameInner(
p.drawImage(rect, original);
}
// Crops 'frame' to the aspect ratio of 'outer', downscales it and
// blurs the result, producing the background layer.
// NOTE(review): divides by outer.width()/outer.height() — assumes a
// non-empty 'outer'; confirm callers never pass an empty size.
QImage PrepareBlurredBackground(QSize outer, QImage frame) {
	const auto bsize = frame.size();
	// Central crop matching the outer aspect ratio.
	const auto copyw = std::min(
		bsize.width(),
		outer.width() * bsize.height() / outer.height());
	const auto copyh = std::min(
		bsize.height(),
		outer.height() * bsize.width() / outer.width());
	auto copy = (bsize == QSize(copyw, copyh))
		? std::move(frame)
		: frame.copy(
			(bsize.width() - copyw) / 2,
			(bsize.height() - copyh) / 2,
			copyw,
			copyh);
	// Large crops are shrunk to 40x40 before blurring — the blur hides
	// the detail anyway and this keeps it cheap.
	auto scaled = (copy.width() <= 100 && copy.height() <= 100)
		? std::move(copy)
		: copy.scaled(40, 40, Qt::KeepAspectRatio, Qt::FastTransformation);
	return Images::Blur(std::move(scaled), true);
}
// Paints a blurred, slightly darkened version of 'bg' over the whole
// 'outer' rectangle of the painter's target.
void FillBlurredBackground(QPainter &p, QSize outer, QImage bg) {
	const auto target = QRect(QPoint(), outer);
	const auto pixelRatio = p.device()->devicePixelRatio();
	auto hq = PainterHighQualityEnabler(p);
	auto background = PrepareBlurredBackground(
		outer * pixelRatio,
		std::move(bg));
	p.drawImage(target, background);
	// Darken the background so the sharp image on top stands out.
	p.fillRect(target, QColor(0, 0, 0, 48));
}
void PaintFrameContent(
QPainter &p,
const QImage &original,
bool alpha,
bool hasAlpha,
const AVRational &aspect,
int rotation,
const FrameRequest &request) {
const auto full = request.outer.isEmpty()
? original.size()
: request.outer;
const auto size = request.resize.isEmpty()
? original.size()
: request.resize;
const auto to = QRect(
const auto outer = request.outer;
const auto full = request.outer.isEmpty() ? original.size() : outer;
const auto deAlpha = hasAlpha && !request.keepAlpha;
const auto resize = request.blurredBackground
? DecideVideoFrameResize(
outer,
FFmpeg::TransposeSizeByRotation(
FFmpeg::CorrectByAspect(original.size(), aspect), rotation))
: ExpandDecision{ request.resize.isEmpty()
? original.size()
: request.resize };
const auto size = resize.result;
const auto target = QRect(
(full.width() - size.width()) / 2,
(full.height() - size.height()) / 2,
size.width(),
size.height());
if (!alpha || !request.keepAlpha) {
PaintFrameOuter(p, to, full);
if (request.blurredBackground) {
if (!resize.expanding) {
FillBlurredBackground(p, full, original);
}
} else if (!hasAlpha || !request.keepAlpha) {
PaintFrameOuter(p, target, full);
}
const auto deAlpha = alpha && !request.keepAlpha;
PaintFrameInner(p, to, original, deAlpha, rotation);
PaintFrameInner(p, target, original, deAlpha, rotation);
}
void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
@ -314,13 +357,41 @@ void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
request.corners);
}
// Chooses between cover-style expansion and fit-style scaling for a
// frame of size 'original' shown inside 'outer': expand when no more
// than (1 - nominator/denominator) of the frame would be cut away.
ExpandDecision DecideFrameResize(
		QSize outer,
		QSize original,
		int minVisibleNominator,
		int minVisibleDenominator) {
	if (outer.isEmpty()) {
		// Often "expanding" means that we don't need to fill the background.
		return { .result = original, .expanding = true };
	}
	const auto expanded = original.scaled(
		outer,
		Qt::KeepAspectRatioByExpanding);
	const auto widthVisibleEnough = (expanded.width() * minVisibleNominator
		<= outer.width() * minVisibleDenominator);
	const auto heightVisibleEnough = (expanded.height() * minVisibleNominator
		<= outer.height() * minVisibleDenominator);
	if (widthVisibleEnough && heightVisibleEnough) {
		return { .result = expanded, .expanding = true };
	}
	return { .result = original.scaled(outer, Qt::KeepAspectRatio) };
}
ExpandDecision DecideVideoFrameResize(QSize outer, QSize original) {
return DecideFrameResize(outer, original, 1, 2);
}
QSize CalculateResizeFromOuter(QSize outer, QSize original) {
return DecideVideoFrameResize(outer, original).result;
}
QImage PrepareByRequest(
const QImage &original,
bool alpha,
bool hasAlpha,
const AVRational &aspect,
int rotation,
const FrameRequest &request,
QImage storage) {
Expects(!request.outer.isEmpty() || alpha);
Expects(!request.outer.isEmpty() || hasAlpha);
const auto outer = request.outer.isEmpty()
? original.size()
@ -329,12 +400,12 @@ QImage PrepareByRequest(
storage = FFmpeg::CreateFrameStorage(outer);
}
if (alpha && request.keepAlpha) {
if (hasAlpha && request.keepAlpha) {
storage.fill(Qt::transparent);
}
QPainter p(&storage);
PaintFrameContent(p, original, alpha, rotation, request);
PaintFrameContent(p, original, hasAlpha, aspect, rotation, request);
p.end();
ApplyFrameRounding(storage, request);

View file

@ -65,9 +65,26 @@ struct Stream {
QSize resize,
QImage storage);
[[nodiscard]] FrameYUV ExtractYUV(Stream &stream, AVFrame *frame);
struct ExpandDecision {
QSize result;
bool expanding = false;
};
[[nodiscard]] ExpandDecision DecideFrameResize(
QSize outer,
QSize original,
int minVisibleNominator = 3, // If we cut out no more than 0.25 of
int minVisibleDenominator = 4); // the original, let's expand.
[[nodiscard]] ExpandDecision DecideVideoFrameResize(
QSize outer,
QSize original);
[[nodiscard]] QSize CalculateResizeFromOuter(QSize outer, QSize original);
[[nodiscard]] QImage PrepareBlurredBackground(QSize outer, QImage frame);
void FillBlurredBackground(QPainter &p, QSize outer, QImage bg);
[[nodiscard]] QImage PrepareByRequest(
const QImage &original,
bool alpha,
bool hasAlpha,
const AVRational &aspect,
int rotation,
const FrameRequest &request,
QImage storage);

View file

@ -133,7 +133,7 @@ private:
[[nodiscard]] ReadEnoughState readEnoughFrames(crl::time trackTime);
[[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
void fillRequests(not_null<Frame*> frame) const;
[[nodiscard]] QSize chooseOriginalResize() const;
[[nodiscard]] QSize chooseOriginalResize(QSize encoded) const;
void presentFrameIfNeeded();
void callReady();
[[nodiscard]] bool loopAround();
@ -402,16 +402,22 @@ void VideoTrackObject::fillRequests(not_null<Frame*> frame) const {
}
}
QSize VideoTrackObject::chooseOriginalResize() const {
QSize VideoTrackObject::chooseOriginalResize(QSize encoded) const {
auto chosen = QSize();
if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
encoded.transpose();
}
for (const auto &[_, request] : _requests) {
if (request.resize.isEmpty()) {
const auto resize = request.blurredBackground
? CalculateResizeFromOuter(request.outer, encoded)
: request.resize;
if (resize.isEmpty()) {
return QSize();
}
const auto byWidth = (request.resize.width() >= chosen.width());
const auto byHeight = (request.resize.height() >= chosen.height());
const auto byWidth = (resize.width() >= chosen.width());
const auto byHeight = (resize.height() >= chosen.height());
if (byWidth && byHeight) {
chosen = request.resize;
chosen = resize;
} else if (byWidth || byHeight) {
return QSize();
}
@ -483,7 +489,8 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
frame->original = ConvertFrame(
_stream,
frameWithData,
chooseOriginalResize(),
chooseOriginalResize(
{ frameWithData->width, frameWithData->height }),
std::move(frame->original));
if (frame->original.isNull()) {
frame->prepared.clear();
@ -493,7 +500,10 @@ void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
frame->format = FrameFormat::ARGB32;
}
VideoTrack::PrepareFrameByRequests(frame, _stream.rotation);
VideoTrack::PrepareFrameByRequests(
frame,
_stream.aspect,
_stream.rotation);
Ensures(VideoTrack::IsRasterized(frame));
}
@ -706,22 +716,21 @@ void VideoTrackObject::callReady() {
const auto frame = _shared->frameForPaint();
++_frameIndex;
auto data = VideoInformation();
data.size = FFmpeg::CorrectByAspect(
frame->original.size(),
_stream.aspect);
if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
data.size.transpose();
}
data.cover = frame->original;
data.rotation = _stream.rotation;
data.alpha = frame->alpha;
data.state.duration = _stream.duration;
data.state.position = _syncTimePoint.trackTime;
data.state.receivedTill = _readTillEnd
? _stream.duration
: _syncTimePoint.trackTime;
base::take(_ready)({ data });
base::take(_ready)({ VideoInformation{
.state = {
.position = _syncTimePoint.trackTime,
.receivedTill = (_readTillEnd
? _stream.duration
: _syncTimePoint.trackTime),
.duration = _stream.duration,
},
.size = FFmpeg::TransposeSizeByRotation(
FFmpeg::CorrectByAspect(frame->original.size(), _stream.aspect),
_stream.rotation),
.cover = frame->original,
.rotation = _stream.rotation,
.alpha = frame->alpha,
} });
}
TimePoint VideoTrackObject::trackTime() const {
@ -1060,7 +1069,7 @@ VideoTrack::VideoTrack(
, _streamTimeBase(stream.timeBase)
, _streamDuration(stream.duration)
, _streamRotation(stream.rotation)
//, _streamAspect(stream.aspect)
, _streamAspect(stream.aspect)
, _shared(std::make_unique<Shared>())
, _wrapped(
options,
@ -1232,6 +1241,7 @@ QImage VideoTrack::frameImage(
j->second.image = PrepareByRequest(
frame->original,
frame->alpha,
_streamAspect,
_streamRotation,
useRequest,
std::move(j->second.image));
@ -1258,6 +1268,7 @@ void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
void VideoTrack::PrepareFrameByRequests(
not_null<Frame*> frame,
const AVRational &aspect,
int rotation) {
Expects(frame->format != FrameFormat::ARGB32
|| !frame->original.isNull());
@ -1286,6 +1297,7 @@ void VideoTrack::PrepareFrameByRequests(
prepared.image = PrepareByRequest(
frame->original,
frame->alpha,
aspect,
rotation,
prepared.request,
std::move(prepared.image));

View file

@ -155,7 +155,10 @@ private:
};
static void PrepareFrameByRequests(not_null<Frame*> frame, int rotation);
static void PrepareFrameByRequests(
not_null<Frame*> frame,
const AVRational &aspect,
int rotation);
[[nodiscard]] static bool IsDecoded(not_null<const Frame*> frame);
[[nodiscard]] static bool IsRasterized(not_null<const Frame*> frame);
[[nodiscard]] static bool IsStale(
@ -171,7 +174,7 @@ private:
const AVRational _streamTimeBase;
const crl::time _streamDuration = 0;
const int _streamRotation = 0;
//AVRational _streamAspect = kNormalAspect;
const AVRational _streamAspect = FFmpeg::kNormalAspect;
std::unique_ptr<Shared> _shared;
using Implementation = VideoTrackObject;

@ -1 +1 @@
Subproject commit 95dd2c8465d4b5bfcbcdb47ce7a1d1e743d04477
Subproject commit 2e63c6103e3b23bfcd65dcb8afb19c020511b168