Add color extraction for audio thumbnails (#14209)

Branch: main
Eugen Rochko 2020-07-05 18:28:25 +02:00 committed by GitHub
parent 2f2ab48b75
commit 99f3a55540
16 changed files with 284 additions and 243 deletions

View File

@@ -48,6 +48,7 @@ gem 'omniauth-cas', '~> 1.1'
 gem 'omniauth-saml', '~> 1.10'
 gem 'omniauth', '~> 1.9'
 
+gem 'color_diff', '~> 0.1'
 gem 'discard', '~> 1.2'
 gem 'doorkeeper', '~> 5.4'
 gem 'ed25519', '~> 1.2'

View File

@@ -165,6 +165,7 @@ GEM
     cocaine (0.5.8)
       climate_control (>= 0.0.3, < 1.0)
     coderay (1.1.3)
+    color_diff (0.1)
     concurrent-ruby (1.1.6)
     connection_pool (2.2.3)
     crack (0.4.3)
@@ -689,6 +690,7 @@ DEPENDENCIES
   chewy (~> 5.1)
   cld3 (~> 3.3.0)
   climate_control (~> 0.2)
+  color_diff (~> 0.1)
   concurrent-ruby
   connection_pool
   devise (~> 4.7)

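The new color_diff dependency supplies the perceptual color-distance metric the extractor uses to pick foreground candidates. A minimal sketch of the gem's API as this commit uses it — the two calls, ColorDiff::Color::RGB.new and ColorDiff.between, are the same ones that appear in the processor further down; the sample values are illustrative:

require 'color_diff'

# Colors are plain 0-255 RGB triples.
background = ColorDiff::Color::RGB.new(0x18, 0x1c, 0x23)
candidate  = ColorDiff::Color::RGB.new(0xff, 0xcc, 0x00)

# Returns a perceptual distance; the larger the value, the more
# different the two colors look. The extractor keeps the candidate
# farthest from the background color.
puts ColorDiff.between(background, candidate)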
View File

@@ -353,7 +353,9 @@ class Status extends ImmutablePureComponent {
             src={attachment.get('url')}
             alt={attachment.get('description')}
             poster={attachment.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-            blurhash={attachment.get('blurhash')}
+            backgroundColor={attachment.getIn(['meta', 'colors', 'background'])}
+            foregroundColor={attachment.getIn(['meta', 'colors', 'foreground'])}
+            accentColor={attachment.getIn(['meta', 'colors', 'accent'])}
             duration={attachment.getIn(['meta', 'original', 'duration'], 0)}
             width={this.props.cachedMediaWidth}
             height={110}

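On the client, the blurhash prop is replaced by three colors read from the attachment's meta hash. The color_extractor processor further down writes them in this shape; the hex values here are illustrative:

# What attachment.getIn(['meta', 'colors', 'background']) and friends read:
meta = {
  colors: {
    background: '#181c23', # dominant color near the image's edges
    foreground: '#d8dce3', # highest contrast against the background
    accent: '#ffcc00',     # most saturated of the candidates
  },
}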
View File

@@ -5,131 +5,12 @@ import { formatTime } from 'mastodon/features/video';
 import Icon from 'mastodon/components/icon';
 import classNames from 'classnames';
 import { throttle } from 'lodash';
-import { encode, decode } from 'blurhash';
 import { getPointerPosition, fileNameFromURL } from 'mastodon/features/video';
 import { debounce } from 'lodash';
 
-const digitCharacters = [
-  '0',
-  '1',
-  '2',
-  '3',
-  '4',
-  '5',
-  '6',
-  '7',
-  '8',
-  '9',
-  'A',
-  'B',
-  'C',
-  'D',
-  'E',
-  'F',
-  'G',
-  'H',
-  'I',
-  'J',
-  'K',
-  'L',
-  'M',
-  'N',
-  'O',
-  'P',
-  'Q',
-  'R',
-  'S',
-  'T',
-  'U',
-  'V',
-  'W',
-  'X',
-  'Y',
-  'Z',
-  'a',
-  'b',
-  'c',
-  'd',
-  'e',
-  'f',
-  'g',
-  'h',
-  'i',
-  'j',
-  'k',
-  'l',
-  'm',
-  'n',
-  'o',
-  'p',
-  'q',
-  'r',
-  's',
-  't',
-  'u',
-  'v',
-  'w',
-  'x',
-  'y',
-  'z',
-  '#',
-  '$',
-  '%',
-  '*',
-  '+',
-  ',',
-  '-',
-  '.',
-  ':',
-  ';',
-  '=',
-  '?',
-  '@',
-  '[',
-  ']',
-  '^',
-  '_',
-  '{',
-  '|',
-  '}',
-  '~',
-];
-
-const decode83 = (str) => {
-  let value = 0;
-  let c, digit;
-
-  for (let i = 0; i < str.length; i++) {
-    c = str[i];
-    digit = digitCharacters.indexOf(c);
-    value = value * 83 + digit;
-  }
-
-  return value;
-};
-
-const decodeRGB = int => ({
-  r: Math.max(0, (int >> 16)),
-  g: Math.max(0, (int >> 8) & 255),
-  b: Math.max(0, (int & 255)),
-});
-
-const luma = ({ r, g, b }) => 0.2126 * r + 0.7152 * g + 0.0722 * b;
-
-const adjustColor = ({ r, g, b }, lumaThreshold = 100) => {
-  let delta;
-
-  if (luma({ r, g, b }) >= lumaThreshold) {
-    delta = -80;
-  } else {
-    delta = 80;
-  }
-
-  return {
-    r: r + delta,
-    g: g + delta,
-    b: b + delta,
-  };
-};
+const hex2rgba = (hex, alpha = 1) => {
+  const [r, g, b] = hex.match(/\w\w/g).map(x => parseInt(x, 16));
+  return `rgba(${r}, ${g}, ${b}, ${alpha})`;
+};
 
 const messages = defineMessages({
@@ -157,7 +38,9 @@ class Audio extends React.PureComponent {
     fullscreen: PropTypes.bool,
     intl: PropTypes.object.isRequired,
     cacheWidth: PropTypes.func,
-    blurhash: PropTypes.string,
+    backgroundColor: PropTypes.string,
+    foregroundColor: PropTypes.string,
+    accentColor: PropTypes.string,
   };
 
   state = {
@@ -169,7 +52,6 @@ class Audio extends React.PureComponent {
     muted: false,
     volume: 0.5,
     dragging: false,
-    color: { r: 255, g: 255, b: 255 },
   };
 
   setPlayerRef = c => {
@@ -207,10 +89,6 @@ class Audio extends React.PureComponent {
     }
   }
 
-  setBlurhashCanvasRef = c => {
-    this.blurhashCanvas = c;
-  }
-
   setCanvasRef = c => {
     this.canvas = c;
@@ -222,41 +100,13 @@ class Audio extends React.PureComponent {
   componentDidMount () {
     window.addEventListener('scroll', this.handleScroll);
     window.addEventListener('resize', this.handleResize, { passive: true });
-
-    if (!this.props.blurhash) {
-      const img = new Image();
-      img.crossOrigin = 'anonymous';
-      img.onload = () => this.handlePosterLoad(img);
-      img.src = this.props.poster;
-    } else {
-      this._setColorScheme();
-      this._decodeBlurhash();
-    }
   }
 
   componentDidUpdate (prevProps, prevState) {
-    if (prevProps.poster !== this.props.poster && !this.props.blurhash) {
-      const img = new Image();
-      img.crossOrigin = 'anonymous';
-      img.onload = () => this.handlePosterLoad(img);
-      img.src = this.props.poster;
-    }
-
-    if (prevState.blurhash !== this.state.blurhash || prevProps.blurhash !== this.props.blurhash) {
-      this._setColorScheme();
-      this._decodeBlurhash();
-    }
-
-    this._clear();
-    this._draw();
-  }
-
-  _decodeBlurhash () {
-    const context = this.blurhashCanvas.getContext('2d');
-    const pixels = decode(this.props.blurhash || this.state.blurhash, 32, 32);
-    const outputImageData = new ImageData(pixels, 32, 32);
-    context.putImageData(outputImageData, 0, 0);
-  }
+    if (prevProps.src !== this.props.src || this.state.width !== prevState.width || this.state.height !== prevState.height) {
+      this._clear();
+      this._draw();
+    }
+  }
 
   componentWillUnmount () {
@@ -425,31 +275,6 @@ class Audio extends React.PureComponent {
     this.analyser = analyser;
   }
 
-  handlePosterLoad = image => {
-    const canvas = document.createElement('canvas');
-    const context = canvas.getContext('2d');
-
-    canvas.width = image.width;
-    canvas.height = image.height;
-
-    context.drawImage(image, 0, 0);
-
-    const inputImageData = context.getImageData(0, 0, image.width, image.height);
-    const blurhash = encode(inputImageData.data, image.width, image.height, 4, 4);
-
-    this.setState({ blurhash });
-  }
-
-  _setColorScheme () {
-    const blurhash = this.props.blurhash || this.state.blurhash;
-    const averageColor = decodeRGB(decode83(blurhash.slice(2, 6)));
-
-    this.setState({
-      color: adjustColor(averageColor),
-      darkText: luma(averageColor) >= 165,
-    });
-  }
-
   handleDownload = () => {
     fetch(this.props.src).then(res => res.blob()).then(blob => {
       const element = document.createElement('a');
@@ -609,8 +434,8 @@ class Audio extends React.PureComponent {
     const gradient = this.canvasContext.createLinearGradient(dx1, dy1, dx2, dy2);
 
-    const mainColor = `rgb(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b})`;
-    const lastColor = `rgba(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b}, 0)`;
+    const mainColor = this._getAccentColor();
+    const lastColor = hex2rgba(mainColor, 0);
 
     gradient.addColorStop(0, mainColor);
     gradient.addColorStop(0.6, mainColor);
@@ -632,17 +457,25 @@ class Audio extends React.PureComponent {
     return Math.floor(this._getRadius() + (PADDING * this._getScaleCoefficient()));
   }
 
-  _getColor () {
-    return `rgb(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b})`;
+  _getAccentColor () {
+    return this.props.accentColor || '#ffffff';
+  }
+
+  _getBackgroundColor () {
+    return this.props.backgroundColor || '#000000';
+  }
+
+  _getForegroundColor () {
+    return this.props.foregroundColor || '#ffffff';
   }
 
   render () {
     const { src, intl, alt, editable } = this.props;
-    const { paused, muted, volume, currentTime, duration, buffer, darkText, dragging } = this.state;
+    const { paused, muted, volume, currentTime, duration, buffer, dragging } = this.state;
 
     const progress = (currentTime / duration) * 100;
 
     return (
-      <div className={classNames('audio-player', { editable, 'with-light-background': darkText })} ref={this.setPlayerRef} style={{ width: '100%', height: this.props.fullscreen ? '100%' : (this.state.height || this.props.height) }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
+      <div className={classNames('audio-player', { editable })} ref={this.setPlayerRef} style={{ backgroundColor: this._getBackgroundColor(), color: this._getForegroundColor(), width: '100%', height: this.props.fullscreen ? '100%' : (this.state.height || this.props.height) }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
         <audio
           src={src}
           ref={this.setAudioRef}
@@ -654,24 +487,15 @@ class Audio extends React.PureComponent {
         />
 
         <canvas
-          className='audio-player__background'
-          onClick={this.togglePlay}
-          width='32'
-          height='32'
-          style={{ width: this.state.width, height: this.state.height, position: 'absolute', top: 0, left: 0 }}
-          ref={this.setBlurhashCanvasRef}
-          aria-label={alt}
-          title={alt}
           role='button'
-          tabIndex='0'
-        />
-
-        <canvas
           className='audio-player__canvas'
           width={this.state.width}
           height={this.state.height}
-          style={{ width: '100%', position: 'absolute', top: 0, left: 0, pointerEvents: 'none' }}
+          style={{ width: '100%', position: 'absolute', top: 0, left: 0 }}
           ref={this.setCanvasRef}
+          onClick={this.togglePlay}
+          title={alt}
+          aria-label={alt}
         />
 
         <img
@@ -684,12 +508,12 @@ class Audio extends React.PureComponent {
         <div className='video-player__seek' onMouseDown={this.handleMouseDown} ref={this.setSeekRef}>
           <div className='video-player__seek__buffer' style={{ width: `${buffer}%` }} />
-          <div className='video-player__seek__progress' style={{ width: `${progress}%`, backgroundColor: this._getColor() }} />
+          <div className='video-player__seek__progress' style={{ width: `${progress}%`, backgroundColor: this._getAccentColor() }} />
 
           <span
             className={classNames('video-player__seek__handle', { active: dragging })}
             tabIndex='0'
-            style={{ left: `${progress}%`, backgroundColor: this._getColor() }}
+            style={{ left: `${progress}%`, backgroundColor: this._getAccentColor() }}
           />
         </div>
@@ -700,12 +524,12 @@ class Audio extends React.PureComponent {
               <button type='button' title={intl.formatMessage(muted ? messages.unmute : messages.mute)} aria-label={intl.formatMessage(muted ? messages.unmute : messages.mute)} onClick={this.toggleMute}><Icon id={muted ? 'volume-off' : 'volume-up'} fixedWidth /></button>
 
               <div className={classNames('video-player__volume', { active: this.state.hovered })} ref={this.setVolumeRef} onMouseDown={this.handleVolumeMouseDown}>
-                <div className='video-player__volume__current' style={{ width: `${volume * 100}%`, backgroundColor: this._getColor() }} />
+                <div className='video-player__volume__current' style={{ width: `${volume * 100}%`, backgroundColor: this._getAccentColor() }} />
 
                 <span
                   className={classNames('video-player__volume__handle')}
                   tabIndex='0'
-                  style={{ left: `${volume * 100}%`, backgroundColor: this._getColor() }}
+                  style={{ left: `${volume * 100}%`, backgroundColor: this._getAccentColor() }}
                 />
               </div>

View File

@@ -126,7 +126,9 @@ class DetailedStatus extends ImmutablePureComponent {
             alt={attachment.get('description')}
             duration={attachment.getIn(['meta', 'original', 'duration'], 0)}
             poster={attachment.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-            blurhash={attachment.get('blurhash')}
+            backgroundColor={attachment.getIn(['meta', 'colors', 'background'])}
+            foregroundColor={attachment.getIn(['meta', 'colors', 'foreground'])}
+            accentColor={attachment.getIn(['meta', 'colors', 'accent'])}
             height={150}
           />
         );

View File

@@ -61,7 +61,9 @@ export default class AudioModal extends ImmutablePureComponent {
             duration={media.getIn(['meta', 'original', 'duration'], 0)}
             height={150}
             poster={media.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-            blurhash={media.get('blurhash')}
+            backgroundColor={media.getIn(['meta', 'colors', 'background'])}
+            foregroundColor={media.getIn(['meta', 'colors', 'foreground'])}
+            accentColor={media.getIn(['meta', 'colors', 'accent'])}
           />
         </div>

View File

@@ -329,7 +329,9 @@ class FocalPointModal extends ImmutablePureComponent {
                 duration={media.getIn(['meta', 'original', 'duration'], 0)}
                 height={150}
                 poster={media.get('preview_url') || account.get('avatar_static')}
-                blurhash={media.get('blurhash')}
+                backgroundColor={media.getIn(['meta', 'colors', 'background'])}
+                foregroundColor={media.getIn(['meta', 'colors', 'foreground'])}
+                accentColor={media.getIn(['meta', 'colors', 'accent'])}
                 editable
               />
             )}

View File

@@ -5314,36 +5314,31 @@ a.status-card.compact:hover {
   .video-player__volume::before,
   .video-player__seek::before {
-    background: rgba($white, 0.15);
-  }
-
-  &.with-light-background {
-    color: $black;
-
-    .video-player__volume::before,
-    .video-player__seek::before {
-      background: rgba($black, 0.15);
-    }
+    background: currentColor;
+    opacity: 0.15;
   }
 
   .video-player__seek__buffer {
-    background: rgba($black, 0.2);
+    background: currentColor;
+    opacity: 0.2;
   }
 
   .video-player__buttons button {
-    color: rgba($black, 0.75);
+    color: currentColor;
+    opacity: 0.75;
 
     &:active,
     &:hover,
     &:focus {
-      color: $black;
+      color: currentColor;
+      opacity: 1;
     }
   }
 
   .video-player__time-sep,
   .video-player__time-total,
   .video-player__time-current {
-    color: $black;
+    color: currentColor;
   }
-  }
 
   .video-player__seek::before,

View File

@@ -40,6 +40,13 @@ class MediaAttachment < ApplicationRecord
   VIDEO_FILE_EXTENSIONS = %w(.webm .mp4 .m4v .mov).freeze
   AUDIO_FILE_EXTENSIONS = %w(.ogg .oga .mp3 .wav .flac .opus .aac .m4a .3gp .wma).freeze
 
+  META_KEYS = %i(
+    focus
+    colors
+    original
+    small
+  ).freeze
+
   IMAGE_MIME_TYPES = %w(image/jpeg image/png image/gif).freeze
   VIDEO_MIME_TYPES = %w(video/webm video/mp4 video/quicktime video/ogg).freeze
   VIDEO_CONVERTIBLE_MIME_TYPES = %w(video/webm video/quicktime).freeze
@@ -165,7 +172,7 @@ class MediaAttachment < ApplicationRecord
   has_attached_file :thumbnail,
                     styles: THUMBNAIL_STYLES,
-                    processors: [:lazy_thumbnail, :blurhash_transcoder],
+                    processors: [:lazy_thumbnail, :blurhash_transcoder, :color_extractor],
                     convert_options: GLOBAL_CONVERT_OPTIONS
 
   validates_attachment_content_type :thumbnail, content_type: IMAGE_MIME_TYPES
@@ -216,7 +223,7 @@ class MediaAttachment < ApplicationRecord
     x, y = (point.is_a?(Enumerable) ? point : point.split(',')).map(&:to_f)
 
-    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(*META_KEYS)
     meta['focus'] = { 'x' => x, 'y' => y }
 
     file.instance_write(:meta, meta)
@@ -338,7 +345,7 @@ class MediaAttachment < ApplicationRecord
   end
 
   def populate_meta
-    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(*META_KEYS)
 
     file.queued_for_write.each do |style, file|
       meta[style] = style == :small || image? ? image_geometry(file) : video_metadata(file)

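META_KEYS now whitelists colors alongside the existing keys, so the extracted palette survives every place the model re-slices meta (setting a focus point, populate_meta, and the post-processing worker below). A small sketch of why the slice matters, assuming ActiveSupport's HashWithIndifferentAccess as in Rails; the sample hash contents are illustrative:

require 'active_support/core_ext/hash/indifferent_access'

META_KEYS = %i(focus colors original small).freeze

meta = {
  'focus'    => { 'x' => 0.0, 'y' => 0.0 },
  'colors'   => { 'background' => '#181c23' },
  'original' => { 'duration' => 123.4 },
  'stale'    => 'some leftover key',
}.with_indifferent_access

# Symbol keys match string keys indifferently; without :colors in the
# list, re-saving the focus point would silently drop the palette.
meta.slice(*META_KEYS).keys # => ["focus", "colors", "original"]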
View File

@@ -11,6 +11,6 @@
     %video{ autoplay: 'autoplay', muted: 'muted', loop: 'loop' }
       %source{ src: @media_attachment.file.url(:original) }
   - elsif @media_attachment.audio?
-    = react_component :audio, src: @media_attachment.file.url(:original), poster: full_asset_url(@media_attachment.account.avatar_static_url), width: 670, height: 380, fullscreen: true, alt: @media_attachment.description, duration: @media_attachment.file.meta.dig(:original, :duration) do
+    = react_component :audio, src: @media_attachment.file.url(:original), poster: @media_attachment.thumbnail.present? ? @media_attachment.thumbnail.url : @media_attachment.account.avatar_static_url, backgroundColor: @media_attachment.file.meta.dig('colors', 'background'), foregroundColor: @media_attachment.file.meta.dig('colors', 'foreground'), accentColor: @media_attachment.file.meta.dig('colors', 'accent'), width: 670, height: 380, fullscreen: true, alt: @media_attachment.description, duration: @media_attachment.file.meta.dig(:original, :duration) do
       %audio{ controls: 'controls' }
         %source{ src: @media_attachment.file.url(:original) }

View File

@@ -33,7 +33,7 @@
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, blurhash: audio.blurhash, width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 380, sensitive: status.sensitive?, standalone: true, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do

View File

@@ -39,7 +39,7 @@
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, blurhash: audio.blurhash, width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
      = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 343, sensitive: status.sensitive?, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do

View File

@@ -32,7 +32,7 @@ class PostProcessMediaWorker
     media_attachment.file.reprocess!(:original)
     media_attachment.processing = :complete
-    media_attachment.file_meta = previous_meta.merge(media_attachment.file_meta).with_indifferent_access.slice(:focus, :original, :small)
+    media_attachment.file_meta = previous_meta.merge(media_attachment.file_meta).with_indifferent_access.slice(*MediaAttachment::META_KEYS)
     media_attachment.save
   rescue ActiveRecord::RecordNotFound
     true

View File

@@ -11,6 +11,7 @@ require_relative '../lib/redis/namespace_extensions'
 require_relative '../lib/paperclip/url_generator_extensions'
 require_relative '../lib/paperclip/attachment_extensions'
 require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
+require_relative '../lib/paperclip/transcoder_extensions'
 require_relative '../lib/paperclip/lazy_thumbnail'
 require_relative '../lib/paperclip/gif_transcoder'
 require_relative '../lib/paperclip/video_transcoder'

View File

@@ -0,0 +1,189 @@
+# frozen_string_literal: true
+
+require 'mime/types/columnar'
+
+module Paperclip
+  class ColorExtractor < Paperclip::Processor
+    MIN_CONTRAST = 3.0
+    FREQUENCY_THRESHOLD = 0.01
+
+    def make
+      depth = 8
+
+      # Determine background palette by getting colors close to the image's edge only
+      background_palette = palette_from_histogram(convert(':source -alpha set -gravity Center -region 75%x75% -fill None -colorize 100% -alpha transparent +region -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
+
+      # Determine foreground palette from the whole image
+      foreground_palette = palette_from_histogram(convert(':source -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
+
+      background_color = background_palette.first || foreground_palette.first
+      foreground_colors = []
+
+      return @file if background_color.nil?
+
+      max_distance = 0
+      max_distance_color = nil
+
+      foreground_palette.each do |color|
+        distance = ColorDiff.between(background_color, color)
+
+        if distance > max_distance
+          max_distance = distance
+          max_distance_color = color
+        end
+      end
+
+      foreground_colors << max_distance_color unless max_distance_color.nil?
+
+      max_distance = 0
+      max_distance_color = nil
+
+      foreground_palette.each do |color|
+        distance = ColorDiff.between(background_color, color)
+        contrast = w3c_contrast(background_color, color)
+
+        if distance > max_distance && contrast >= MIN_CONTRAST && !foreground_colors.include?(color)
+          max_distance = distance
+          max_distance_color = color
+        end
+      end
+
+      foreground_colors << max_distance_color unless max_distance_color.nil?
+
+      # If we don't have enough colors for accent and foreground, generate
+      # new ones by manipulating the background color
+      (2 - foreground_colors.size).times do |i|
+        foreground_colors << lighten_or_darken(background_color, 35 + (15 * i))
+      end
+
+      # We want the color with the highest contrast to background to be the foreground one,
+      # and the one with the highest saturation to be the accent one
+      foreground_color = foreground_colors.max_by { |rgb| w3c_contrast(background_color, rgb) }
+      accent_color = foreground_colors.max_by { |rgb| rgb_to_hsl(rgb.r, rgb.g, rgb.b)[1] }
+
+      meta = {
+        colors: {
+          background: rgb_to_hex(background_color),
+          foreground: rgb_to_hex(foreground_color),
+          accent: rgb_to_hex(accent_color),
+        },
+      }
+
+      attachment.instance.file.instance_write(:meta, (attachment.instance.file.instance_read(:meta) || {}).merge(meta))
+
+      @file
+    end
+
+    private
+
+    def w3c_contrast(color1, color2)
+      luminance1 = (0.2126 * color1.r + 0.7152 * color1.g + 0.0722 * color1.b) + 0.05
+      luminance2 = (0.2126 * color2.r + 0.7152 * color2.g + 0.0722 * color2.b) + 0.05
+
+      if luminance1 > luminance2
+        luminance1 / luminance2
+      else
+        luminance2 / luminance1
+      end
+    end
+
+    # rubocop:disable Style/MethodParameterName
+    def rgb_to_hsl(r, g, b)
+      r /= 255.0
+      g /= 255.0
+      b /= 255.0
+
+      max = [r, g, b].max
+      min = [r, g, b].min
+      h = (max + min) / 2.0
+      s = (max + min) / 2.0
+      l = (max + min) / 2.0
+
+      if max == min
+        h = 0
+        s = 0 # achromatic
+      else
+        d = max - min
+        s = l >= 0.5 ? d / (2.0 - max - min) : d / (max + min)
+
+        case max
+        when r
+          h = (g - b) / d + (g < b ? 6.0 : 0)
+        when g
+          h = (b - r) / d + 2.0
+        when b
+          h = (r - g) / d + 4.0
+        end
+
+        h /= 6.0
+      end
+
+      [(h * 360).round, (s * 100).round, (l * 100).round]
+    end
+
+    def hue_to_rgb(p, q, t)
+      t += 1 if t.negative?
+      t -= 1 if t > 1
+
+      return (p + (q - p) * 6 * t) if t < 1 / 6.0
+      return q if t < 1 / 2.0
+      return (p + (q - p) * (2 / 3.0 - t) * 6) if t < 2 / 3.0
+
+      p
+    end
+
+    def hsl_to_rgb(h, s, l)
+      h /= 360.0
+      s /= 100.0
+      l /= 100.0
+
+      r = 0.0
+      g = 0.0
+      b = 0.0
+
+      if s == 0.0
+        r = l.to_f
+        g = l.to_f
+        b = l.to_f # achromatic
+      else
+        q = l < 0.5 ? l * (1 + s) : l + s - l * s
+        p = 2 * l - q
+        r = hue_to_rgb(p, q, h + 1 / 3.0)
+        g = hue_to_rgb(p, q, h)
+        b = hue_to_rgb(p, q, h - 1 / 3.0)
+      end
+
+      [(r * 255).round, (g * 255).round, (b * 255).round]
+    end
+    # rubocop:enable Style/MethodParameterName
+
+    def lighten_or_darken(color, by)
+      hue, saturation, light = rgb_to_hsl(color.r, color.g, color.b)
+
+      light = begin
+        if light < 50
+          [100, light + by].min
+        else
+          [0, light - by].max
+        end
+      end
+
+      ColorDiff::Color::RGB.new(*hsl_to_rgb(hue, saturation, light))
+    end
+
+    def palette_from_histogram(result, quantity)
+      frequencies = result.scan(/([0-9]+)\:/).flatten.map(&:to_f)
+      hex_values = result.scan(/\#([0-9A-Fa-f]{6,8})/).flatten
+      total_frequencies = frequencies.reduce(&:+).to_f
+
+      frequencies.map.with_index { |f, i| [f / total_frequencies, hex_values[i]] }
+                 .sort_by { |r| -r[0] }
+                 .reject { |r| r[1].size == 8 && r[1].end_with?('00') }
+                 .map { |r| ColorDiff::Color::RGB.new(*r[1][0..5].scan(/../).map { |c| c.to_i(16) }) }
+                 .slice(0, quantity)
+    end
+
+    def rgb_to_hex(rgb)
+      '#%02x%02x%02x' % [rgb.r, rgb.g, rgb.b]
+    end
+  end
+end

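palette_from_histogram parses the plain-text output of ImageMagick's histogram:info: pseudo-format. A standalone sketch of that parsing step, using the same regexes on a made-up histogram string (real input comes from the convert calls in make):

require 'color_diff'

# Illustrative histogram:info: output; one line per color bucket, pixel count first.
histogram = <<~TXT
  61440: (24,28,35) #181C23 srgb(24,28,35)
  2048: (255,204,0) #FFCC00 srgb(255,204,0)
  512: (0,0,0,0) #00000000 none
TXT

frequencies = histogram.scan(/([0-9]+)\:/).flatten.map(&:to_f)
hex_values  = histogram.scan(/\#([0-9A-Fa-f]{6,8})/).flatten
total       = frequencies.reduce(&:+)

palette = frequencies.map.with_index { |f, i| [f / total, hex_values[i]] }
                     .sort_by { |r| -r[0] }
                     .reject { |r| r[1].size == 8 && r[1].end_with?('00') } # drop fully transparent buckets
                     .map { |r| ColorDiff::Color::RGB.new(*r[1][0..5].scan(/../).map { |c| c.to_i(16) }) }

palette.size # => 2, most frequent opaque color first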
View File

@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Paperclip
+  module TranscoderExtensions
+    # Prevent the transcoder from modifying our meta hash
+    def initialize(file, options = {}, attachment = nil)
+      meta_value = attachment&.instance_read(:meta)
+      super
+      attachment&.instance_write(:meta, meta_value)
+    end
+  end
+end
+
+Paperclip::Transcoder.prepend(Paperclip::TranscoderExtensions)
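
Module#prepend places the extension ahead of Paperclip::Transcoder in the ancestor chain, so the wrapped initialize snapshots meta before the original constructor runs and writes it back afterwards. A self-contained illustration of the same pattern, with stand-in classes rather than the real Paperclip ones:

# Stand-in for Paperclip::Transcoder; clobbers meta in its constructor.
class Transcoder
  def initialize(file, options = {}, attachment = nil)
    attachment[:meta] = nil if attachment
  end
end

module TranscoderExtensions
  def initialize(file, options = {}, attachment = nil)
    meta_value = attachment && attachment[:meta] # snapshot before super
    super
    attachment[:meta] = meta_value if attachment # restore afterwards
  end
end

Transcoder.prepend(TranscoderExtensions)

attachment = { meta: { colors: { background: '#181c23' } } }
Transcoder.new(nil, {}, attachment)
attachment[:meta] # => still the original hash, restored by the prepended wrapper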