Merge pull request #1393 from ThibG/glitch-soc/merge-upstream
Merge upstream changes

commit b988bc7564
@@ -113,6 +113,10 @@ class Audio extends React.PureComponent {
   }
 
   togglePlay = () => {
+    if (!this.audioContext) {
+      this._initAudioContext();
+    }
+
     if (this.state.paused) {
       this.setState({ paused: false }, () => this.audio.play());
     } else {
@@ -131,10 +135,6 @@ class Audio extends React.PureComponent {
   handlePlay = () => {
     this.setState({ paused: false });
 
-    if (this.canvas && !this.audioContext) {
-      this._initAudioContext();
-    }
-
     if (this.audioContext && this.audioContext.state === 'suspended') {
       this.audioContext.resume();
     }
@@ -115,6 +115,10 @@ class Audio extends React.PureComponent {
   }
 
   togglePlay = () => {
+    if (!this.audioContext) {
+      this._initAudioContext();
+    }
+
     if (this.state.paused) {
       this.setState({ paused: false }, () => this.audio.play());
     } else {
@@ -133,10 +137,6 @@ class Audio extends React.PureComponent {
   handlePlay = () => {
     this.setState({ paused: false });
 
-    if (this.canvas && !this.audioContext) {
-      this._initAudioContext();
-    }
-
     if (this.audioContext && this.audioContext.state === 'suspended') {
       this.audioContext.resume();
     }
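The hunks above apply the same change twice to the audio player component (the headers differ only in line offsets): AudioContext setup moves out of handlePlay and into the togglePlay click handler. A plausible reading is that browsers only let an AudioContext start, or resume, from inside a trusted user gesture, so creating it lazily in the click handler is safer than waiting for the media element's play callback. Below is a minimal standalone sketch of that pattern, with hypothetical element ids and an initAudioContext helper that is illustrative rather than the component's real API:

    // Create the AudioContext lazily, inside a user gesture (a click).
    let audioContext = null;

    function initAudioContext () {
      // Older Safari releases only expose the prefixed constructor.
      const Context = window.AudioContext || window.webkitAudioContext;
      audioContext = new Context();
    }

    document.getElementById('play').addEventListener('click', () => {
      if (!audioContext) {
        initAudioContext();
      }

      // A context created outside a gesture may begin in the 'suspended' state.
      if (audioContext.state === 'suspended') {
        audioContext.resume();
      }

      document.getElementById('track').play();
    });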
@@ -29,11 +29,11 @@
 - if !status.media_attachments.empty?
   - if status.media_attachments.first.video?
     - video = status.media_attachments.first
-    = react_component :video, src: video.file.url(:original), preview: video.thumbnail.present? ? video.thumbnail.url : video.file.url(:small), blurhash: video.blurhash, sensitive: status.sensitive?, width: 670, height: 380, detailed: true, inline: true, alt: video.description do
+    = react_component :video, src: full_asset_url(video.file.url(:original)), preview: full_asset_url(video.thumbnail.present? ? video.thumbnail.url : video.file.url(:small)), blurhash: video.blurhash, sensitive: status.sensitive?, width: 670, height: 380, detailed: true, inline: true, alt: video.description do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+    = react_component :audio, src: full_asset_url(audio.file.url(:original)), poster: full_asset_url(audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url), backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 380, sensitive: status.sensitive?, standalone: true, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
@@ -35,11 +35,11 @@
 - if !status.media_attachments.empty?
   - if status.media_attachments.first.video?
     - video = status.media_attachments.first
-    = react_component :video, src: video.file.url(:original), preview: video.thumbnail.present? ? video.thumbnail.url : video.file.url(:small), blurhash: video.blurhash, sensitive: status.sensitive?, width: 610, height: 343, inline: true, alt: video.description do
+    = react_component :video, src: full_asset_url(video.file.url(:original)), preview: full_asset_url(video.thumbnail.present? ? video.thumbnail.url : video.file.url(:small)), blurhash: video.blurhash, sensitive: status.sensitive?, width: 610, height: 343, inline: true, alt: video.description do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - elsif status.media_attachments.first.audio?
     - audio = status.media_attachments.first
-    = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+    = react_component :audio, src: full_asset_url(audio.file.url(:original)), poster: full_asset_url(audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url), backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
       = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
   - else
     = react_component :media_gallery, height: 343, sensitive: status.sensitive?, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
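Both HAML hunks wrap the media URLs handed to the video and audio React components in full_asset_url, so a storage path that may be relative to the site root becomes an absolute URL and keeps resolving when the markup is rendered away from the instance's own origin, for example in an embed. A sketch of the idea using the standard URL API rather than the Rails helper; the hostname and path are placeholders, not real configuration values:

    // Resolve a possibly-relative upload path against a base origin.
    const resolve = (path, base = 'https://example.social') => new URL(path, base).href;

    resolve('/system/media_attachments/original/audio.mp3');
    // => 'https://example.social/system/media_attachments/original/audio.mp3'

    resolve('https://files.example.social/audio.mp3');
    // Already absolute: the base is ignored and the URL comes back unchanged.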
@@ -5,6 +5,7 @@ require 'mime/types/columnar'
 module Paperclip
   class ColorExtractor < Paperclip::Processor
     MIN_CONTRAST = 3.0
+    ACCENT_MIN_CONTRAST = 2.0
     FREQUENCY_THRESHOLD = 0.01
 
     def make
@@ -26,8 +27,9 @@ module Paperclip
 
       foreground_palette.each do |color|
         distance = ColorDiff.between(background_color, color)
+        contrast = w3c_contrast(background_color, color)
 
-        if distance > max_distance
+        if distance > max_distance && contrast >= ACCENT_MIN_CONTRAST
           max_distance = distance
           max_distance_color = color
         end
@@ -77,8 +79,8 @@ module Paperclip
     private
 
     def w3c_contrast(color1, color2)
-      luminance1 = (0.2126 * color1.r + 0.7152 * color1.g + 0.0722 * color1.b) + 0.05
-      luminance2 = (0.2126 * color2.r + 0.7152 * color2.g + 0.0722 * color2.b) + 0.05
+      luminance1 = color1.to_xyz.y * 0.01 + 0.05
+      luminance2 = color2.to_xyz.y * 0.01 + 0.05
 
       if luminance1 > luminance2
         luminance1 / luminance2
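The three Paperclip hunks tighten the accent-colour pick: a candidate must now reach at least ACCENT_MIN_CONTRAST (2.0) of WCAG contrast against the background in addition to being far from it in ColorDiff distance, and w3c_contrast now derives luminance from the colour's XYZ Y component (apparently scaled from a 0..100 range down to 0..1 by the 0.01 factor) instead of applying the luminance coefficients directly to gamma-encoded RGB channels. For reference, a self-contained sketch of the WCAG 2.x contrast ratio that the helper approximates, kept in JavaScript for consistency with the sketches above and taking 8-bit sRGB inputs:

    // WCAG 2.x contrast ratio between two 8-bit sRGB colours.
    // Channels must be linearized before the luminance coefficients are applied;
    // skipping that step is roughly what the replaced Ruby lines did.
    const linearize = (channel) => {
      const c = channel / 255;
      return c <= 0.03928 ? c / 12.92 : Math.pow((c + 0.055) / 1.055, 2.4);
    };

    const relativeLuminance = ([r, g, b]) =>
      0.2126 * linearize(r) + 0.7152 * linearize(g) + 0.0722 * linearize(b);

    const contrastRatio = (a, b) => {
      const [hi, lo] = [relativeLuminance(a), relativeLuminance(b)].sort((x, y) => y - x);
      return (hi + 0.05) / (lo + 0.05);
    };

    contrastRatio([255, 255, 255], [0, 0, 0]);       // 21, black on white
    contrastRatio([255, 255, 255], [119, 119, 119]); // about 4.48, #777 on white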