-rw-r--r--  Gemfile | 1
-rw-r--r--  Gemfile.lock | 2
-rw-r--r--  app/javascript/mastodon/components/status.js | 4
-rw-r--r--  app/javascript/mastodon/features/audio/index.js | 238
-rw-r--r--  app/javascript/mastodon/features/status/components/detailed_status.js | 4
-rw-r--r--  app/javascript/mastodon/features/ui/components/audio_modal.js | 4
-rw-r--r--  app/javascript/mastodon/features/ui/components/focal_point_modal.js | 4
-rw-r--r--  app/javascript/styles/mastodon/components.scss | 43
-rw-r--r--  app/models/media_attachment.rb | 13
-rw-r--r--  app/views/media/player.html.haml | 2
-rw-r--r--  app/views/statuses/_detailed_status.html.haml | 2
-rw-r--r--  app/views/statuses/_simple_status.html.haml | 2
-rw-r--r--  app/workers/post_process_media_worker.rb | 2
-rw-r--r--  config/application.rb | 1
-rw-r--r--  lib/paperclip/color_extractor.rb | 189
-rw-r--r--  lib/paperclip/transcoder_extensions.rb | 14
16 files changed, 283 insertions, 242 deletions
diff --git a/Gemfile b/Gemfile
index d9415d874..04eb41bdc 100644
--- a/Gemfile
+++ b/Gemfile
@@ -48,6 +48,7 @@ gem 'omniauth-cas', '~> 1.1'
 gem 'omniauth-saml', '~> 1.10'
 gem 'omniauth', '~> 1.9'
 
+gem 'color_diff', '~> 0.1'
 gem 'discard', '~> 1.2'
 gem 'doorkeeper', '~> 5.4'
 gem 'ed25519', '~> 1.2'
diff --git a/Gemfile.lock b/Gemfile.lock
index fcea81002..bc7106e30 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -165,6 +165,7 @@ GEM
     cocaine (0.5.8)
       climate_control (>= 0.0.3, < 1.0)
     coderay (1.1.3)
+    color_diff (0.1)
     concurrent-ruby (1.1.6)
     connection_pool (2.2.3)
     crack (0.4.3)
@@ -689,6 +690,7 @@ DEPENDENCIES
   chewy (~> 5.1)
   cld3 (~> 3.3.0)
   climate_control (~> 0.2)
+  color_diff (~> 0.1)
   concurrent-ruby
   connection_pool
   devise (~> 4.7)
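
The new color_diff dependency is used by the ColorExtractor processor further down to measure how far apart two palette colors are. A minimal, hypothetical sketch of that usage (the sample RGB values are invented; only ColorDiff::Color::RGB.new and ColorDiff.between are taken from the processor code in this diff):

require 'color_diff'

# Hypothetical palette entries; the processor builds these from ImageMagick
# histogram output via ColorDiff::Color::RGB.new(r, g, b).
background = ColorDiff::Color::RGB.new(22, 25, 31)
candidate  = ColorDiff::Color::RGB.new(230, 233, 239)

# Perceptual distance between the two colors; the extractor keeps the
# candidate farthest from the background as a foreground color.
distance = ColorDiff.between(background, candidate) # => positive Float, larger means more distinct
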
diff --git a/app/javascript/mastodon/components/status.js b/app/javascript/mastodon/components/status.js
index 827b69500..f9f6736e6 100644
--- a/app/javascript/mastodon/components/status.js
+++ b/app/javascript/mastodon/components/status.js
@@ -353,7 +353,9 @@ class Status extends ImmutablePureComponent {
                 src={attachment.get('url')}
                 alt={attachment.get('description')}
                 poster={attachment.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-                blurhash={attachment.get('blurhash')}
+                backgroundColor={attachment.getIn(['meta', 'colors', 'background'])}
+                foregroundColor={attachment.getIn(['meta', 'colors', 'foreground'])}
+                accentColor={attachment.getIn(['meta', 'colors', 'accent'])}
                 duration={attachment.getIn(['meta', 'original', 'duration'], 0)}
                 width={this.props.cachedMediaWidth}
                 height={110}
diff --git a/app/javascript/mastodon/features/audio/index.js b/app/javascript/mastodon/features/audio/index.js
index 99926e52a..686709ac3 100644
--- a/app/javascript/mastodon/features/audio/index.js
+++ b/app/javascript/mastodon/features/audio/index.js
@@ -5,131 +5,12 @@ import { formatTime } from 'mastodon/features/video';
 import Icon from 'mastodon/components/icon';
 import classNames from 'classnames';
 import { throttle } from 'lodash';
-import { encode, decode } from 'blurhash';
 import { getPointerPosition, fileNameFromURL } from 'mastodon/features/video';
 import { debounce } from 'lodash';
 
-const digitCharacters = [
-  '0',
-  '1',
-  '2',
-  '3',
-  '4',
-  '5',
-  '6',
-  '7',
-  '8',
-  '9',
-  'A',
-  'B',
-  'C',
-  'D',
-  'E',
-  'F',
-  'G',
-  'H',
-  'I',
-  'J',
-  'K',
-  'L',
-  'M',
-  'N',
-  'O',
-  'P',
-  'Q',
-  'R',
-  'S',
-  'T',
-  'U',
-  'V',
-  'W',
-  'X',
-  'Y',
-  'Z',
-  'a',
-  'b',
-  'c',
-  'd',
-  'e',
-  'f',
-  'g',
-  'h',
-  'i',
-  'j',
-  'k',
-  'l',
-  'm',
-  'n',
-  'o',
-  'p',
-  'q',
-  'r',
-  's',
-  't',
-  'u',
-  'v',
-  'w',
-  'x',
-  'y',
-  'z',
-  '#',
-  '$',
-  '%',
-  '*',
-  '+',
-  ',',
-  '-',
-  '.',
-  ':',
-  ';',
-  '=',
-  '?',
-  '@',
-  '[',
-  ']',
-  '^',
-  '_',
-  '{',
-  '|',
-  '}',
-  '~',
-];
-
-const decode83 = (str) => {
-  let value = 0;
-  let c, digit;
-
-  for (let i = 0; i < str.length; i++) {
-    c = str[i];
-    digit = digitCharacters.indexOf(c);
-    value = value * 83 + digit;
-  }
-
-  return value;
-};
-
-const decodeRGB = int => ({
-  r: Math.max(0, (int >> 16)),
-  g: Math.max(0, (int >> 8) & 255),
-  b: Math.max(0, (int & 255)),
-});
-
-const luma = ({ r, g, b }) => 0.2126 * r + 0.7152 * g + 0.0722 * b;
-
-const adjustColor = ({ r, g, b }, lumaThreshold = 100) => {
-  let delta;
-
-  if (luma({ r, g, b }) >= lumaThreshold) {
-    delta = -80;
-  } else {
-    delta = 80;
-  }
-
-  return {
-    r: r + delta,
-    g: g + delta,
-    b: b + delta,
-  };
+const hex2rgba = (hex, alpha = 1) => {
+  const [r, g, b] = hex.match(/\w\w/g).map(x => parseInt(x, 16));
+  return `rgba(${r}, ${g}, ${b}, ${alpha})`;
 };
 
 const messages = defineMessages({
@@ -157,7 +38,9 @@ class Audio extends React.PureComponent {
     fullscreen: PropTypes.bool,
     intl: PropTypes.object.isRequired,
     cacheWidth: PropTypes.func,
-    blurhash: PropTypes.string,
+    backgroundColor: PropTypes.string,
+    foregroundColor: PropTypes.string,
+    accentColor: PropTypes.string,
   };
 
   state = {
@@ -169,7 +52,6 @@ class Audio extends React.PureComponent {
     muted: false,
     volume: 0.5,
     dragging: false,
-    color: { r: 255, g: 255, b: 255 },
   };
 
   setPlayerRef = c => {
@@ -207,10 +89,6 @@ class Audio extends React.PureComponent {
     }
   }
 
-  setBlurhashCanvasRef = c => {
-    this.blurhashCanvas = c;
-  }
-
   setCanvasRef = c => {
     this.canvas = c;
 
@@ -222,41 +100,13 @@ class Audio extends React.PureComponent {
   componentDidMount () {
     window.addEventListener('scroll', this.handleScroll);
     window.addEventListener('resize', this.handleResize, { passive: true });
-
-    if (!this.props.blurhash) {
-      const img = new Image();
-      img.crossOrigin = 'anonymous';
-      img.onload = () => this.handlePosterLoad(img);
-      img.src = this.props.poster;
-    } else {
-      this._setColorScheme();
-      this._decodeBlurhash();
-    }
   }
 
   componentDidUpdate (prevProps, prevState) {
-    if (prevProps.poster !== this.props.poster && !this.props.blurhash) {
-      const img = new Image();
-      img.crossOrigin = 'anonymous';
-      img.onload = () => this.handlePosterLoad(img);
-      img.src = this.props.poster;
-    }
-
-    if (prevState.blurhash !== this.state.blurhash || prevProps.blurhash !== this.props.blurhash) {
-      this._setColorScheme();
-      this._decodeBlurhash();
+    if (prevProps.src !== this.props.src || this.state.width !== prevState.width || this.state.height !== prevState.height) {
+      this._clear();
+      this._draw();
     }
-
-    this._clear();
-    this._draw();
-  }
-
-  _decodeBlurhash () {
-    const context = this.blurhashCanvas.getContext('2d');
-    const pixels = decode(this.props.blurhash || this.state.blurhash, 32, 32);
-    const outputImageData = new ImageData(pixels, 32, 32);
-
-    context.putImageData(outputImageData, 0, 0);
   }
 
   componentWillUnmount () {
@@ -425,31 +275,6 @@ class Audio extends React.PureComponent {
     this.analyser = analyser;
   }
 
-  handlePosterLoad = image => {
-    const canvas  = document.createElement('canvas');
-    const context = canvas.getContext('2d');
-
-    canvas.width  = image.width;
-    canvas.height = image.height;
-
-    context.drawImage(image, 0, 0);
-
-    const inputImageData = context.getImageData(0, 0, image.width, image.height);
-    const blurhash = encode(inputImageData.data, image.width, image.height, 4, 4);
-
-    this.setState({ blurhash });
-  }
-
-  _setColorScheme () {
-    const blurhash     = this.props.blurhash || this.state.blurhash;
-    const averageColor = decodeRGB(decode83(blurhash.slice(2, 6)));
-
-    this.setState({
-      color: adjustColor(averageColor),
-      darkText: luma(averageColor) >= 165,
-    });
-  }
-
   handleDownload = () => {
     fetch(this.props.src).then(res => res.blob()).then(blob => {
       const element   = document.createElement('a');
@@ -609,8 +434,8 @@ class Audio extends React.PureComponent {
 
     const gradient = this.canvasContext.createLinearGradient(dx1, dy1, dx2, dy2);
 
-    const mainColor = `rgb(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b})`;
-    const lastColor = `rgba(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b}, 0)`;
+    const mainColor = this._getAccentColor();
+    const lastColor = hex2rgba(mainColor, 0);
 
     gradient.addColorStop(0, mainColor);
     gradient.addColorStop(0.6, mainColor);
@@ -632,17 +457,25 @@ class Audio extends React.PureComponent {
     return Math.floor(this._getRadius() + (PADDING * this._getScaleCoefficient()));
   }
 
-  _getColor () {
-    return `rgb(${this.state.color.r}, ${this.state.color.g}, ${this.state.color.b})`;
+  _getAccentColor () {
+    return this.props.accentColor || '#ffffff';
+  }
+
+  _getBackgroundColor () {
+    return this.props.backgroundColor || '#000000';
+  }
+
+  _getForegroundColor () {
+    return this.props.foregroundColor || '#ffffff';
   }
 
   render () {
     const { src, intl, alt, editable } = this.props;
-    const { paused, muted, volume, currentTime, duration, buffer, darkText, dragging } = this.state;
+    const { paused, muted, volume, currentTime, duration, buffer, dragging } = this.state;
     const progress = (currentTime / duration) * 100;
 
     return (
-      <div className={classNames('audio-player', { editable, 'with-light-background': darkText })} ref={this.setPlayerRef} style={{ width: '100%', height: this.props.fullscreen ? '100%' : (this.state.height || this.props.height) }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
+      <div className={classNames('audio-player', { editable })} ref={this.setPlayerRef} style={{ backgroundColor: this._getBackgroundColor(), color: this._getForegroundColor(), width: '100%', height: this.props.fullscreen ? '100%' : (this.state.height || this.props.height) }} onMouseEnter={this.handleMouseEnter} onMouseLeave={this.handleMouseLeave}>
         <audio
           src={src}
           ref={this.setAudioRef}
@@ -654,24 +487,15 @@ class Audio extends React.PureComponent {
         />
 
         <canvas
-          className='audio-player__background'
-          onClick={this.togglePlay}
-          width='32'
-          height='32'
-          style={{ width: this.state.width, height: this.state.height, position: 'absolute', top: 0, left: 0 }}
-          ref={this.setBlurhashCanvasRef}
-          aria-label={alt}
-          title={alt}
           role='button'
-          tabIndex='0'
-        />
-
-        <canvas
           className='audio-player__canvas'
           width={this.state.width}
           height={this.state.height}
-          style={{ width: '100%', position: 'absolute', top: 0, left: 0, pointerEvents: 'none' }}
+          style={{ width: '100%', position: 'absolute', top: 0, left: 0 }}
           ref={this.setCanvasRef}
+          onClick={this.togglePlay}
+          title={alt}
+          aria-label={alt}
         />
 
         <img
@@ -684,12 +508,12 @@ class Audio extends React.PureComponent {
 
         <div className='video-player__seek' onMouseDown={this.handleMouseDown} ref={this.setSeekRef}>
           <div className='video-player__seek__buffer' style={{ width: `${buffer}%` }} />
-          <div className='video-player__seek__progress' style={{ width: `${progress}%`, backgroundColor: this._getColor() }} />
+          <div className='video-player__seek__progress' style={{ width: `${progress}%`, backgroundColor: this._getAccentColor() }} />
 
           <span
             className={classNames('video-player__seek__handle', { active: dragging })}
             tabIndex='0'
-            style={{ left: `${progress}%`, backgroundColor: this._getColor() }}
+            style={{ left: `${progress}%`, backgroundColor: this._getAccentColor() }}
           />
         </div>
 
@@ -700,12 +524,12 @@ class Audio extends React.PureComponent {
               <button type='button' title={intl.formatMessage(muted ? messages.unmute : messages.mute)} aria-label={intl.formatMessage(muted ? messages.unmute : messages.mute)} onClick={this.toggleMute}><Icon id={muted ? 'volume-off' : 'volume-up'} fixedWidth /></button>
 
               <div className={classNames('video-player__volume', { active: this.state.hovered })} ref={this.setVolumeRef} onMouseDown={this.handleVolumeMouseDown}>
-                <div className='video-player__volume__current' style={{ width: `${volume * 100}%`, backgroundColor: this._getColor() }} />
+                <div className='video-player__volume__current' style={{ width: `${volume * 100}%`, backgroundColor: this._getAccentColor() }} />
 
                 <span
                   className={classNames('video-player__volume__handle')}
                   tabIndex='0'
-                  style={{ left: `${volume * 100}%`, backgroundColor: this._getColor() }}
+                  style={{ left: `${volume * 100}%`, backgroundColor: this._getAccentColor() }}
                 />
               </div>
 
diff --git a/app/javascript/mastodon/features/status/components/detailed_status.js b/app/javascript/mastodon/features/status/components/detailed_status.js
index f7d0c9bd4..b1ae0b2cc 100644
--- a/app/javascript/mastodon/features/status/components/detailed_status.js
+++ b/app/javascript/mastodon/features/status/components/detailed_status.js
@@ -126,7 +126,9 @@ class DetailedStatus extends ImmutablePureComponent {
             alt={attachment.get('description')}
             duration={attachment.getIn(['meta', 'original', 'duration'], 0)}
             poster={attachment.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-            blurhash={attachment.get('blurhash')}
+            backgroundColor={attachment.getIn(['meta', 'colors', 'background'])}
+            foregroundColor={attachment.getIn(['meta', 'colors', 'foreground'])}
+            accentColor={attachment.getIn(['meta', 'colors', 'accent'])}
             height={150}
           />
         );
diff --git a/app/javascript/mastodon/features/ui/components/audio_modal.js b/app/javascript/mastodon/features/ui/components/audio_modal.js
index 1d23925ca..dc033434e 100644
--- a/app/javascript/mastodon/features/ui/components/audio_modal.js
+++ b/app/javascript/mastodon/features/ui/components/audio_modal.js
@@ -61,7 +61,9 @@ export default class AudioModal extends ImmutablePureComponent {
             duration={media.getIn(['meta', 'original', 'duration'], 0)}
             height={150}
             poster={media.get('preview_url') || status.getIn(['account', 'avatar_static'])}
-            blurhash={media.get('blurhash')}
+            backgroundColor={media.getIn(['meta', 'colors', 'background'])}
+            foregroundColor={media.getIn(['meta', 'colors', 'foreground'])}
+            accentColor={media.getIn(['meta', 'colors', 'accent'])}
           />
         </div>
 
diff --git a/app/javascript/mastodon/features/ui/components/focal_point_modal.js b/app/javascript/mastodon/features/ui/components/focal_point_modal.js
index 06d298205..8112e3b9e 100644
--- a/app/javascript/mastodon/features/ui/components/focal_point_modal.js
+++ b/app/javascript/mastodon/features/ui/components/focal_point_modal.js
@@ -329,7 +329,9 @@ class FocalPointModal extends ImmutablePureComponent {
                 duration={media.getIn(['meta', 'original', 'duration'], 0)}
                 height={150}
                 poster={media.get('preview_url') || account.get('avatar_static')}
-                blurhash={media.get('blurhash')}
+                backgroundColor={media.getIn(['meta', 'colors', 'background'])}
+                foregroundColor={media.getIn(['meta', 'colors', 'foreground'])}
+                accentColor={media.getIn(['meta', 'colors', 'accent'])}
                 editable
               />
             )}
diff --git a/app/javascript/styles/mastodon/components.scss b/app/javascript/styles/mastodon/components.scss
index 58bc0ff8b..b32247297 100644
--- a/app/javascript/styles/mastodon/components.scss
+++ b/app/javascript/styles/mastodon/components.scss
@@ -5314,36 +5314,31 @@ a.status-card.compact:hover {
 
   .video-player__volume::before,
   .video-player__seek::before {
-    background: rgba($white, 0.15);
+    background: currentColor;
+    opacity: 0.15;
   }
 
-  &.with-light-background {
-    color: $black;
-
-    .video-player__volume::before,
-    .video-player__seek::before {
-      background: rgba($black, 0.15);
-    }
-
-    .video-player__seek__buffer {
-      background: rgba($black, 0.2);
-    }
+  .video-player__seek__buffer {
+    background: currentColor;
+    opacity: 0.2;
+  }
 
-    .video-player__buttons button {
-      color: rgba($black, 0.75);
+  .video-player__buttons button {
+    color: currentColor;
+    opacity: 0.75;
 
-      &:active,
-      &:hover,
-      &:focus {
-        color: $black;
-      }
+    &:active,
+    &:hover,
+    &:focus {
+      color: currentColor;
+      opacity: 1;
     }
+  }
 
-    .video-player__time-sep,
-    .video-player__time-total,
-    .video-player__time-current {
-      color: $black;
-    }
+  .video-player__time-sep,
+  .video-player__time-total,
+  .video-player__time-current {
+    color: currentColor;
   }
 
   .video-player__seek::before,
diff --git a/app/models/media_attachment.rb b/app/models/media_attachment.rb
index f67566a18..519711401 100644
--- a/app/models/media_attachment.rb
+++ b/app/models/media_attachment.rb
@@ -40,6 +40,13 @@ class MediaAttachment < ApplicationRecord
   VIDEO_FILE_EXTENSIONS = %w(.webm .mp4 .m4v .mov).freeze
   AUDIO_FILE_EXTENSIONS = %w(.ogg .oga .mp3 .wav .flac .opus .aac .m4a .3gp .wma).freeze
 
+  META_KEYS = %i(
+    focus
+    colors
+    original
+    small
+  ).freeze
+
   IMAGE_MIME_TYPES             = %w(image/jpeg image/png image/gif).freeze
   VIDEO_MIME_TYPES             = %w(video/webm video/mp4 video/quicktime video/ogg).freeze
   VIDEO_CONVERTIBLE_MIME_TYPES = %w(video/webm video/quicktime).freeze
@@ -165,7 +172,7 @@ class MediaAttachment < ApplicationRecord
 
   has_attached_file :thumbnail,
                     styles: THUMBNAIL_STYLES,
-                    processors: [:lazy_thumbnail, :blurhash_transcoder],
+                    processors: [:lazy_thumbnail, :blurhash_transcoder, :color_extractor],
                     convert_options: GLOBAL_CONVERT_OPTIONS
 
   validates_attachment_content_type :thumbnail, content_type: IMAGE_MIME_TYPES
@@ -216,7 +223,7 @@ class MediaAttachment < ApplicationRecord
 
     x, y = (point.is_a?(Enumerable) ? point : point.split(',')).map(&:to_f)
 
-    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(*META_KEYS)
     meta['focus'] = { 'x' => x, 'y' => y }
 
     file.instance_write(:meta, meta)
@@ -338,7 +345,7 @@ class MediaAttachment < ApplicationRecord
   end
 
   def populate_meta
-    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(:focus, :original, :small)
+    meta = (file.instance_read(:meta) || {}).with_indifferent_access.slice(*META_KEYS)
 
     file.queued_for_write.each do |style, file|
       meta[style] = style == :small || image? ? image_geometry(file) : video_metadata(file)
diff --git a/app/views/media/player.html.haml b/app/views/media/player.html.haml
index 3d308ee69..1d0374897 100644
--- a/app/views/media/player.html.haml
+++ b/app/views/media/player.html.haml
@@ -11,6 +11,6 @@
     %video{ autoplay: 'autoplay', muted: 'muted', loop: 'loop' }
       %source{ src: @media_attachment.file.url(:original) }
 - elsif @media_attachment.audio?
-  = react_component :audio, src: @media_attachment.file.url(:original), poster: full_asset_url(@media_attachment.account.avatar_static_url), width: 670, height: 380, fullscreen: true, alt: @media_attachment.description, duration: @media_attachment.file.meta.dig(:original, :duration) do
+  = react_component :audio, src: @media_attachment.file.url(:original), poster: @media_attachment.thumbnail.present? ? @media_attachment.thumbnail.url : @media_attachment.account.avatar_static_url, backgroundColor: @media_attachment.file.meta.dig('colors', 'background'), foregroundColor: @media_attachment.file.meta.dig('colors', 'foreground'), accentColor: @media_attachment.file.meta.dig('colors', 'accent'), width: 670, height: 380, fullscreen: true, alt: @media_attachment.description, duration: @media_attachment.file.meta.dig(:original, :duration) do
     %audio{ controls: 'controls' }
       %source{ src: @media_attachment.file.url(:original) }
diff --git a/app/views/statuses/_detailed_status.html.haml b/app/views/statuses/_detailed_status.html.haml
index d10017db9..dce122607 100644
--- a/app/views/statuses/_detailed_status.html.haml
+++ b/app/views/statuses/_detailed_status.html.haml
@@ -33,7 +33,7 @@
         = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
     - elsif status.media_attachments.first.audio?
       - audio = status.media_attachments.first
-      = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, blurhash: audio.blurhash, width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+      = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 670, height: 380, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
         = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
     - else
       = react_component :media_gallery, height: 380, sensitive: status.sensitive?, standalone: true, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
diff --git a/app/views/statuses/_simple_status.html.haml b/app/views/statuses/_simple_status.html.haml
index ab09dfe45..b29e92ddc 100644
--- a/app/views/statuses/_simple_status.html.haml
+++ b/app/views/statuses/_simple_status.html.haml
@@ -39,7 +39,7 @@
         = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
     - elsif status.media_attachments.first.audio?
       - audio = status.media_attachments.first
-      = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, blurhash: audio.blurhash, width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
+      = react_component :audio, src: audio.file.url(:original), poster: audio.thumbnail.present? ? audio.thumbnail.url : status.account.avatar_static_url, backgroundColor: audio.file.meta.dig('colors', 'background'), foregroundColor: audio.file.meta.dig('colors', 'foreground'), accentColor: audio.file.meta.dig('colors', 'accent'), width: 610, height: 343, alt: audio.description, duration: audio.file.meta.dig('original', 'duration') do
         = render partial: 'statuses/attachment_list', locals: { attachments: status.media_attachments }
     - else
       = react_component :media_gallery, height: 343, sensitive: status.sensitive?, autoplay: autoplay, media: status.media_attachments.map { |a| ActiveModelSerializers::SerializableResource.new(a, serializer: REST::MediaAttachmentSerializer).as_json } do
diff --git a/app/workers/post_process_media_worker.rb b/app/workers/post_process_media_worker.rb
index a904f35b1..24201101c 100644
--- a/app/workers/post_process_media_worker.rb
+++ b/app/workers/post_process_media_worker.rb
@@ -32,7 +32,7 @@ class PostProcessMediaWorker
 
     media_attachment.file.reprocess!(:original)
     media_attachment.processing = :complete
-    media_attachment.file_meta = previous_meta.merge(media_attachment.file_meta).with_indifferent_access.slice(:focus, :original, :small)
+    media_attachment.file_meta = previous_meta.merge(media_attachment.file_meta).with_indifferent_access.slice(*MediaAttachment::META_KEYS)
     media_attachment.save
   rescue ActiveRecord::RecordNotFound
     true
diff --git a/config/application.rb b/config/application.rb
index a3c37b042..ad6cf82d7 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -11,6 +11,7 @@ require_relative '../lib/redis/namespace_extensions'
 require_relative '../lib/paperclip/url_generator_extensions'
 require_relative '../lib/paperclip/attachment_extensions'
 require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
+require_relative '../lib/paperclip/transcoder_extensions'
 require_relative '../lib/paperclip/lazy_thumbnail'
 require_relative '../lib/paperclip/gif_transcoder'
 require_relative '../lib/paperclip/video_transcoder'
diff --git a/lib/paperclip/color_extractor.rb b/lib/paperclip/color_extractor.rb
new file mode 100644
index 000000000..44fe5ff1d
--- /dev/null
+++ b/lib/paperclip/color_extractor.rb
@@ -0,0 +1,189 @@
+# frozen_string_literal: true
+
+require 'mime/types/columnar'
+
+module Paperclip
+  class ColorExtractor < Paperclip::Processor
+    MIN_CONTRAST        = 3.0
+    FREQUENCY_THRESHOLD = 0.01
+
+    def make
+      depth = 8
+
+      # Determine background palette by getting colors close to the image's edge only
+      background_palette = palette_from_histogram(convert(':source -alpha set -gravity Center -region 75%x75% -fill None -colorize 100% -alpha transparent +region -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
+
+      # Determine foreground palette from the whole image
+      foreground_palette = palette_from_histogram(convert(':source -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
+
+      background_color   = background_palette.first || foreground_palette.first
+      foreground_colors  = []
+
+      return @file if background_color.nil?
+
+      max_distance       = 0
+      max_distance_color = nil
+
+      foreground_palette.each do |color|
+        distance = ColorDiff.between(background_color, color)
+
+        if distance > max_distance
+          max_distance = distance
+          max_distance_color = color
+        end
+      end
+
+      foreground_colors << max_distance_color unless max_distance_color.nil?
+
+      max_distance       = 0
+      max_distance_color = nil
+
+      foreground_palette.each do |color|
+        distance = ColorDiff.between(background_color, color)
+        contrast = w3c_contrast(background_color, color)
+
+        if distance > max_distance && contrast >= MIN_CONTRAST && !foreground_colors.include?(color)
+          max_distance = distance
+          max_distance_color = color
+        end
+      end
+
+      foreground_colors << max_distance_color unless max_distance_color.nil?
+
+      # If we don't have enough colors for accent and foreground, generate
+      # new ones by manipulating the background color
+      (2 - foreground_colors.size).times do |i|
+        foreground_colors << lighten_or_darken(background_color, 35 + (15 * i))
+      end
+
+      # We want the color with the highest contrast to background to be the foreground one,
+      # and the one with the highest saturation to be the accent one
+      foreground_color = foreground_colors.max_by { |rgb| w3c_contrast(background_color, rgb) }
+      accent_color     = foreground_colors.max_by { |rgb| rgb_to_hsl(rgb.r, rgb.g, rgb.b)[1] }
+
+      meta = {
+        colors: {
+          background: rgb_to_hex(background_color),
+          foreground: rgb_to_hex(foreground_color),
+          accent: rgb_to_hex(accent_color),
+        },
+      }
+
+      attachment.instance.file.instance_write(:meta, (attachment.instance.file.instance_read(:meta) || {}).merge(meta))
+
+      @file
+    end
+
+    private
+
+    def w3c_contrast(color1, color2)
+      luminance1 = (0.2126 * color1.r + 0.7152 * color1.g + 0.0722 * color1.b) + 0.05
+      luminance2 = (0.2126 * color2.r + 0.7152 * color2.g + 0.0722 * color2.b) + 0.05
+
+      if luminance1 > luminance2
+        luminance1 / luminance2
+      else
+        luminance2 / luminance1
+      end
+    end
+
+    # rubocop:disable Style/MethodParameterName
+    def rgb_to_hsl(r, g, b)
+      r /= 255.0
+      g /= 255.0
+      b /= 255.0
+      max = [r, g, b].max
+      min = [r, g, b].min
+      h = (max + min) / 2.0
+      s = (max + min) / 2.0
+      l = (max + min) / 2.0
+
+      if max == min
+        h = 0
+        s = 0 # achromatic
+      else
+        d = max - min
+        s = l >= 0.5 ? d / (2.0 - max - min) : d / (max + min)
+
+        case max
+        when r
+          h = (g - b) / d + (g < b ? 6.0 : 0)
+        when g
+          h = (b - r) / d + 2.0
+        when b
+          h = (r - g) / d + 4.0
+        end
+
+        h /= 6.0
+      end
+
+      [(h * 360).round, (s * 100).round, (l * 100).round]
+    end
+
+    def hue_to_rgb(p, q, t)
+      t += 1 if t.negative?
+      t -= 1 if t > 1
+
+      return (p + (q - p) * 6 * t) if t < 1 / 6.0
+      return q if t < 1 / 2.0
+      return (p + (q - p) * (2 / 3.0 - t) * 6) if t < 2 / 3.0
+
+      p
+    end
+
+    def hsl_to_rgb(h, s, l)
+      h /= 360.0
+      s /= 100.0
+      l /= 100.0
+
+      r = 0.0
+      g = 0.0
+      b = 0.0
+
+      if s == 0.0
+        r = l.to_f
+        g = l.to_f
+        b = l.to_f # achromatic
+      else
+        q = l < 0.5 ? l * (1 + s) : l + s - l * s
+        p = 2 * l - q
+        r = hue_to_rgb(p, q, h + 1 / 3.0)
+        g = hue_to_rgb(p, q, h)
+        b = hue_to_rgb(p, q, h - 1 / 3.0)
+      end
+
+      [(r * 255).round, (g * 255).round, (b * 255).round]
+    end
+    # rubocop:enable Style/MethodParameterName
+
+    def lighten_or_darken(color, by)
+      hue, saturation, light = rgb_to_hsl(color.r, color.g, color.b)
+
+      light = begin
+        if light < 50
+          [100, light + by].min
+        else
+          [0, light - by].max
+        end
+      end
+
+      ColorDiff::Color::RGB.new(*hsl_to_rgb(hue, saturation, light))
+    end
+
+    def palette_from_histogram(result, quantity)
+      frequencies       = result.scan(/([0-9]+)\:/).flatten.map(&:to_f)
+      hex_values        = result.scan(/\#([0-9A-Fa-f]{6,8})/).flatten
+      total_frequencies = frequencies.reduce(&:+).to_f
+
+      frequencies.map.with_index { |f, i| [f / total_frequencies, hex_values[i]] }
+                 .sort_by { |r| -r[0] }
+                 .reject { |r| r[1].size == 8 && r[1].end_with?('00') }
+                 .map { |r| ColorDiff::Color::RGB.new(*r[1][0..5].scan(/../).map { |c| c.to_i(16) }) }
+                 .slice(0, quantity)
+    end
+
+    def rgb_to_hex(rgb)
+      '#%02x%02x%02x' % [rgb.r, rgb.g, rgb.b]
+    end
+  end
+end
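
For context, palette_from_histogram above parses ImageMagick's "histogram:info:" output. A rough sketch, assuming invented sample lines, using the same two regexes as the processor:

# Assumed sample of `convert ... histogram:info:` output (values invented).
# Fully transparent entries carry an 8-digit hex value ending in "00" and are
# rejected by palette_from_histogram.
sample = <<~HISTOGRAM
   6400: (26,43,60) #1A2B3C srgb(26,43,60)
   3200: (230,233,239) #E6E9EF srgb(230,233,239)
    150: (0,0,0,0) #00000000 srgba(0,0,0,0)
HISTOGRAM

frequencies = sample.scan(/([0-9]+)\:/).flatten.map(&:to_f) # => [6400.0, 3200.0, 150.0]
hex_values  = sample.scan(/\#([0-9A-Fa-f]{6,8})/).flatten   # => ["1A2B3C", "E6E9EF", "00000000"]

# The shares are then normalized by the total, sorted descending, the
# transparent entry dropped, and the rest wrapped in ColorDiff::Color::RGB.
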
diff --git a/lib/paperclip/transcoder_extensions.rb b/lib/paperclip/transcoder_extensions.rb
new file mode 100644
index 000000000..c0b2447f3
--- /dev/null
+++ b/lib/paperclip/transcoder_extensions.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Paperclip
+  module TranscoderExtensions
+    # Prevent the transcoder from modifying our meta hash
+    def initialize(file, options = {}, attachment = nil)
+      meta_value = attachment&.instance_read(:meta)
+      super
+      attachment&.instance_write(:meta, meta_value)
+    end
+  end
+end
+
+Paperclip::Transcoder.prepend(Paperclip::TranscoderExtensions)
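
End to end, the extractor merges a colors hash into the attachment's file meta, which the HAML views above read with file.meta.dig('colors', ...) and the web client reads as meta.colors. An illustrative sketch of the expected shape (string keys as the views use them; hex values invented, formatted by rgb_to_hex as '#%02x%02x%02x'):

# Illustrative only: what the stored meta is expected to look like after
# ColorExtractor has run.
meta = {
  'colors' => {
    'background' => '#16191f',
    'foreground' => '#e6e9ef',
    'accent'     => '#4ab8d3',
  },
}

meta.dig('colors', 'accent') # => "#4ab8d3"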