Diffstat (limited to 'lib')
-rw-r--r--  lib/chewy/strategy/custom_sidekiq.rb  30
-rw-r--r--  lib/cli.rb  4
-rw-r--r--  lib/devise/ldap_authenticatable.rb  55
-rw-r--r--  lib/devise/two_factor_ldap_authenticatable.rb  32
-rw-r--r--  lib/devise/two_factor_pam_authenticatable.rb  31
-rw-r--r--  lib/json_ld/identity.rb  87
-rw-r--r--  lib/json_ld/security.rb  5
-rw-r--r--  lib/mastodon/accounts_cli.rb  189
-rw-r--r--  lib/mastodon/cache_cli.rb  41
-rw-r--r--  lib/mastodon/cli_helper.rb  63
-rw-r--r--  lib/mastodon/domains_cli.rb  71
-rw-r--r--  lib/mastodon/emoji_cli.rb  6
-rw-r--r--  lib/mastodon/feeds_cli.rb  43
-rw-r--r--  lib/mastodon/media_cli.rb  137
-rw-r--r--  lib/mastodon/preview_cards_cli.rb  59
-rw-r--r--  lib/mastodon/search_cli.rb  23
-rw-r--r--  lib/mastodon/statuses_cli.rb  1
-rw-r--r--  lib/mastodon/version.rb  18
-rw-r--r--  lib/paperclip/audio_transcoder.rb  23
-rw-r--r--  lib/paperclip/gif_transcoder.rb  101
-rw-r--r--  lib/paperclip/lazy_thumbnail.rb  4
-rw-r--r--  lib/paperclip/type_corrector.rb  19
-rw-r--r--  lib/paperclip/video_transcoder.rb  2
-rw-r--r--  lib/tasks/mastodon.rake  30
-rw-r--r--  lib/tasks/repo.rake  15
25 files changed, 787 insertions, 302 deletions
diff --git a/lib/chewy/strategy/custom_sidekiq.rb b/lib/chewy/strategy/custom_sidekiq.rb
new file mode 100644
index 000000000..3e54326ba
--- /dev/null
+++ b/lib/chewy/strategy/custom_sidekiq.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Chewy
+  class Strategy
+    class CustomSidekiq < Base
+      class Worker
+        include ::Sidekiq::Worker
+
+        sidekiq_options queue: 'pull'
+
+        def perform(type, ids, options = {})
+          options[:refresh] = !Chewy.disable_refresh_async if Chewy.disable_refresh_async
+          type.constantize.import!(ids, options)
+        end
+      end
+
+      def update(type, objects, _options = {})
+        return unless Chewy.enabled?
+
+        ids = type.root.id ? Array.wrap(objects) : type.adapter.identify(objects)
+
+        return if ids.empty?
+
+        Worker.perform_async(type.name, ids)
+      end
+
+      def leave; end
+    end
+  end
+end
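A minimal usage sketch for the strategy above (not part of the diff; the record and id are hypothetical). Wrapping writes in the strategy defers the Elasticsearch import to the Sidekiq worker defined in this file:

    Chewy.strategy(:custom_sidekiq) do
      status = Status.find(42)        # hypothetical record
      status.update!(sensitive: true) # index update is queued instead of running inline
    end
    # The strategy's #update enqueues Worker.perform_async(type.name, ids) on the
    # 'pull' queue, where the worker calls type.constantize.import!(ids, options).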
diff --git a/lib/cli.rb b/lib/cli.rb
index be276583d..fbdf49fc3 100644
--- a/lib/cli.rb
+++ b/lib/cli.rb
@@ -9,6 +9,7 @@ require_relative 'mastodon/search_cli'
 require_relative 'mastodon/settings_cli'
 require_relative 'mastodon/statuses_cli'
 require_relative 'mastodon/domains_cli'
+require_relative 'mastodon/preview_cards_cli'
 require_relative 'mastodon/cache_cli'
 require_relative 'mastodon/version'
 
@@ -42,6 +43,9 @@ module Mastodon
     desc 'domains SUBCOMMAND ...ARGS', 'Manage account domains'
     subcommand 'domains', Mastodon::DomainsCLI
 
+    desc 'preview_cards SUBCOMMAND ...ARGS', 'Manage preview cards'
+    subcommand 'preview_cards', Mastodon::PreviewCardsCLI
+
     desc 'cache SUBCOMMAND ...ARGS', 'Manage cache'
     subcommand 'cache', Mastodon::CacheCLI
 
diff --git a/lib/devise/ldap_authenticatable.rb b/lib/devise/ldap_authenticatable.rb
deleted file mode 100644
index 6903d468d..000000000
--- a/lib/devise/ldap_authenticatable.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'net/ldap'
-require 'devise/strategies/authenticatable'
-
-module Devise
-  module Strategies
-    class LdapAuthenticatable < Authenticatable
-      def authenticate!
-        if params[:user]
-          ldap = Net::LDAP.new(
-            host: Devise.ldap_host,
-            port: Devise.ldap_port,
-            base: Devise.ldap_base,
-            encryption: {
-              method: Devise.ldap_method,
-              tls_options: tls_options,
-            },
-            auth: {
-              method: :simple,
-              username: Devise.ldap_bind_dn,
-              password: Devise.ldap_password,
-            },
-            connect_timeout: 10
-          )
-
-          filter = format(Devise.ldap_search_filter, uid: Devise.ldap_uid, email: email)
-
-          if (user_info = ldap.bind_as(base: Devise.ldap_base, filter: filter, password: password))
-            user = User.ldap_get_user(user_info.first)
-            success!(user)
-          else
-            return fail(:invalid)
-          end
-        end
-      end
-
-      def email
-        params[:user][:email]
-      end
-
-      def password
-        params[:user][:password]
-      end
-
-      def tls_options
-        OpenSSL::SSL::SSLContext::DEFAULT_PARAMS.tap do |options|
-          options[:verify_mode] = OpenSSL::SSL::VERIFY_NONE if Devise.ldap_tls_no_verify
-        end
-      end
-    end
-  end
-end
-
-Warden::Strategies.add(:ldap_authenticatable, Devise::Strategies::LdapAuthenticatable)
diff --git a/lib/devise/two_factor_ldap_authenticatable.rb b/lib/devise/two_factor_ldap_authenticatable.rb
new file mode 100644
index 000000000..065aa2de8
--- /dev/null
+++ b/lib/devise/two_factor_ldap_authenticatable.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'net/ldap'
+require 'devise/strategies/base'
+
+module Devise
+  module Strategies
+    class TwoFactorLdapAuthenticatable < Base
+      def valid?
+        valid_params? && mapping.to.respond_to?(:authenticate_with_ldap)
+      end
+
+      def authenticate!
+        resource = mapping.to.authenticate_with_ldap(params[scope])
+
+        if resource && !resource.otp_required_for_login?
+          success!(resource)
+        else
+          fail(:invalid)
+        end
+      end
+
+      protected
+
+      def valid_params?
+        params[scope] && params[scope][:password].present?
+      end
+    end
+  end
+end
+
+Warden::Strategies.add(:two_factor_ldap_authenticatable, Devise::Strategies::TwoFactorLdapAuthenticatable)
diff --git a/lib/devise/two_factor_pam_authenticatable.rb b/lib/devise/two_factor_pam_authenticatable.rb
new file mode 100644
index 000000000..5ce723b33
--- /dev/null
+++ b/lib/devise/two_factor_pam_authenticatable.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'devise/strategies/base'
+
+module Devise
+  module Strategies
+    class TwoFactorPamAuthenticatable < Base
+      def valid?
+        valid_params? && mapping.to.respond_to?(:authenticate_with_pam)
+      end
+
+      def authenticate!
+        resource = mapping.to.authenticate_with_pam(params[scope])
+
+        if resource && !resource.otp_required_for_login?
+          success!(resource)
+        else
+          fail(:invalid)
+        end
+      end
+
+      protected
+
+      def valid_params?
+        params[scope] && params[scope][:password].present?
+      end
+    end
+  end
+end
+
+Warden::Strategies.add(:two_factor_pam_authenticatable, Devise::Strategies::TwoFactorPamAuthenticatable)
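Both strategies register themselves with Warden at the bottom of their files. A hedged sketch of how a Devise initializer can put them ahead of the default database strategy (the guard flags and ordering are reconstructed, not taken from this diff):

    # config/initializers/devise.rb (illustrative)
    Devise.setup do |config|
      config.warden do |manager|
        manager.default_strategies(scope: :user).unshift :two_factor_ldap_authenticatable if Devise.ldap_authentication
        manager.default_strategies(scope: :user).unshift :two_factor_pam_authenticatable  if Devise.pam_authentication
      end
    end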
diff --git a/lib/json_ld/identity.rb b/lib/json_ld/identity.rb
new file mode 100644
index 000000000..4fb3f8e9d
--- /dev/null
+++ b/lib/json_ld/identity.rb
@@ -0,0 +1,87 @@
+# -*- encoding: utf-8 -*-
+# frozen_string_literal: true
+# This file generated automatically from http://w3id.org/identity/v1
+require 'json/ld'
+class JSON::LD::Context
+  add_preloaded("http://w3id.org/identity/v1") do
+    new(term_definitions: {
+      "Credential" => TermDefinition.new("Credential", id: "https://w3id.org/credentials#Credential", simple: true),
+      "CryptographicKey" => TermDefinition.new("CryptographicKey", id: "https://w3id.org/security#Key", simple: true),
+      "CryptographicKeyCredential" => TermDefinition.new("CryptographicKeyCredential", id: "https://w3id.org/credentials#CryptographicKeyCredential", simple: true),
+      "EncryptedMessage" => TermDefinition.new("EncryptedMessage", id: "https://w3id.org/security#EncryptedMessage", simple: true),
+      "GraphSignature2012" => TermDefinition.new("GraphSignature2012", id: "https://w3id.org/security#GraphSignature2012", simple: true),
+      "Group" => TermDefinition.new("Group", id: "https://www.w3.org/ns/activitystreams#Group", simple: true),
+      "Identity" => TermDefinition.new("Identity", id: "https://w3id.org/identity#Identity", simple: true),
+      "LinkedDataSignature2015" => TermDefinition.new("LinkedDataSignature2015", id: "https://w3id.org/security#LinkedDataSignature2015", simple: true),
+      "Organization" => TermDefinition.new("Organization", id: "http://schema.org/Organization", simple: true),
+      "Person" => TermDefinition.new("Person", id: "http://schema.org/Person", simple: true),
+      "PostalAddress" => TermDefinition.new("PostalAddress", id: "http://schema.org/PostalAddress", simple: true),
+      "about" => TermDefinition.new("about", id: "http://schema.org/about", type_mapping: "@id"),
+      "accessControl" => TermDefinition.new("accessControl", id: "https://w3id.org/permissions#accessControl", type_mapping: "@id"),
+      "address" => TermDefinition.new("address", id: "http://schema.org/address", type_mapping: "@id"),
+      "addressCountry" => TermDefinition.new("addressCountry", id: "http://schema.org/addressCountry", simple: true),
+      "addressLocality" => TermDefinition.new("addressLocality", id: "http://schema.org/addressLocality", simple: true),
+      "addressRegion" => TermDefinition.new("addressRegion", id: "http://schema.org/addressRegion", simple: true),
+      "cipherAlgorithm" => TermDefinition.new("cipherAlgorithm", id: "https://w3id.org/security#cipherAlgorithm", simple: true),
+      "cipherData" => TermDefinition.new("cipherData", id: "https://w3id.org/security#cipherData", simple: true),
+      "cipherKey" => TermDefinition.new("cipherKey", id: "https://w3id.org/security#cipherKey", simple: true),
+      "claim" => TermDefinition.new("claim", id: "https://w3id.org/credentials#claim", type_mapping: "@id"),
+      "comment" => TermDefinition.new("comment", id: "http://www.w3.org/2000/01/rdf-schema#comment", simple: true),
+      "created" => TermDefinition.new("created", id: "http://purl.org/dc/terms/created", type_mapping: "http://www.w3.org/2001/XMLSchema#dateTime"),
+      "creator" => TermDefinition.new("creator", id: "http://purl.org/dc/terms/creator", type_mapping: "@id"),
+      "cred" => TermDefinition.new("cred", id: "https://w3id.org/credentials#", simple: true, prefix: true),
+      "credential" => TermDefinition.new("credential", id: "https://w3id.org/credentials#credential", type_mapping: "@id"),
+      "dc" => TermDefinition.new("dc", id: "http://purl.org/dc/terms/", simple: true, prefix: true),
+      "description" => TermDefinition.new("description", id: "http://schema.org/description", simple: true),
+      "digestAlgorithm" => TermDefinition.new("digestAlgorithm", id: "https://w3id.org/security#digestAlgorithm", simple: true),
+      "digestValue" => TermDefinition.new("digestValue", id: "https://w3id.org/security#digestValue", simple: true),
+      "domain" => TermDefinition.new("domain", id: "https://w3id.org/security#domain", simple: true),
+      "email" => TermDefinition.new("email", id: "http://schema.org/email", simple: true),
+      "expires" => TermDefinition.new("expires", id: "https://w3id.org/security#expiration", type_mapping: "http://www.w3.org/2001/XMLSchema#dateTime"),
+      "familyName" => TermDefinition.new("familyName", id: "http://schema.org/familyName", simple: true),
+      "givenName" => TermDefinition.new("givenName", id: "http://schema.org/givenName", simple: true),
+      "id" => TermDefinition.new("id", id: "@id", simple: true),
+      "identity" => TermDefinition.new("identity", id: "https://w3id.org/identity#", simple: true, prefix: true),
+      "identityService" => TermDefinition.new("identityService", id: "https://w3id.org/identity#identityService", type_mapping: "@id"),
+      "idp" => TermDefinition.new("idp", id: "https://w3id.org/identity#idp", type_mapping: "@id"),
+      "image" => TermDefinition.new("image", id: "http://schema.org/image", type_mapping: "@id"),
+      "initializationVector" => TermDefinition.new("initializationVector", id: "https://w3id.org/security#initializationVector", simple: true),
+      "issued" => TermDefinition.new("issued", id: "https://w3id.org/credentials#issued", type_mapping: "http://www.w3.org/2001/XMLSchema#dateTime"),
+      "issuer" => TermDefinition.new("issuer", id: "https://w3id.org/credentials#issuer", type_mapping: "@id"),
+      "label" => TermDefinition.new("label", id: "http://www.w3.org/2000/01/rdf-schema#label", simple: true),
+      "member" => TermDefinition.new("member", id: "http://schema.org/member", type_mapping: "@id"),
+      "memberOf" => TermDefinition.new("memberOf", id: "http://schema.org/memberOf", type_mapping: "@id"),
+      "name" => TermDefinition.new("name", id: "http://schema.org/name", simple: true),
+      "nonce" => TermDefinition.new("nonce", id: "https://w3id.org/security#nonce", simple: true),
+      "normalizationAlgorithm" => TermDefinition.new("normalizationAlgorithm", id: "https://w3id.org/security#normalizationAlgorithm", simple: true),
+      "owner" => TermDefinition.new("owner", id: "https://w3id.org/security#owner", type_mapping: "@id"),
+      "password" => TermDefinition.new("password", id: "https://w3id.org/security#password", simple: true),
+      "paymentProcessor" => TermDefinition.new("paymentProcessor", id: "https://w3id.org/payswarm#processor", simple: true),
+      "perm" => TermDefinition.new("perm", id: "https://w3id.org/permissions#", simple: true, prefix: true),
+      "postalCode" => TermDefinition.new("postalCode", id: "http://schema.org/postalCode", simple: true),
+      "preferences" => TermDefinition.new("preferences", id: "https://w3id.org/payswarm#preferences", type_mapping: "@vocab"),
+      "privateKey" => TermDefinition.new("privateKey", id: "https://w3id.org/security#privateKey", type_mapping: "@id"),
+      "privateKeyPem" => TermDefinition.new("privateKeyPem", id: "https://w3id.org/security#privateKeyPem", simple: true),
+      "ps" => TermDefinition.new("ps", id: "https://w3id.org/payswarm#", simple: true, prefix: true),
+      "publicKey" => TermDefinition.new("publicKey", id: "https://w3id.org/security#publicKey", type_mapping: "@id"),
+      "publicKeyPem" => TermDefinition.new("publicKeyPem", id: "https://w3id.org/security#publicKeyPem", simple: true),
+      "publicKeyService" => TermDefinition.new("publicKeyService", id: "https://w3id.org/security#publicKeyService", type_mapping: "@id"),
+      "rdf" => TermDefinition.new("rdf", id: "http://www.w3.org/1999/02/22-rdf-syntax-ns#", simple: true, prefix: true),
+      "rdfs" => TermDefinition.new("rdfs", id: "http://www.w3.org/2000/01/rdf-schema#", simple: true, prefix: true),
+      "recipient" => TermDefinition.new("recipient", id: "https://w3id.org/credentials#recipient", type_mapping: "@id"),
+      "revoked" => TermDefinition.new("revoked", id: "https://w3id.org/security#revoked", type_mapping: "http://www.w3.org/2001/XMLSchema#dateTime"),
+      "schema" => TermDefinition.new("schema", id: "http://schema.org/", simple: true, prefix: true),
+      "sec" => TermDefinition.new("sec", id: "https://w3id.org/security#", simple: true, prefix: true),
+      "signature" => TermDefinition.new("signature", id: "https://w3id.org/security#signature", simple: true),
+      "signatureAlgorithm" => TermDefinition.new("signatureAlgorithm", id: "https://w3id.org/security#signatureAlgorithm", simple: true),
+      "signatureValue" => TermDefinition.new("signatureValue", id: "https://w3id.org/security#signatureValue", simple: true),
+      "streetAddress" => TermDefinition.new("streetAddress", id: "http://schema.org/streetAddress", simple: true),
+      "title" => TermDefinition.new("title", id: "http://purl.org/dc/terms/title", simple: true),
+      "type" => TermDefinition.new("type", id: "@type", simple: true),
+      "url" => TermDefinition.new("url", id: "http://schema.org/url", type_mapping: "@id"),
+      "writePermission" => TermDefinition.new("writePermission", id: "https://w3id.org/permissions#writePermission", type_mapping: "@id"),
+      "xsd" => TermDefinition.new("xsd", id: "http://www.w3.org/2001/XMLSchema#", simple: true, prefix: true)
+    })
+  end
+  alias_preloaded("https://w3id.org/identity/v1", "http://w3id.org/identity/v1")
+end
diff --git a/lib/json_ld/security.rb b/lib/json_ld/security.rb
index 1230206f0..a6fbce95f 100644
--- a/lib/json_ld/security.rb
+++ b/lib/json_ld/security.rb
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
 # frozen_string_literal: true
-# This file generated automatically from https://w3id.org/security/v1
+# This file generated automatically from http://w3id.org/security/v1
 require 'json/ld'
 class JSON::LD::Context
-  add_preloaded("https://w3id.org/security/v1") do
+  add_preloaded("http://w3id.org/security/v1") do
     new(processingMode: "json-ld-1.0", term_definitions: {
       "CryptographicKey" => TermDefinition.new("CryptographicKey", id: "https://w3id.org/security#Key", simple: true),
       "EcdsaKoblitzSignature2016" => TermDefinition.new("EcdsaKoblitzSignature2016", id: "https://w3id.org/security#EcdsaKoblitzSignature2016", simple: true),
@@ -47,4 +47,5 @@ class JSON::LD::Context
       "xsd" => TermDefinition.new("xsd", id: "http://www.w3.org/2001/XMLSchema#", simple: true, prefix: true)
     })
   end
+  alias_preloaded("https://w3id.org/security/v1", "http://w3id.org/security/v1")
 end
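With both the http:// and https:// forms of the identity and security context IRIs preloaded and aliased to each other, documents referencing either form can be expanded without fetching the context document. A rough sketch, assuming the json-ld gem consults its preloaded contexts during expansion:

    require 'json/ld'

    doc = {
      '@context'     => 'https://w3id.org/security/v1',
      'publicKeyPem' => '-----BEGIN PUBLIC KEY-----...'
    }

    # The context is resolved from the preloaded table rather than over HTTP.
    JSON::LD::API.expand(doc)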
diff --git a/lib/mastodon/accounts_cli.rb b/lib/mastodon/accounts_cli.rb
index 7d0215313..6dbb75689 100644
--- a/lib/mastodon/accounts_cli.rb
+++ b/lib/mastodon/accounts_cli.rb
@@ -7,6 +7,8 @@ require_relative 'cli_helper'
 
 module Mastodon
   class AccountsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
@@ -26,18 +28,20 @@ module Mastodon
       if options[:all]
         processed = 0
         delay     = 0
+        scope     = Account.local.without_suspended
+        progress  = create_progress_bar(scope.count)
 
-        Account.local.without_suspended.find_in_batches do |accounts|
+        scope.find_in_batches do |accounts|
           accounts.each do |account|
             rotate_keys_for_account(account, delay)
+            progress.increment
             processed += 1
-            say('.', :green, false)
           end
 
           delay += 5.minutes
         end
 
-        say
+        progress.finish
         say("OK, rotated keys for #{processed} accounts", :green)
       elsif username.present?
         rotate_keys_for_account(Account.find_local(username))
@@ -181,7 +185,7 @@ module Mastodon
       end
 
       say("Deleting user with #{account.statuses_count} statuses, this might take a while...")
-      SuspendAccountService.new.call(account, including_user: true)
+      SuspendAccountService.new.call(account, reserve_email: false)
       say('OK', :green)
     end
 
@@ -206,6 +210,7 @@ module Mastodon
       say('OK', :green)
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
     option :dry_run, type: :boolean
     desc 'cull', 'Remove remote accounts that no longer exist'
     long_desc <<-LONG_DESC
@@ -215,63 +220,45 @@ module Mastodon
 
       Accounts that have had confirmed activity within the last week
       are excluded from the checks.
-
-      Domains that are unreachable are not checked.
-
-      With the --dry-run option, no deletes will actually be carried
-      out.
     LONG_DESC
     def cull
       skip_threshold = 7.days.ago
-      culled         = 0
-      dry_run_culled = []
-      skip_domains   = Set.new
       dry_run        = options[:dry_run] ? ' (DRY RUN)' : ''
+      skip_domains   = Concurrent::Set.new
 
-      Account.remote.where(protocol: :activitypub).partitioned.find_each do |account|
-        next if account.updated_at >= skip_threshold || (account.last_webfingered_at.present? && account.last_webfingered_at >= skip_threshold)
+      processed, culled = parallelize_with_progress(Account.remote.where(protocol: :activitypub).partitioned) do |account|
+        next if account.updated_at >= skip_threshold || (account.last_webfingered_at.present? && account.last_webfingered_at >= skip_threshold) || skip_domains.include?(account.domain)
 
         code = 0
-        unless skip_domains.include?(account.domain)
-          begin
-            code = Request.new(:head, account.uri).perform(&:code)
-          rescue HTTP::ConnectionError
-            skip_domains << account.domain
-          rescue StandardError
-            next
-          end
+
+        begin
+          code = Request.new(:head, account.uri).perform(&:code)
+        rescue HTTP::ConnectionError
+          skip_domains << account.domain
         end
 
         if [404, 410].include?(code)
-          if options[:dry_run]
-            dry_run_culled << account.acct
-          else
-            SuspendAccountService.new.call(account, destroy: true)
-          end
-          culled += 1
-          say('+', :green, false)
+          SuspendAccountService.new.call(account, reserve_username: false) unless options[:dry_run]
+          1
         else
-          account.touch # Touch account even during dry run to avoid getting the account into the window again
-          say('.', nil, false)
+          # Touch account even during dry run to avoid getting the account into the window again
+          account.touch
         end
       end
 
-      say
-      say("Removed #{culled} accounts. #{skip_domains.size} servers skipped#{dry_run}", skip_domains.empty? ? :green : :yellow)
+      say("Visited #{processed} accounts, removed #{culled}#{dry_run}", :green)
 
       unless skip_domains.empty?
-        say('The following servers were not available during the check:', :yellow)
+        say('The following domains were not available during the check:', :yellow)
         skip_domains.each { |domain| say('    ' + domain) }
       end
-
-      unless dry_run_culled.empty?
-        say('The following accounts would have been deleted:', :green)
-        dry_run_culled.each { |account| say('    ' + account) }
-      end
     end
 
     option :all, type: :boolean
     option :domain
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    option :dry_run, type: :boolean
     desc 'refresh [USERNAME]', 'Fetch remote user data and files'
     long_desc <<-LONG_DESC
       Fetch remote user data and files for one or multiple accounts.
@@ -280,21 +267,23 @@ module Mastodon
       Through the --domain option, this can be narrowed down to a
       specific domain only. Otherwise, a single remote account must
       be specified with USERNAME.
-
-      All processing is done in the background through Sidekiq.
     LONG_DESC
     def refresh(username = nil)
+      dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
+
       if options[:domain] || options[:all]
-        queued = 0
         scope  = Account.remote
         scope  = scope.where(domain: options[:domain]) if options[:domain]
 
-        scope.select(:id).reorder(nil).find_in_batches do |accounts|
-          Maintenance::RedownloadAccountMediaWorker.push_bulk(accounts.map(&:id))
-          queued += accounts.size
+        processed, = parallelize_with_progress(scope) do |account|
+          next if options[:dry_run]
+
+          account.reset_avatar!
+          account.reset_header!
+          account.save
         end
 
-        say("Scheduled refreshment of #{queued} accounts", :green, true)
+        say("Refreshed #{processed} accounts#{dry_run}", :green, true)
       elsif username.present?
         username, domain = username.split('@')
         account = Account.find_remote(username, domain)
@@ -304,72 +293,53 @@ module Mastodon
           exit(1)
         end
 
-        Maintenance::RedownloadAccountMediaWorker.perform_async(account.id)
-        say('OK', :green)
+        unless options[:dry_run]
+          account.reset_avatar!
+          account.reset_header!
+          account.save
+        end
+
+        say("OK#{dry_run}", :green)
       else
         say('No account(s) given', :red)
         exit(1)
       end
     end
 
-    desc 'follow ACCT', 'Make all local accounts follow account specified by ACCT'
-    long_desc <<-LONG_DESC
-      Make all local accounts follow an account specified by ACCT. ACCT can be
-      a simple username, in case of a local user. It can also be in the format
-      username@domain, in case of a remote user.
-    LONG_DESC
-    def follow(acct)
-      target_account = ResolveAccountService.new.call(acct)
-      processed      = 0
-      failed         = 0
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    desc 'follow USERNAME', 'Make all local accounts follow account specified by USERNAME'
+    def follow(username)
+      target_account = Account.find_local(username)
 
       if target_account.nil?
-        say("Target account (#{acct}) could not be resolved", :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      Account.local.without_suspended.find_each do |account|
-        begin
-          FollowService.new.call(account, target_account)
-          processed += 1
-          say('.', :green, false)
-        rescue StandardError
-          failed += 1
-          say('.', :red, false)
-        end
+      processed, = parallelize_with_progress(Account.local.without_suspended) do |account|
+        FollowService.new.call(account, target_account)
       end
 
-      say("OK, followed target from #{processed} accounts, skipped #{failed}", :green)
+      say("OK, followed target from #{processed} accounts", :green)
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     desc 'unfollow ACCT', 'Make all local accounts unfollow account specified by ACCT'
-    long_desc <<-LONG_DESC
-      Make all local accounts unfollow an account specified by ACCT. ACCT can be
-      a simple username, in case of a local user. It can also be in the format
-      username@domain, in case of a remote user.
-    LONG_DESC
     def unfollow(acct)
       target_account = Account.find_remote(*acct.split('@'))
-      processed      = 0
-      failed         = 0
 
       if target_account.nil?
-        say("Target account (#{acct}) was not found", :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      target_account.followers.local.find_each do |account|
-        begin
-          UnfollowService.new.call(account, target_account)
-          processed += 1
-          say('.', :green, false)
-        rescue StandardError
-          failed += 1
-          say('.', :red, false)
-        end
+      processed, = parallelize_with_progress(target_account.followers.local) do |account|
+        UnfollowService.new.call(account, target_account)
       end
 
-      say("OK, unfollowed target from #{processed} accounts, skipped #{failed}", :green)
+      say("OK, unfollowed target from #{processed} accounts", :green)
     end
 
     option :follows, type: :boolean, default: false
@@ -392,51 +362,50 @@ module Mastodon
       account = Account.find_local(username)
 
       if account.nil?
-        say('No user with such username', :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      if options[:follows]
-        processed = 0
-        failed    = 0
+      total     = 0
+      total    += Account.where(id: ::Follow.where(account: account).select(:target_account_id)).count if options[:follows]
+      total    += Account.where(id: ::Follow.where(target_account: account).select(:account_id)).count if options[:followers]
+      progress  = create_progress_bar(total)
+      processed = 0
 
-        say("Unfollowing #{account.username}'s followees, this might take a while...")
+      if options[:follows]
+        scope = Account.where(id: ::Follow.where(account: account).select(:target_account_id))
 
-        Account.where(id: ::Follow.where(account: account).select(:target_account_id)).find_each do |target_account|
+        scope.find_each do |target_account|
           begin
             UnfollowService.new.call(account, target_account)
+          rescue => e
+            progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+          ensure
+            progress.increment
             processed += 1
-            say('.', :green, false)
-          rescue StandardError
-            failed += 1
-            say('.', :red, false)
           end
         end
 
         BootstrapTimelineWorker.perform_async(account.id)
-
-        say("OK, unfollowed #{processed} followees, skipped #{failed}", :green)
       end
 
       if options[:followers]
-        processed = 0
-        failed    = 0
-
-        say("Removing #{account.username}'s followers, this might take a while...")
+        scope = Account.where(id: ::Follow.where(target_account: account).select(:account_id))
 
-        Account.where(id: ::Follow.where(target_account: account).select(:account_id)).find_each do |target_account|
+        scope.find_each do |target_account|
           begin
             UnfollowService.new.call(target_account, account)
+          rescue => e
+            progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+          ensure
+            progress.increment
             processed += 1
-            say('.', :green, false)
-          rescue StandardError
-            failed += 1
-            say('.', :red, false)
           end
         end
-
-        say("OK, removed #{processed} followers, skipped #{failed}", :green)
       end
+
+      progress.finish
+      say("Processed #{processed} relationships", :green, true)
     end
 
     option :number, type: :numeric, aliases: [:n]
diff --git a/lib/mastodon/cache_cli.rb b/lib/mastodon/cache_cli.rb
index e9b6667b3..803404c34 100644
--- a/lib/mastodon/cache_cli.rb
+++ b/lib/mastodon/cache_cli.rb
@@ -6,6 +6,8 @@ require_relative 'cli_helper'
 
 module Mastodon
   class CacheCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
@@ -15,5 +17,44 @@ module Mastodon
       Rails.cache.clear
       say('OK', :green)
     end
+
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    desc 'recount TYPE', 'Update hard-cached counters'
+    long_desc <<~LONG_DESC
+      Update hard-cached counters of TYPE by counting referenced
+      records from scratch. TYPE can be "accounts" or "statuses".
+
+      It may take a very long time to finish, depending on the
+      size of the database.
+    LONG_DESC
+    def recount(type)
+      case type
+      when 'accounts'
+        processed, = parallelize_with_progress(Account.local.includes(:account_stat)) do |account|
+          account_stat                 = account.account_stat
+          account_stat.following_count = account.active_relationships.count
+          account_stat.followers_count = account.passive_relationships.count
+          account_stat.statuses_count  = account.statuses.where.not(visibility: :direct).count
+
+          account_stat.save if account_stat.changed?
+        end
+      when 'statuses'
+        processed, = parallelize_with_progress(Status.includes(:status_stat)) do |status|
+          status_stat                  = status.status_stat
+          status_stat.replies_count    = status.replies.where.not(visibility: :direct).count
+          status_stat.reblogs_count    = status.reblogs.count
+          status_stat.favourites_count = status.favourites.count
+
+          status_stat.save if status_stat.changed?
+        end
+      else
+        say("Unknown type: #{type}", :red)
+        exit(1)
+      end
+
+      say
+      say("OK, recounted #{processed} records", :green)
+    end
   end
 end
diff --git a/lib/mastodon/cli_helper.rb b/lib/mastodon/cli_helper.rb
index 2f807d08c..ec4d9a81e 100644
--- a/lib/mastodon/cli_helper.rb
+++ b/lib/mastodon/cli_helper.rb
@@ -7,3 +7,66 @@ ActiveRecord::Base.logger    = dev_null
 ActiveJob::Base.logger       = dev_null
 HttpLog.configuration.logger = dev_null
 Paperclip.options[:log]      = false
+
+module Mastodon
+  module CLIHelper
+    def create_progress_bar(total = nil)
+      ProgressBar.create(total: total, format: '%c/%u |%b%i| %e')
+    end
+
+    def parallelize_with_progress(scope)
+      if options[:concurrency] < 1
+        say('Cannot run with this concurrency setting, must be at least 1', :red)
+        exit(1)
+      end
+
+      ActiveRecord::Base.configurations[Rails.env]['pool'] = options[:concurrency] + 1
+
+      progress  = create_progress_bar(scope.count)
+      pool      = Concurrent::FixedThreadPool.new(options[:concurrency])
+      total     = Concurrent::AtomicFixnum.new(0)
+      aggregate = Concurrent::AtomicFixnum.new(0)
+
+      scope.reorder(nil).find_in_batches do |items|
+        futures = []
+
+        items.each do |item|
+          futures << Concurrent::Future.execute(executor: pool) do
+            begin
+              if !progress.total.nil? && progress.progress + 1 > progress.total
+                # The number of items has changed between start and now,
+                # since there is no good way to predict the final count from
+                # here, just change the progress bar to an indeterminate one
+
+                progress.total = nil
+              end
+
+              progress.log("Processing #{item.id}") if options[:verbose]
+
+              result = ActiveRecord::Base.connection_pool.with_connection do
+                yield(item)
+              end
+
+              aggregate.increment(result) if result.is_a?(Integer)
+            rescue => e
+              progress.log pastel.red("Error processing #{item.id}: #{e}")
+            ensure
+              progress.increment
+            end
+          end
+        end
+
+        total.increment(items.size)
+        futures.map(&:value)
+      end
+
+      progress.stop
+
+      [total.value, aggregate.value]
+    end
+
+    def pastel
+      @pastel ||= Pastel.new
+    end
+  end
+end
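The CLI commands elsewhere in this diff all consume this helper the same way; an illustrative skeleton (the command name and scope are placeholders):

    class ExampleCLI < Thor
      include Mastodon::CLIHelper

      option :concurrency, type: :numeric, default: 5, aliases: [:c]
      option :verbose, type: :boolean, aliases: [:v]
      desc 'example', 'Illustrative only'
      def example
        scope = MediaAttachment.cached

        # The block runs on a thread pool sized by --concurrency. Integer return
        # values are summed into `aggregate`; `processed` counts visited records.
        processed, aggregate = parallelize_with_progress(scope) do |media_attachment|
          media_attachment.file_file_size
        end

        say("Visited #{processed} attachments totalling #{aggregate} bytes", :green)
      end
    end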
diff --git a/lib/mastodon/domains_cli.rb b/lib/mastodon/domains_cli.rb
index b081581fe..b5435bb5e 100644
--- a/lib/mastodon/domains_cli.rb
+++ b/lib/mastodon/domains_cli.rb
@@ -7,41 +7,58 @@ require_relative 'cli_helper'
 
 module Mastodon
   class DomainsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     option :dry_run, type: :boolean
-    desc 'purge DOMAIN', 'Remove accounts from a DOMAIN without a trace'
+    option :whitelist_mode, type: :boolean
+    desc 'purge [DOMAIN...]', 'Remove accounts from a DOMAIN without a trace'
     long_desc <<-LONG_DESC
       Remove all accounts from a given DOMAIN without leaving behind any
       records. Unlike a suspension, if the DOMAIN still exists in the wild,
       it means the accounts could return if they are resolved again.
+
+      When the --whitelist-mode option is given, instead of purging accounts
+      from a single domain, all accounts from domains that are not whitelisted
+      are removed from the database.
     LONG_DESC
-    def purge(domain)
-      removed = 0
+    def purge(*domains)
       dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
 
-      Account.where(domain: domain).find_each do |account|
-        SuspendAccountService.new.call(account, destroy: true) unless options[:dry_run]
-        removed += 1
-        say('.', :green, false)
+      scope = begin
+        if options[:whitelist_mode]
+          Account.remote.where.not(domain: DomainAllow.pluck(:domain))
+        elsif !domains.empty?
+          Account.remote.where(domain: domains)
+        else
+          say('No domain(s) given', :red)
+          exit(1)
+        end
+      end
+
+      processed, = parallelize_with_progress(scope) do |account|
+        SuspendAccountService.new.call(account, reserve_username: false, skip_side_effects: true) unless options[:dry_run]
       end
 
-      DomainBlock.where(domain: domain).destroy_all unless options[:dry_run]
+      DomainBlock.where(domain: domains).destroy_all unless options[:dry_run]
 
-      say
-      say("Removed #{removed} accounts#{dry_run}", :green)
+      say("Removed #{processed} accounts#{dry_run}", :green)
 
-      custom_emojis = CustomEmoji.where(domain: domain)
+      custom_emojis = CustomEmoji.where(domain: domains)
       custom_emojis_count = custom_emojis.count
       custom_emojis.destroy_all unless options[:dry_run]
+
       say("Removed #{custom_emojis_count} custom emojis", :green)
     end
 
     option :concurrency, type: :numeric, default: 50, aliases: [:c]
-    option :silent, type: :boolean, default: false, aliases: [:s]
     option :format, type: :string, default: 'summary', aliases: [:f]
+    option :exclude_suspended, type: :boolean, default: false, aliases: [:x]
     desc 'crawl [START]', 'Crawl all known peers, optionally beginning at START'
     long_desc <<-LONG_DESC
       Crawl the fediverse by using the Mastodon REST API endpoints that expose
@@ -52,26 +69,31 @@ module Mastodon
       The --concurrency (-c) option controls the number of threads performing HTTP
       requests at the same time. More threads means the crawl may complete faster.
 
-      The --silent (-s) option controls progress output.
-
       The --format (-f) option controls how the data is displayed at the end. By
       default (`summary`), a summary of the statistics is returned. The other options
       are `domains`, which returns a newline-delimited list of all discovered peers,
       and `json`, which dumps all the aggregated data raw.
+
+      The --exclude-suspended (-x) option means that domains that are suspended
+      instance-wide do not appear in the output and are not included in summaries.
+      This also excludes subdomains of any of those domains.
     LONG_DESC
     def crawl(start = nil)
-      stats     = Concurrent::Hash.new
-      processed = Concurrent::AtomicFixnum.new(0)
-      failed    = Concurrent::AtomicFixnum.new(0)
-      start_at  = Time.now.to_f
-      seed      = start ? [start] : Account.remote.domains
+      stats           = Concurrent::Hash.new
+      processed       = Concurrent::AtomicFixnum.new(0)
+      failed          = Concurrent::AtomicFixnum.new(0)
+      start_at        = Time.now.to_f
+      seed            = start ? [start] : Account.remote.domains
+      blocked_domains = Regexp.new('\\.?' + DomainBlock.where(severity: 1).pluck(:domain).join('|') + '$')
+      progress        = create_progress_bar
 
       pool = Concurrent::ThreadPoolExecutor.new(min_threads: 0, max_threads: options[:concurrency], idletime: 10, auto_terminate: true, max_queue: 0)
 
       work_unit = ->(domain) do
         next if stats.key?(domain)
+        next if options[:exclude_suspended] && domain.match(blocked_domains)
+
         stats[domain] = nil
-        processed.increment
 
         begin
           Request.new(:get, "https://#{domain}/api/v1/instance").perform do |res|
@@ -91,11 +113,11 @@ module Mastodon
             next unless res.code == 200
             stats[domain]['activity'] = Oj.load(res.to_s)
           end
-
-          say('.', :green, false) unless options[:silent]
         rescue StandardError
           failed.increment
-          say('.', :red, false) unless options[:silent]
+        ensure
+          processed.increment
+          progress.increment unless progress.finished?
         end
       end
 
@@ -109,10 +131,9 @@ module Mastodon
       pool.shutdown
       pool.wait_for_termination(20)
     ensure
+      progress.finish
       pool.shutdown
 
-      say unless options[:silent]
-
       case options[:format]
       when 'summary'
         stats_to_summary(stats, processed, failed, start_at)
diff --git a/lib/mastodon/emoji_cli.rb b/lib/mastodon/emoji_cli.rb
index 97a822e45..beac1b1fd 100644
--- a/lib/mastodon/emoji_cli.rb
+++ b/lib/mastodon/emoji_cli.rb
@@ -15,6 +15,7 @@ module Mastodon
     option :suffix
     option :overwrite, type: :boolean
     option :unlisted, type: :boolean
+    option :category
     desc 'import PATH', 'Import emoji from a TAR GZIP archive at PATH'
     long_desc <<-LONG_DESC
       Imports custom emoji from a TAR GZIP archive specified by PATH.
@@ -22,6 +23,9 @@ module Mastodon
       Existing emoji will be skipped unless the --overwrite option
       is provided, in which case they will be overwritten.
 
+      You can specify a --category under which the emojis will be
+      grouped together.
+
       With the --prefix option, a prefix can be added to all
       generated shortcodes. Likewise, the --suffix option controls
       the suffix of all shortcodes.
@@ -33,6 +37,7 @@ module Mastodon
       imported = 0
       skipped  = 0
       failed   = 0
+      category = options[:category] ? CustomEmojiCategory.find_or_create_by(name: options[:category]) : nil
 
       Gem::Package::TarReader.new(Zlib::GzipReader.open(path)) do |tar|
         tar.each do |entry|
@@ -50,6 +55,7 @@ module Mastodon
           custom_emoji.image = StringIO.new(entry.read)
           custom_emoji.image_file_name = File.basename(entry.full_name)
           custom_emoji.visible_in_picker = !options[:unlisted]
+          custom_emoji.category = category
 
           if custom_emoji.save
             imported += 1
diff --git a/lib/mastodon/feeds_cli.rb b/lib/mastodon/feeds_cli.rb
index fe11c3df4..578ea15c5 100644
--- a/lib/mastodon/feeds_cli.rb
+++ b/lib/mastodon/feeds_cli.rb
@@ -6,55 +6,32 @@ require_relative 'cli_helper'
 
 module Mastodon
   class FeedsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
 
     option :all, type: :boolean, default: false
-    option :background, type: :boolean, default: false
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     option :dry_run, type: :boolean, default: false
-    option :verbose, type: :boolean, default: false
     desc 'build [USERNAME]', 'Build home and list feeds for one or all users'
     long_desc <<-LONG_DESC
       Build home and list feeds that are stored in Redis from the database.
 
       With the --all option, all active users will be processed.
       Otherwise, a single user specified by USERNAME.
-
-      With the --background option, regeneration will be queued into Sidekiq,
-      and the command will exit as soon as possible.
-
-      With the --dry-run option, no work will be done.
-
-      With the --verbose option, when accounts are processed sequentially in the
-      foreground, the IDs of the accounts will be printed.
     LONG_DESC
     def build(username = nil)
       dry_run = options[:dry_run] ? '(DRY RUN)' : ''
 
       if options[:all] || username.nil?
-        processed = 0
-        queued    = 0
-
-        User.active.select(:id, :account_id).reorder(nil).find_in_batches do |users|
-          if options[:background]
-            RegenerationWorker.push_bulk(users.map(&:account_id)) unless options[:dry_run]
-            queued += users.size
-          else
-            users.each do |user|
-              RegenerationWorker.new.perform(user.account_id) unless options[:dry_run]
-              options[:verbose] ? say(user.account_id) : say('.', :green, false)
-              processed += 1
-            end
-          end
+        processed, = parallelize_with_progress(Account.joins(:user).merge(User.active)) do |account|
+          PrecomputeFeedService.new.call(account) unless options[:dry_run]
         end
 
-        if options[:background]
-          say("Scheduled feed regeneration for #{queued} accounts #{dry_run}", :green, true)
-        else
-          say
-          say("Regenerated feeds for #{processed} accounts #{dry_run}", :green, true)
-        end
+        say("Regenerated feeds for #{processed} accounts #{dry_run}", :green, true)
       elsif username.present?
         account = Account.find_local(username)
 
@@ -63,11 +40,7 @@ module Mastodon
           exit(1)
         end
 
-        if options[:background]
-          RegenerationWorker.perform_async(account.id) unless options[:dry_run]
-        else
-          RegenerationWorker.new.perform(account.id) unless options[:dry_run]
-        end
+        PrecomputeFeedService.new.call(account) unless options[:dry_run]
 
         say("OK #{dry_run}", :green, true)
       else
diff --git a/lib/mastodon/media_cli.rb b/lib/mastodon/media_cli.rb
index 6152d5a09..3b702f155 100644
--- a/lib/mastodon/media_cli.rb
+++ b/lib/mastodon/media_cli.rb
@@ -7,14 +7,15 @@ require_relative 'cli_helper'
 module Mastodon
   class MediaCLI < Thor
     include ActionView::Helpers::NumberHelper
+    include CLIHelper
 
     def self.exit_on_failure?
       true
     end
 
-    option :days, type: :numeric, default: 7
-    option :background, type: :boolean, default: false
-    option :verbose, type: :boolean, default: false
+    option :days, type: :numeric, default: 7, aliases: [:d]
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, default: false, aliases: [:v]
     option :dry_run, type: :boolean, default: false
     desc 'remove', 'Remove remote media files'
     long_desc <<-DESC
@@ -22,48 +23,116 @@ module Mastodon
 
       The --days option specifies how old media attachments have to be before
       they are removed. It defaults to 7 days.
+    DESC
+    def remove
+      time_ago = options[:days].days.ago
+      dry_run  = options[:dry_run] ? '(DRY RUN)' : ''
+
+      processed, aggregate = parallelize_with_progress(MediaAttachment.cached.where.not(remote_url: '').where('created_at < ?', time_ago)) do |media_attachment|
+        next if media_attachment.file.blank?
+
+        size = media_attachment.file_file_size
+
+        unless options[:dry_run]
+          media_attachment.file.destroy
+          media_attachment.save
+        end
+
+        size
+      end
+
+      say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)}) #{dry_run}", :green, true)
+    end
+
+    option :account, type: :string
+    option :domain, type: :string
+    option :status, type: :numeric
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, default: false, aliases: [:v]
+    option :dry_run, type: :boolean, default: false
+    option :force, type: :boolean, default: false
+    desc 'refresh', 'Fetch remote media files'
+    long_desc <<-DESC
+      Re-downloads media attachments from other servers. You must specify the
+      source of media attachments with one of the following options:
+
+      Use the --status option to download attachments from a specific status,
+      using the status local numeric ID.
 
-      With the --background option, instead of deleting the files sequentially,
-      they will be queued into Sidekiq and the command will exit as soon as
-      possible. In Sidekiq they will be processed with higher concurrency, but
-      it may impact other operations of the Mastodon server, and it may overload
-      the underlying file storage.
+      Use the --account option to download attachments from a specific account,
+      using username@domain handle of the account.
 
-      With the --dry-run option, no work will be done.
+      Use the --domain option to download attachments from a specific domain.
 
-      With the --verbose option, when media attachments are processed sequentially in the
-      foreground, the IDs of the media attachments will be printed.
+      By default, attachments that are believed to be already downloaded will
+      not be re-downloaded. To force re-download of every URL, use --force.
     DESC
-    def remove
-      time_ago  = options[:days].days.ago
-      queued    = 0
-      processed = 0
-      size      = 0
-      dry_run   = options[:dry_run] ? '(DRY RUN)' : ''
-
-      if options[:background]
-        MediaAttachment.where.not(remote_url: '').where.not(file_file_name: nil).where('created_at < ?', time_ago).select(:id, :file_file_size).reorder(nil).find_in_batches do |media_attachments|
-          queued += media_attachments.size
-          size   += media_attachments.reduce(0) { |sum, m| sum + (m.file_file_size || 0) }
-          Maintenance::UncacheMediaWorker.push_bulk(media_attachments.map(&:id)) unless options[:dry_run]
+    def refresh
+      dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
+
+      if options[:status]
+        scope = MediaAttachment.where(status_id: options[:status])
+      elsif options[:account]
+        username, domain = options[:account].split('@')
+        account = Account.find_remote(username, domain)
+
+        if account.nil?
+          say('No such account', :red)
+          exit(1)
         end
+
+        scope = MediaAttachment.where(account_id: account.id)
+      elsif options[:domain]
+        scope = MediaAttachment.joins(:account).merge(Account.by_domain_and_subdomains(options[:domain]))
       else
-        MediaAttachment.where.not(remote_url: '').where.not(file_file_name: nil).where('created_at < ?', time_ago).reorder(nil).find_in_batches do |media_attachments|
-          media_attachments.each do |m|
-            size += m.file_file_size || 0
-            Maintenance::UncacheMediaWorker.new.perform(m) unless options[:dry_run]
-            options[:verbose] ? say(m.id) : say('.', :green, false)
-            processed += 1
-          end
+        exit(1)
+      end
+
+      processed, aggregate = parallelize_with_progress(scope) do |media_attachment|
+        next if media_attachment.remote_url.blank? || (!options[:force] && media_attachment.file_file_name.present?)
+
+        unless options[:dry_run]
+          media_attachment.reset_file!
+          media_attachment.save
         end
+
+        media_attachment.file_file_size
       end
 
-      say
+      say("Downloaded #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
+    end
+
+    desc 'usage', 'Calculate disk space consumed by Mastodon'
+    def usage
+      say("Attachments:\t#{number_to_human_size(MediaAttachment.sum(:file_file_size))} (#{number_to_human_size(MediaAttachment.where(account: Account.local).sum(:file_file_size))} local)")
+      say("Custom emoji:\t#{number_to_human_size(CustomEmoji.sum(:image_file_size))} (#{number_to_human_size(CustomEmoji.local.sum(:image_file_size))} local)")
+      say("Preview cards:\t#{number_to_human_size(PreviewCard.sum(:image_file_size))}")
+      say("Avatars:\t#{number_to_human_size(Account.sum(:avatar_file_size))} (#{number_to_human_size(Account.local.sum(:avatar_file_size))} local)")
+      say("Headers:\t#{number_to_human_size(Account.sum(:header_file_size))} (#{number_to_human_size(Account.local.sum(:header_file_size))} local)")
+      say("Backups:\t#{number_to_human_size(Backup.sum(:dump_file_size))}")
+      say("Imports:\t#{number_to_human_size(Import.sum(:data_file_size))}")
+      say("Settings:\t#{number_to_human_size(SiteUpload.sum(:file_file_size))}")
+    end
+
+    desc 'lookup', 'Lookup where media is displayed by passing a media URL'
+    def lookup
+      prompt = TTY::Prompt.new
+
+      url = prompt.ask('Please enter a URL to the media to lookup:', required: true)
+
+      attachment_id = url
+                      .split('/')[0..-2]
+                      .grep(/\A\d+\z/)
+                      .join('')
 
-      if options[:background]
-        say("Scheduled the deletion of #{queued} media attachments (approx. #{number_to_human_size(size)}) #{dry_run}", :green, true)
+      if url.split('/')[0..-2].include? 'media_attachments'
+        model = MediaAttachment.find(attachment_id).status
+        prompt.say(ActivityPub::TagManager.instance.url_for(model))
+      elsif url.split('/')[0..-2].include? 'accounts'
+        model = Account.find(attachment_id)
+        prompt.say(ActivityPub::TagManager.instance.url_for(model))
       else
-        say("Removed #{processed} media attachments (approx. #{number_to_human_size(size)}) #{dry_run}", :green, true)
+        prompt.say('Not found')
       end
     end
   end
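For the `lookup` command, the attachment id is recovered from the numeric path segments of the URL; a worked example with a hypothetical URL:

    url = 'https://files.example.com/media_attachments/files/000/123/456/original/photo.png'

    url.split('/')[0..-2].grep(/\A\d+\z/).join('')  # => "000123456"
    # MediaAttachment.find('000123456') casts that to id 123456, and the command
    # then prints the URL of the status the attachment belongs to.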
diff --git a/lib/mastodon/preview_cards_cli.rb b/lib/mastodon/preview_cards_cli.rb
new file mode 100644
index 000000000..cf4407250
--- /dev/null
+++ b/lib/mastodon/preview_cards_cli.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'tty-prompt'
+require_relative '../../config/boot'
+require_relative '../../config/environment'
+require_relative 'cli_helper'
+
+module Mastodon
+  class PreviewCardsCLI < Thor
+    include ActionView::Helpers::NumberHelper
+    include CLIHelper
+
+    def self.exit_on_failure?
+      true
+    end
+
+    option :days, type: :numeric, default: 180
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    option :dry_run, type: :boolean, default: false
+    option :link, type: :boolean, default: false
+    desc 'remove', 'Remove preview cards'
+    long_desc <<-DESC
+      Removes local thumbnails for preview cards.
+
+      The --days option specifies how old preview cards have to be before
+      they are removed. It defaults to 180 days. Since preview cards will
+      not be re-fetched unless the link is re-posted after 2 weeks from
+      last time, it is not recommended to delete preview cards within the
+      last 14 days.
+
+      With the --link option, only link-type preview cards will be deleted,
+      leaving video and photo cards untouched.
+    DESC
+    def remove
+      time_ago = options[:days].days.ago
+      dry_run  = options[:dry_run] ? ' (DRY RUN)' : ''
+      link     = options[:link] ? 'link-type ' : ''
+      scope    = PreviewCard.cached
+      scope    = scope.where(type: :link) if options[:link]
+      scope    = scope.where('updated_at < ?', time_ago)
+
+      processed, aggregate = parallelize_with_progress(scope) do |preview_card|
+        next if preview_card.image.blank?
+
+        size = preview_card.image_file_size
+
+        unless options[:dry_run]
+          preview_card.image.destroy
+          preview_card.save
+        end
+
+        size
+      end
+
+      say("Removed #{processed} #{link}preview cards (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
+    end
+  end
+end
diff --git a/lib/mastodon/search_cli.rb b/lib/mastodon/search_cli.rb
index 42ad93f1e..8bd5f9543 100644
--- a/lib/mastodon/search_cli.rb
+++ b/lib/mastodon/search_cli.rb
@@ -6,6 +6,7 @@ require_relative 'cli_helper'
 
 module Mastodon
   class SearchCLI < Thor
+    option :processes, default: 2, aliases: [:p]
     desc 'deploy', 'Create or update an ElasticSearch index and populate it'
     long_desc <<~LONG_DESC
       If ElasticSearch is empty, this command will create the necessary indices
@@ -13,10 +14,28 @@ module Mastodon
 
       This command will also upgrade indices if the underlying schema has been
       changed since the last run.
+
+      With the --processes option, parallelize execution of the command. The
+      default is 2. If "auto" is specified, the number is automatically
+      derived from available CPUs.
     LONG_DESC
     def deploy
-      processed = Chewy::RakeHelper.upgrade
-      Chewy::RakeHelper.sync(except: processed)
+      processed = Chewy::RakeHelper.upgrade(parallel: processes)
+      Chewy::RakeHelper.sync(except: processed, parallel: processes)
+    end
+
+    private
+
+    def processes
+      return true if options[:processes] == 'auto'
+
+      num = options[:processes].to_i
+
+      if num < 2
+        nil
+      else
+        num
+      end
     end
   end
 end
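The private `processes` helper maps the --processes option onto the `parallel:` argument that Chewy::RakeHelper expects; an illustrative mapping and call (values other than the default are hypothetical):

    #   --processes auto  ->  parallel: true  (Chewy derives the process count from CPUs)
    #   --processes 1     ->  parallel: nil   (no parallelism)
    #   --processes 4     ->  parallel: 4
    upgraded = Chewy::RakeHelper.upgrade(parallel: 4)
    Chewy::RakeHelper.sync(except: upgraded, parallel: 4)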
diff --git a/lib/mastodon/statuses_cli.rb b/lib/mastodon/statuses_cli.rb
index 7f2fbfa85..eeedc026c 100644
--- a/lib/mastodon/statuses_cli.rb
+++ b/lib/mastodon/statuses_cli.rb
@@ -41,6 +41,7 @@ module Mastodon
             .where('id NOT IN (SELECT status_pins.status_id FROM status_pins WHERE statuses.id = status_id)')                                                                                                                      # Skip statuses that are pinned on profiles
             .where('id NOT IN (SELECT mentions.status_id FROM mentions WHERE statuses.id = mentions.status_id AND mentions.account_id IN (SELECT accounts.id FROM accounts WHERE domain IS NULL))')                                # Skip statuses that mention local accounts
            .where('id NOT IN (SELECT statuses1.in_reply_to_id FROM statuses AS statuses1 WHERE statuses.id = statuses1.in_reply_to_id)')                                                                                          # Skip statuses that have been replied to
+            .where('id NOT IN (SELECT bookmarks.status_id FROM bookmarks WHERE statuses.id = bookmarks.status_id)') # Skip statuses bookmarked by local users
             .where('id NOT IN (SELECT statuses1.reblog_of_id FROM statuses AS statuses1 WHERE statuses.id = statuses1.reblog_of_id AND statuses1.account_id IN (SELECT accounts.id FROM accounts WHERE accounts.domain IS NULL))') # Skip statuses reblogged by local accounts
             .where('account_id NOT IN (SELECT follows.target_account_id FROM follows WHERE statuses.account_id = follows.target_account_id)')                                                                                      # Skip accounts followed by local accounts
             .in_batches
diff --git a/lib/mastodon/version.rb b/lib/mastodon/version.rb
index 56b846a36..a8e5f0b79 100644
--- a/lib/mastodon/version.rb
+++ b/lib/mastodon/version.rb
@@ -5,33 +5,29 @@ module Mastodon
     module_function
 
     def major
-      2
+      3
     end
 
     def minor
-      9
-    end
-
-    def patch
       0
     end
 
-    def pre
-      nil
+    def patch
+      1
     end
 
     def flags
       ''
     end
 
-    def to_a
-      [major, minor, patch, pre].compact
-    end
-
     def suffix
       '+glitch'
     end
 
+    def to_a
+      [major, minor, patch].compact
+    end
+
     def to_s
       [to_a.join('.'), flags, suffix].join
     end
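With the values above, the pre-release component is gone from the version tuple, and `to_s` joins the dotted version, the (empty) flags, and the glitch suffix:

    Mastodon::Version.to_a # => [3, 0, 1]
    Mastodon::Version.to_s # => "3.0.1+glitch"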
diff --git a/lib/paperclip/audio_transcoder.rb b/lib/paperclip/audio_transcoder.rb
deleted file mode 100644
index 323ec7bfe..000000000
--- a/lib/paperclip/audio_transcoder.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module Paperclip
-  class AudioTranscoder < Paperclip::Processor
-    def make
-      max_aud_len = (ENV['MAX_AUDIO_LENGTH'] || 60.0).to_f
-
-      meta = ::Av.cli.identify(@file.path)
-      # {:length=>"0:00:02.14", :duration=>2.14, :audio_encode=>"mp3", :audio_bitrate=>"44100 Hz", :audio_channels=>"mono"}
-      if meta[:duration] > max_aud_len
-        raise Mastodon::ValidationError, "Audio uploads must be less than #{max_aud_len} seconds in length."
-      end
-      
-      final_file = Paperclip::Transcoder.make(file, options, attachment)
-      
-      attachment.instance.file_file_name    = 'media.mp4'
-      attachment.instance.file_content_type = 'video/mp4'
-      attachment.instance.type              = MediaAttachment.types[:video]
-
-      final_file
-    end
-  end
-end
diff --git a/lib/paperclip/gif_transcoder.rb b/lib/paperclip/gif_transcoder.rb
index cbab6fd99..64f12f963 100644
--- a/lib/paperclip/gif_transcoder.rb
+++ b/lib/paperclip/gif_transcoder.rb
@@ -1,5 +1,103 @@
 # frozen_string_literal: true
 
+class GifReader
+  attr_reader :animated
+
+  EXTENSION_LABELS = [0xf9, 0x01, 0xff].freeze
+  GIF_HEADERS      = %w(GIF87a GIF89a).freeze
+
+  class GifReaderException < StandardError; end
+
+  class UnknownImageType < GifReaderException; end
+
+  class CannotParseImage < GifReaderException; end
+
+  def self.animated?(path)
+    new(path).animated
+  rescue GifReaderException
+    false
+  end
+
+  def initialize(path, max_frames = 2)
+    @path      = path
+    @nb_frames = 0
+
+    File.open(path, 'rb') do |s|
+      raise UnknownImageType unless GIF_HEADERS.include?(s.read(6))
+
+      # Skip to "packed byte"
+      s.seek(4, IO::SEEK_CUR)
+
+      # "Packed byte" gives us the size of the GIF color table
+      packed_byte, = s.read(1).unpack('C')
+
+      # Skip background color and aspect ratio
+      s.seek(2, IO::SEEK_CUR)
+
+      if packed_byte & 0x80 != 0
+        # GIF uses a global color table, skip it
+        s.seek(3 * (1 << ((packed_byte & 0x07) + 1)), IO::SEEK_CUR)
+      end
+
+      # Now read data
+      while @nb_frames < max_frames
+        separator = s.read(1)
+
+        case separator
+        when ',' # Image block
+          @nb_frames += 1
+
+          # Skip to "packed byte"
+          s.seek(8, IO::SEEK_CUR)
+          packed_byte, = s.read(1).unpack('C')
+
+          if packed_byte & 0x80 != 0
+            # Image uses a local color table, skip it
+            s.seek(3 * (1 << ((packed_byte & 0x07) + 1)), IO::SEEK_CUR)
+          end
+
+          # Skip lzw min code size
+          raise CannotParseImage unless s.read(1).unpack('C')[0] >= 2
+
+          # Skip image data sub-blocks
+          skip_sub_blocks!(s)
+        when '!' # Extension block
+          skip_extension_block!(s)
+        when ';' # Trailer
+          break
+        else
+          raise CannotParseImage
+        end
+      end
+    end
+
+    @animated = @nb_frames > 1
+  end
+
+  private
+
+  def skip_extension_block!(file)
+    if EXTENSION_LABELS.include?(file.read(1).unpack('C')[0])
+      block_size, = file.read(1).unpack('C')
+      file.seek(block_size, IO::SEEK_CUR)
+    end
+
+    # Read until extension block end marker
+    skip_sub_blocks!(file)
+  end
+
+  # Skip sub-blocks up until block end marker
+  def skip_sub_blocks!(file)
+    loop do
+      size, = file.read(1).unpack('C')
+
+      break if size.zero?
+
+      file.seek(size, IO::SEEK_CUR)
+    end
+  end
+end
+
 module Paperclip
   # This transcoder is only to be used for the MediaAttachment model
   # to convert animated gifs to webm
@@ -19,8 +117,7 @@ module Paperclip
     private
 
     def needs_convert?
-      num_frames = identify('-format %n :file', file: file.path).to_i
-      options[:style] == :original && num_frames > 1
+      options[:style] == :original && GifReader.animated?(file.path)
     end
   end
 end
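GifReader.animated? replaces the `identify -format %n` shell-out removed above: it reads the GIF header directly, uses the packed byte to skip the global (and any local) color tables, counts image blocks, and stops once it has seen two frames. Parse failures are rescued and treated as not animated. A usage sketch with placeholder paths:

    GifReader.animated?('uploads/original.gif')  # => true once a second image block is found
    GifReader.animated?('uploads/truncated.gif') # => false (GifReaderException is rescued)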
diff --git a/lib/paperclip/lazy_thumbnail.rb b/lib/paperclip/lazy_thumbnail.rb
index 542c17fb2..10b14860c 100644
--- a/lib/paperclip/lazy_thumbnail.rb
+++ b/lib/paperclip/lazy_thumbnail.rb
@@ -9,8 +9,8 @@ module Paperclip
         min_side = [@current_geometry.width, @current_geometry.height].min.to_i
         options[:geometry] = "#{min_side}x#{min_side}#" if @target_geometry.square? && min_side < @target_geometry.width
       elsif options[:pixels]
-        width  = Math.sqrt(options[:pixels] * (@current_geometry.width.to_f / @current_geometry.height.to_f)).round.to_i
-        height = Math.sqrt(options[:pixels] * (@current_geometry.height.to_f / @current_geometry.width.to_f)).round.to_i
+        width  = Math.sqrt(options[:pixels] * (@current_geometry.width.to_f / @current_geometry.height)).round.to_i
+        height = Math.sqrt(options[:pixels] * (@current_geometry.height.to_f / @current_geometry.width)).round.to_i
         options[:geometry] = "#{width}x#{height}>"
       end
 
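The change above only drops redundant `.to_f` casts on the denominators; the computed geometry is unchanged. The pixel budget is split so that width is roughly sqrt(pixels * aspect) and height roughly sqrt(pixels / aspect). A worked example with assumed numbers:

    pixels = 160_000              # assumed budget from options[:pixels]
    w, h   = 800.0, 600.0         # assumed source geometry (4:3)

    width  = Math.sqrt(pixels * (w / h)).round.to_i # => 462
    height = Math.sqrt(pixels * (h / w)).round.to_i # => 346
    # 462 * 346 = 159_852, just under the 160_000 pixel budget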
diff --git a/lib/paperclip/type_corrector.rb b/lib/paperclip/type_corrector.rb
new file mode 100644
index 000000000..0b0c10a56
--- /dev/null
+++ b/lib/paperclip/type_corrector.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'mime/types/columnar'
+
+module Paperclip
+  class TypeCorrector < Paperclip::Processor
+    def make
+      target_extension = options[:format]
+      extension        = File.extname(attachment.instance.file_file_name).delete('.') # drop the dot so it compares with options[:format]
+
+      return @file unless options[:style] == :original && target_extension && extension != target_extension
+
+      attachment.instance.file_content_type = options[:content_type] || attachment.instance.file_content_type
+      attachment.instance.file_file_name    = File.basename(attachment.instance.file_file_name, '.*') + '.' + target_extension
+
+      @file
+    end
+  end
+end
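TypeCorrector never touches the file data; for the :original style it only renames the stored file and adjusts the content type so they match the format a transcoder was asked to produce. A sketch of the rename (file name and options are hypothetical):

    name             = 'clip.webm' # hypothetical file_file_name on the attachment's instance
    target_extension = 'mp4'       # hypothetical options[:format]

    File.basename(name, '.*') + '.' + target_extension # => "clip.mp4"
    # file_content_type would be set to options[:content_type], e.g. "video/mp4"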
diff --git a/lib/paperclip/video_transcoder.rb b/lib/paperclip/video_transcoder.rb
index c3504c17c..66f7feda5 100644
--- a/lib/paperclip/video_transcoder.rb
+++ b/lib/paperclip/video_transcoder.rb
@@ -6,7 +6,9 @@ module Paperclip
   class VideoTranscoder < Paperclip::Processor
     def make
       meta = ::Av.cli.identify(@file.path)
+
       attachment.instance.type = MediaAttachment.types[:gifv] unless meta[:audio_encode]
+      options[:format] = File.extname(attachment.instance.file_file_name)[1..-1] if options[:keep_same_format]
 
       Paperclip::Transcoder.make(file, options, attachment)
     end
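The new `keep_same_format` option pins the output format to the upload's existing extension before `Paperclip::Transcoder.make` runs, so a file keeps its container instead of being converted to the style's default. The extension is taken straight from the stored file name:

    File.extname('video.mp4')[1..-1] # => "mp4" (hypothetical file name)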
diff --git a/lib/tasks/mastodon.rake b/lib/tasks/mastodon.rake
index ee9657b0e..2e92e8ded 100644
--- a/lib/tasks/mastodon.rake
+++ b/lib/tasks/mastodon.rake
@@ -135,7 +135,7 @@ namespace :mastodon do
       prompt.say "\n"
 
       if prompt.yes?('Do you want to store uploaded files on the cloud?', default: false)
-        case prompt.select('Provider', ['Amazon S3', 'Wasabi', 'Minio'])
+        case prompt.select('Provider', ['Amazon S3', 'Wasabi', 'Minio', 'Google Cloud Storage'])
         when 'Amazon S3'
           env['S3_ENABLED']  = 'true'
           env['S3_PROTOCOL'] = 'https'
@@ -217,6 +217,34 @@ namespace :mastodon do
             q.required true
             q.modify :strip
           end
+        when 'Google Cloud Storage'
+          env['S3_ENABLED']             = 'true'
+          env['S3_PROTOCOL']            = 'https'
+          env['S3_HOSTNAME']            = 'storage.googleapis.com'
+          env['S3_ENDPOINT']            = 'https://storage.googleapis.com'
+          env['S3_MULTIPART_THRESHOLD'] = 50.megabytes
+
+          env['S3_BUCKET'] = prompt.ask('GCS bucket name:') do |q|
+            q.required true
+            q.default "files.#{env['LOCAL_DOMAIN']}"
+            q.modify :strip
+          end
+
+          env['S3_REGION'] = prompt.ask('GCS region:') do |q|
+            q.required true
+            q.default 'us-west1'
+            q.modify :strip
+          end
+
+          env['AWS_ACCESS_KEY_ID'] = prompt.ask('GCS access key:') do |q|
+            q.required true
+            q.modify :strip
+          end
+
+          env['AWS_SECRET_ACCESS_KEY'] = prompt.ask('GCS secret key:') do |q|
+            q.required true
+            q.modify :strip
+          end
         end
 
         if prompt.yes?('Do you want to access the uploaded files from your own domain?')
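Google Cloud Storage is driven through its S3-compatible interoperability endpoint, so the answers above land in the same S3_* / AWS_* variables as the other providers, with GCS HMAC keys standing in for the AWS credentials. Assuming placeholder answers, the generated configuration would contain entries along these lines (50.megabytes serializes as 52428800):

    S3_ENABLED=true
    S3_PROTOCOL=https
    S3_HOSTNAME=storage.googleapis.com
    S3_ENDPOINT=https://storage.googleapis.com
    S3_MULTIPART_THRESHOLD=52428800
    S3_BUCKET=files.example.com
    S3_REGION=us-west1
    AWS_ACCESS_KEY_ID=<GCS HMAC access key>
    AWS_SECRET_ACCESS_KEY=<GCS HMAC secret key>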
diff --git a/lib/tasks/repo.rake b/lib/tasks/repo.rake
index 8ceec3085..d1de17b7c 100644
--- a/lib/tasks/repo.rake
+++ b/lib/tasks/repo.rake
@@ -76,4 +76,19 @@ namespace :repo do
       tmp.unlink
     end
   end
+
+  task check_locales_files: :environment do
+    pastel = Pastel.new
+
+    missing_yaml_files = I18n.available_locales.reject { |locale| File.exist?(Rails.root.join('config', 'locales', "#{locale}.yml")) }
+    missing_json_files = I18n.available_locales.reject { |locale| File.exist?(Rails.root.join('app', 'javascript', 'mastodon', 'locales', "#{locale}.json")) }
+
+    if missing_json_files.empty? && missing_yaml_files.empty?
+      puts pastel.green('OK')
+    else
+      puts pastel.red("Missing YAML files: #{pastel.bold(missing_yaml_files.join(', '))}") unless missing_yaml_files.empty?
+      puts pastel.red("Missing JSON files: #{pastel.bold(missing_json_files.join(', '))}") unless missing_json_files.empty?
+      exit(1)
+    end
+  end
 end
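The new task compares I18n.available_locales against both the server-side YAML translations and the web client's JSON translations, prints OK when everything is present, and otherwise lists the missing files and exits with a non-zero status so it can serve as a CI check. Assuming the usual bundler setup, it is run as:

    bundle exec rake repo:check_locales_files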