-rw-r--r--  Gemfile                                                        3
-rw-r--r--  Gemfile.lock                                                   7
-rw-r--r--  app/models/media_attachment.rb                                 1
-rw-r--r--  app/models/preview_card.rb                                     2
-rw-r--r--  app/workers/maintenance/destroy_media_worker.rb               14
-rw-r--r--  app/workers/maintenance/redownload_account_media_worker.rb    16
-rw-r--r--  app/workers/maintenance/uncache_media_worker.rb               18
-rw-r--r--  app/workers/maintenance/uncache_preview_worker.rb             18
-rw-r--r--  lib/mastodon/accounts_cli.rb                                  188
-rw-r--r--  lib/mastodon/cache_cli.rb                                      41
-rw-r--r--  lib/mastodon/cli_helper.rb                                     49
-rw-r--r--  lib/mastodon/domains_cli.rb                                    27
-rw-r--r--  lib/mastodon/feeds_cli.rb                                      42
-rw-r--r--  lib/mastodon/media_cli.rb                                     143
-rw-r--r--  lib/mastodon/preview_cards_cli.rb                              59
15 files changed, 261 insertions, 367 deletions
diff --git a/Gemfile b/Gemfile
index af187d364..af85b021a 100644
--- a/Gemfile
+++ b/Gemfile
@@ -75,7 +75,8 @@ gem 'rails-settings-cached', '~> 0.6'
 gem 'redis', '~> 4.1', require: ['redis', 'redis/connection/hiredis']
 gem 'mario-redis-lock', '~> 1.2', require: 'redis_lock'
 gem 'rqrcode', '~> 0.10'
-gem 'sanitize', '~> 5.0'
+gem 'ruby-progressbar', '~> 1.10'
+gem 'sanitize', '~> 5.1'
 gem 'sidekiq', '~> 5.2'
 gem 'sidekiq-scheduler', '~> 3.0'
 gem 'sidekiq-unique-jobs', '~> 6.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index e108bf497..2df92c634 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -808,10 +808,11 @@ DEPENDENCIES
   rqrcode (~> 0.10)
   rspec-rails (~> 3.8)
   rspec-sidekiq (~> 3.0)
-  rubocop (~> 0.71)
-  rubocop-rails (~> 2.0)
+  rubocop (~> 0.74)
+  rubocop-rails (~> 2.3)
   ruby-bbcode (~> 2.0)
-  sanitize (~> 5.0)
+  ruby-progressbar (~> 1.10)
+  sanitize (~> 5.1)
   sidekiq (~> 5.2)
   sidekiq-bulk (~> 0.2.0)
   sidekiq-scheduler (~> 3.0)
diff --git a/app/models/media_attachment.rb b/app/models/media_attachment.rb
index 747836b59..3df69ea66 100644
--- a/app/models/media_attachment.rb
+++ b/app/models/media_attachment.rb
@@ -146,6 +146,7 @@ class MediaAttachment < ApplicationRecord
   scope :unattached, -> { where(status_id: nil, scheduled_status_id: nil) }
   scope :local,      -> { where(remote_url: '') }
   scope :remote,     -> { where.not(remote_url: '') }
+  scope :cached,     -> { remote.where.not(file_file_name: nil) }
 
   default_scope { order(id: :asc) }
 
diff --git a/app/models/preview_card.rb b/app/models/preview_card.rb
index a792b352b..9d6c1938a 100644
--- a/app/models/preview_card.rb
+++ b/app/models/preview_card.rb
@@ -43,6 +43,8 @@ class PreviewCard < ApplicationRecord
   validates_attachment_size :image, less_than: LIMIT
   remotable_attachment :image, LIMIT
 
+  scope :cached, -> { where.not(image_file_name: [nil, '']) }
+
   before_save :extract_dimensions, if: :link?
 
   def save_with_optional_image!
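The two `cached` scopes introduced above give the maintenance tasks a single way to select records whose files are still stored locally. A minimal console sketch of how they combine with the age cut-offs used elsewhere in this diff (the thresholds shown are the defaults of the respective removal tasks, used here only for illustration):

    # Remote media attachments that still have a locally cached file,
    # older than the 7-day default of the media removal task
    MediaAttachment.cached.where('created_at < ?', 7.days.ago).count

    # Preview cards that still have a stored thumbnail, older than the
    # 180-day default of the new preview card removal task
    PreviewCard.cached.where('updated_at < ?', 180.days.ago).count
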
diff --git a/app/workers/maintenance/destroy_media_worker.rb b/app/workers/maintenance/destroy_media_worker.rb
deleted file mode 100644
index cde33d6d7..000000000
--- a/app/workers/maintenance/destroy_media_worker.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-class Maintenance::DestroyMediaWorker
-  include Sidekiq::Worker
-
-  sidekiq_options queue: 'pull'
-
-  def perform(media_attachment_id)
-    media = media_attachment_id.is_a?(MediaAttachment) ? media_attachment_id : MediaAttachment.find(media_attachment_id)
-    media.destroy
-  rescue ActiveRecord::RecordNotFound
-    true
-  end
-end
diff --git a/app/workers/maintenance/redownload_account_media_worker.rb b/app/workers/maintenance/redownload_account_media_worker.rb
deleted file mode 100644
index 6afbe6e19..000000000
--- a/app/workers/maintenance/redownload_account_media_worker.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-class Maintenance::RedownloadAccountMediaWorker
-  include Sidekiq::Worker
-
-  sidekiq_options queue: 'pull', retry: false
-
-  def perform(account_id)
-    account = account_id.is_a?(Account) ? account_id : Account.find(account_id)
-    account.reset_avatar!
-    account.reset_header!
-    account.save
-  rescue ActiveRecord::RecordNotFound
-    true
-  end
-end
diff --git a/app/workers/maintenance/uncache_media_worker.rb b/app/workers/maintenance/uncache_media_worker.rb
deleted file mode 100644
index 4bc62ef75..000000000
--- a/app/workers/maintenance/uncache_media_worker.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-class Maintenance::UncacheMediaWorker
-  include Sidekiq::Worker
-
-  sidekiq_options queue: 'pull'
-
-  def perform(media_attachment_id)
-    media = media_attachment_id.is_a?(MediaAttachment) ? media_attachment_id : MediaAttachment.find(media_attachment_id)
-
-    return if media.file.blank?
-
-    media.file.destroy
-    media.save
-  rescue ActiveRecord::RecordNotFound
-    true
-  end
-end
diff --git a/app/workers/maintenance/uncache_preview_worker.rb b/app/workers/maintenance/uncache_preview_worker.rb
deleted file mode 100644
index 810ffd8cc..000000000
--- a/app/workers/maintenance/uncache_preview_worker.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-class Maintenance::UncachePreviewWorker
-  include Sidekiq::Worker
-
-  sidekiq_options queue: 'pull'
-
-  def perform(preview_card_id)
-    preview_card = PreviewCard.find(preview_card_id)
-
-    return if preview_card.image.blank?
-
-    preview_card.image.destroy
-    preview_card.save
-  rescue ActiveRecord::RecordNotFound
-    true
-  end
-end
diff --git a/lib/mastodon/accounts_cli.rb b/lib/mastodon/accounts_cli.rb
index fb1aca639..423980fc0 100644
--- a/lib/mastodon/accounts_cli.rb
+++ b/lib/mastodon/accounts_cli.rb
@@ -7,6 +7,8 @@ require_relative 'cli_helper'
 
 module Mastodon
   class AccountsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
@@ -26,18 +28,20 @@ module Mastodon
       if options[:all]
         processed = 0
         delay     = 0
+        scope     = Account.local.without_suspended
+        progress  = create_progress_bar(scope.count)
 
-        Account.local.without_suspended.find_in_batches do |accounts|
+        scope.find_in_batches do |accounts|
           accounts.each do |account|
             rotate_keys_for_account(account, delay)
+            progress.increment
             processed += 1
-            say('.', :green, false)
           end
 
           delay += 5.minutes
         end
 
-        say
+        progress.finish
         say("OK, rotated keys for #{processed} accounts", :green)
       elsif username.present?
         rotate_keys_for_account(Account.find_local(username))
@@ -206,6 +210,8 @@ module Mastodon
       say('OK', :green)
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     option :dry_run, type: :boolean
     desc 'cull', 'Remove remote accounts that no longer exist'
     long_desc <<-LONG_DESC
@@ -215,63 +221,45 @@ module Mastodon
 
       Accounts that have had confirmed activity within the last week
       are excluded from the checks.
-
-      Domains that are unreachable are not checked.
-
-      With the --dry-run option, no deletes will actually be carried
-      out.
     LONG_DESC
     def cull
       skip_threshold = 7.days.ago
-      culled         = 0
-      dry_run_culled = []
-      skip_domains   = Set.new
       dry_run        = options[:dry_run] ? ' (DRY RUN)' : ''
+      skip_domains   = Concurrent::Set.new
 
-      Account.remote.where(protocol: :activitypub).partitioned.find_each do |account|
-        next if account.updated_at >= skip_threshold || (account.last_webfingered_at.present? && account.last_webfingered_at >= skip_threshold)
+      processed, culled = parallelize_with_progress(Account.remote.where(protocol: :activitypub).partitioned) do |account|
+        next if account.updated_at >= skip_threshold || (account.last_webfingered_at.present? && account.last_webfingered_at >= skip_threshold) || skip_domains.include?(account.domain)
 
         code = 0
-        unless skip_domains.include?(account.domain)
-          begin
-            code = Request.new(:head, account.uri).perform(&:code)
-          rescue HTTP::ConnectionError
-            skip_domains << account.domain
-          rescue StandardError
-            next
-          end
+
+        begin
+          code = Request.new(:head, account.uri).perform(&:code)
+        rescue HTTP::ConnectionError
+          skip_domains << account.domain
         end
 
         if [404, 410].include?(code)
-          if options[:dry_run]
-            dry_run_culled << account.acct
-          else
-            SuspendAccountService.new.call(account, destroy: true)
-          end
-          culled += 1
-          say('+', :green, false)
+          SuspendAccountService.new.call(account, destroy: true) unless options[:dry_run]
+          1
         else
-          account.touch # Touch account even during dry run to avoid getting the account into the window again
-          say('.', nil, false)
+          # Touch account even during dry run to avoid getting the account into the window again
+          account.touch
         end
       end
 
-      say
-      say("Removed #{culled} accounts. #{skip_domains.size} servers skipped#{dry_run}", skip_domains.empty? ? :green : :yellow)
+      say("Visited #{processed} accounts, removed #{culled}#{dry_run}", :green)
 
       unless skip_domains.empty?
-        say('The following servers were not available during the check:', :yellow)
+        say('The following domains were not available during the check:', :yellow)
         skip_domains.each { |domain| say('    ' + domain) }
       end
-
-      unless dry_run_culled.empty?
-        say('The following accounts would have been deleted:', :green)
-        dry_run_culled.each { |account| say('    ' + account) }
-      end
     end
 
     option :all, type: :boolean
     option :domain
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    option :dry_run, type: :boolean
     desc 'refresh [USERNAME]', 'Fetch remote user data and files'
     long_desc <<-LONG_DESC
       Fetch remote user data and files for one or multiple accounts.
@@ -280,21 +268,23 @@ module Mastodon
       Through the --domain option, this can be narrowed down to a
       specific domain only. Otherwise, a single remote account must
       be specified with USERNAME.
-
-      All processing is done in the background through Sidekiq.
     LONG_DESC
     def refresh(username = nil)
+      dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
+
       if options[:domain] || options[:all]
-        queued = 0
         scope  = Account.remote
         scope  = scope.where(domain: options[:domain]) if options[:domain]
 
-        scope.select(:id).reorder(nil).find_in_batches do |accounts|
-          Maintenance::RedownloadAccountMediaWorker.push_bulk(accounts.map(&:id))
-          queued += accounts.size
+        processed, = parallelize_with_progress(scope) do |account|
+          next if options[:dry_run]
+
+          account.reset_avatar!
+          account.reset_header!
+          account.save
         end
 
-        say("Scheduled refreshment of #{queued} accounts", :green, true)
+        say("Refreshed #{processed} accounts#{dry_run}", :green, true)
       elsif username.present?
         username, domain = username.split('@')
         account = Account.find_remote(username, domain)
@@ -304,72 +294,53 @@ module Mastodon
           exit(1)
         end
 
-        Maintenance::RedownloadAccountMediaWorker.perform_async(account.id)
-        say('OK', :green)
+        unless options[:dry_run]
+          account.reset_avatar!
+          account.reset_header!
+          account.save
+        end
+
+        say("OK#{dry_run}", :green)
       else
         say('No account(s) given', :red)
         exit(1)
       end
     end
 
-    desc 'follow ACCT', 'Make all local accounts follow account specified by ACCT'
-    long_desc <<-LONG_DESC
-      Make all local accounts follow an account specified by ACCT. ACCT can be
-      a simple username, in case of a local user. It can also be in the format
-      username@domain, in case of a remote user.
-    LONG_DESC
-    def follow(acct)
-      target_account = ResolveAccountService.new.call(acct)
-      processed      = 0
-      failed         = 0
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    desc 'follow USERNAME', 'Make all local accounts follow account specified by USERNAME'
+    def follow(username)
+      target_account = Account.find_local(username)
 
       if target_account.nil?
-        say("Target account (#{acct}) could not be resolved", :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      Account.local.without_suspended.find_each do |account|
-        begin
-          FollowService.new.call(account, target_account)
-          processed += 1
-          say('.', :green, false)
-        rescue StandardError
-          failed += 1
-          say('.', :red, false)
-        end
+      processed, = parallelize_with_progress(Account.local.without_suspended) do |account|
+        FollowService.new.call(account, target_account)
       end
 
-      say("OK, followed target from #{processed} accounts, skipped #{failed}", :green)
+      say("OK, followed target from #{processed} accounts", :green)
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     desc 'unfollow ACCT', 'Make all local accounts unfollow account specified by ACCT'
-    long_desc <<-LONG_DESC
-      Make all local accounts unfollow an account specified by ACCT. ACCT can be
-      a simple username, in case of a local user. It can also be in the format
-      username@domain, in case of a remote user.
-    LONG_DESC
     def unfollow(acct)
       target_account = Account.find_remote(*acct.split('@'))
-      processed      = 0
-      failed         = 0
 
       if target_account.nil?
-        say("Target account (#{acct}) was not found", :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      target_account.followers.local.find_each do |account|
-        begin
-          UnfollowService.new.call(account, target_account)
-          processed += 1
-          say('.', :green, false)
-        rescue StandardError
-          failed += 1
-          say('.', :red, false)
-        end
+      processed, = parallelize_with_progress(target_account.followers.local) do |account|
+        UnfollowService.new.call(account, target_account)
       end
 
-      say("OK, unfollowed target from #{processed} accounts, skipped #{failed}", :green)
+      say("OK, unfollowed target from #{processed} accounts", :green)
     end
 
     option :follows, type: :boolean, default: false
@@ -392,51 +363,50 @@ module Mastodon
       account = Account.find_local(username)
 
       if account.nil?
-        say('No user with such username', :red)
+        say('No such account', :red)
         exit(1)
       end
 
-      if options[:follows]
-        processed = 0
-        failed    = 0
+      total     = 0
+      total    += Account.where(id: ::Follow.where(account: account).select(:target_account_id)).count if options[:follows]
+      total    += Account.where(id: ::Follow.where(target_account: account).select(:account_id)).count if options[:followers]
+      progress  = create_progress_bar(total)
+      processed = 0
 
-        say("Unfollowing #{account.username}'s followees, this might take a while...")
+      if options[:follows]
+        scope = Account.where(id: ::Follow.where(account: account).select(:target_account_id))
 
-        Account.where(id: ::Follow.where(account: account).select(:target_account_id)).find_each do |target_account|
+        scope.find_each do |target_account|
           begin
             UnfollowService.new.call(account, target_account)
+          rescue => e
+            progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+          ensure
+            progress.increment
             processed += 1
-            say('.', :green, false)
-          rescue StandardError
-            failed += 1
-            say('.', :red, false)
           end
         end
 
         BootstrapTimelineWorker.perform_async(account.id)
-
-        say("OK, unfollowed #{processed} followees, skipped #{failed}", :green)
       end
 
       if options[:followers]
-        processed = 0
-        failed    = 0
-
-        say("Removing #{account.username}'s followers, this might take a while...")
+        scope = Account.where(id: ::Follow.where(target_account: account).select(:account_id))
 
-        Account.where(id: ::Follow.where(target_account: account).select(:account_id)).find_each do |target_account|
+        scope.find_each do |target_account|
           begin
             UnfollowService.new.call(target_account, account)
+          rescue => e
+            progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+          ensure
+            progress.increment
             processed += 1
-            say('.', :green, false)
-          rescue StandardError
-            failed += 1
-            say('.', :red, false)
           end
         end
-
-        say("OK, removed #{processed} followers, skipped #{failed}", :green)
       end
+
+      progress.finish
+      say("Processed #{processed} relationships", :green, true)
     end
 
     option :number, type: :numeric, aliases: [:n]
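The rewritten cull above leans on a convention of the helper added later in this diff (lib/mastodon/cli_helper.rb): when the per-record block returns an Integer, the helper sums it into the second element of its return tuple, which is how `culled` is counted. A compact sketch of that convention; the scope and the `account_gone?` predicate are placeholders, not code from this commit:

    processed, culled = parallelize_with_progress(Account.remote) do |account|
      next unless account_gone?(account) # hypothetical check standing in for the HTTP probe

      1 # Integer return values are summed into `culled`
    end
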
diff --git a/lib/mastodon/cache_cli.rb b/lib/mastodon/cache_cli.rb
index e9b6667b3..803404c34 100644
--- a/lib/mastodon/cache_cli.rb
+++ b/lib/mastodon/cache_cli.rb
@@ -6,6 +6,8 @@ require_relative 'cli_helper'
 
 module Mastodon
   class CacheCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
@@ -15,5 +17,44 @@ module Mastodon
       Rails.cache.clear
       say('OK', :green)
     end
+
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    desc 'recount TYPE', 'Update hard-cached counters'
+    long_desc <<~LONG_DESC
+      Update hard-cached counters of TYPE by counting referenced
+      records from scratch. TYPE can be "accounts" or "statuses".
+
+      It may take a very long time to finish, depending on the
+      size of the database.
+    LONG_DESC
+    def recount(type)
+      case type
+      when 'accounts'
+        processed, = parallelize_with_progress(Account.local.includes(:account_stat)) do |account|
+          account_stat                 = account.account_stat
+          account_stat.following_count = account.active_relationships.count
+          account_stat.followers_count = account.passive_relationships.count
+          account_stat.statuses_count  = account.statuses.where.not(visibility: :direct).count
+
+          account_stat.save if account_stat.changed?
+        end
+      when 'statuses'
+        processed, = parallelize_with_progress(Status.includes(:status_stat)) do |status|
+          status_stat                  = status.status_stat
+          status_stat.replies_count    = status.replies.where.not(visibility: :direct).count
+          status_stat.reblogs_count    = status.reblogs.count
+          status_stat.favourites_count = status.favourites.count
+
+          status_stat.save if status_stat.changed?
+        end
+      else
+        say("Unknown type: #{type}", :red)
+        exit(1)
+      end
+
+      say
+      say("OK, recounted #{processed} records", :green)
+    end
   end
 end
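A quick way to spot-check `recount accounts` from a Rails console, assuming a local account exists; the username is a placeholder:

    account = Account.find_local('alice') # placeholder username
    stat    = account.account_stat

    # After a recount, the hard-cached counters should match fresh counts
    stat.statuses_count  == account.statuses.where.not(visibility: :direct).count
    stat.followers_count == account.passive_relationships.count
    stat.following_count == account.active_relationships.count
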
diff --git a/lib/mastodon/cli_helper.rb b/lib/mastodon/cli_helper.rb
index 2f807d08c..da7348349 100644
--- a/lib/mastodon/cli_helper.rb
+++ b/lib/mastodon/cli_helper.rb
@@ -7,3 +7,52 @@ ActiveRecord::Base.logger    = dev_null
 ActiveJob::Base.logger       = dev_null
 HttpLog.configuration.logger = dev_null
 Paperclip.options[:log]      = false
+
+module Mastodon
+  module CLIHelper
+    def create_progress_bar(total = nil)
+      ProgressBar.create(total: total, format: '%c/%u |%b%i| %e')
+    end
+
+    def parallelize_with_progress(scope)
+      ActiveRecord::Base.configurations[Rails.env]['pool'] = options[:concurrency]
+
+      progress  = create_progress_bar(scope.count)
+      pool      = Concurrent::FixedThreadPool.new(options[:concurrency])
+      total     = Concurrent::AtomicFixnum.new(0)
+      aggregate = Concurrent::AtomicFixnum.new(0)
+
+      scope.reorder(nil).find_in_batches do |items|
+        futures = []
+
+        items.each do |item|
+          futures << Concurrent::Future.execute(executor: pool) do
+            ActiveRecord::Base.connection_pool.with_connection do
+              begin
+                progress.log("Processing #{item.id}") if options[:verbose]
+
+                result = yield(item)
+                aggregate.increment(result) if result.is_a?(Integer)
+              rescue => e
+                progress.log pastel.red("Error processing #{item.id}: #{e}")
+              ensure
+                progress.increment
+              end
+            end
+          end
+        end
+
+        total.increment(items.size)
+        futures.map(&:value)
+      end
+
+      progress.finish
+
+      [total.value, aggregate.value]
+    end
+
+    def pastel
+      @pastel ||= Pastel.new
+    end
+  end
+end
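The helper above is what lets each command reduce to a scope plus a per-record block: it sizes the database connection pool to --concurrency, runs the block on a fixed thread pool, logs per-record errors through the progress bar, and returns a (processed, aggregate) tuple. A minimal sketch of a hypothetical command built on it; ExampleCLI and its `touch` task are illustrations, not part of this commit:

    # frozen_string_literal: true

    require_relative 'cli_helper'

    module Mastodon
      class ExampleCLI < Thor
        include CLIHelper

        option :concurrency, type: :numeric, default: 5, aliases: [:c]
        option :verbose, type: :boolean, aliases: [:v]
        option :dry_run, type: :boolean
        desc 'touch', 'Touch all local accounts'
        def touch
          dry_run = options[:dry_run] ? ' (DRY RUN)' : ''

          # The block runs across --concurrency threads; errors are logged
          # and counted by the helper, not raised out of the command.
          processed, = parallelize_with_progress(Account.local) do |account|
            account.touch unless options[:dry_run]
          end

          say("Touched #{processed} accounts#{dry_run}", :green)
        end
      end
    end
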
diff --git a/lib/mastodon/domains_cli.rb b/lib/mastodon/domains_cli.rb
index 17cafd1bc..c612c2d72 100644
--- a/lib/mastodon/domains_cli.rb
+++ b/lib/mastodon/domains_cli.rb
@@ -7,10 +7,14 @@ require_relative 'cli_helper'
 
 module Mastodon
   class DomainsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
 
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     option :dry_run, type: :boolean
     option :whitelist_mode, type: :boolean
     desc 'purge [DOMAIN]', 'Remove accounts from a DOMAIN without a trace'
@@ -24,7 +28,6 @@ module Mastodon
       are removed from the database.
     LONG_DESC
     def purge(domain = nil)
-      removed = 0
       dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
 
       scope = begin
@@ -38,25 +41,22 @@ module Mastodon
         end
       end
 
-      scope.find_each do |account|
+      processed, = parallelize_with_progress(scope) do |account|
         SuspendAccountService.new.call(account, destroy: true) unless options[:dry_run]
-        removed += 1
-        say('.', :green, false)
       end
 
       DomainBlock.where(domain: domain).destroy_all unless options[:dry_run]
 
-      say
-      say("Removed #{removed} accounts#{dry_run}", :green)
+      say("Removed #{processed} accounts#{dry_run}", :green)
 
       custom_emojis = CustomEmoji.where(domain: domain)
       custom_emojis_count = custom_emojis.count
       custom_emojis.destroy_all unless options[:dry_run]
+
       say("Removed #{custom_emojis_count} custom emojis", :green)
     end
 
     option :concurrency, type: :numeric, default: 50, aliases: [:c]
-    option :silent, type: :boolean, default: false, aliases: [:s]
     option :format, type: :string, default: 'summary', aliases: [:f]
     option :exclude_suspended, type: :boolean, default: false, aliases: [:x]
     desc 'crawl [START]', 'Crawl all known peers, optionally beginning at START'
@@ -69,8 +69,6 @@ module Mastodon
       The --concurrency (-c) option controls the number of threads performing HTTP
       requests at the same time. More threads means the crawl may complete faster.
 
-      The --silent (-s) option controls progress output.
-
       The --format (-f) option controls how the data is displayed at the end. By
       default (`summary`), a summary of the statistics is returned. The other options
       are `domains`, which returns a newline-delimited list of all discovered peers,
@@ -87,6 +85,7 @@ module Mastodon
       start_at        = Time.now.to_f
       seed            = start ? [start] : Account.remote.domains
       blocked_domains = Regexp.new('\\.?' + DomainBlock.where(severity: 1).pluck(:domain).join('|') + '$')
+      progress        = create_progress_bar
 
       pool = Concurrent::ThreadPoolExecutor.new(min_threads: 0, max_threads: options[:concurrency], idletime: 10, auto_terminate: true, max_queue: 0)
 
@@ -95,7 +94,6 @@ module Mastodon
         next if options[:exclude_suspended] && domain.match(blocked_domains)
 
         stats[domain] = nil
-        processed.increment
 
         begin
           Request.new(:get, "https://#{domain}/api/v1/instance").perform do |res|
@@ -115,11 +113,11 @@ module Mastodon
             next unless res.code == 200
             stats[domain]['activity'] = Oj.load(res.to_s)
           end
-
-          say('.', :green, false) unless options[:silent]
         rescue StandardError
           failed.increment
-          say('.', :red, false) unless options[:silent]
+        ensure
+          processed.increment
+          progress.increment unless progress.finished?
         end
       end
 
@@ -133,10 +131,9 @@ module Mastodon
       pool.shutdown
       pool.wait_for_termination(20)
     ensure
+      progress.finish
       pool.shutdown
 
-      say unless options[:silent]
-
       case options[:format]
       when 'summary'
         stats_to_summary(stats, processed, failed, start_at)
diff --git a/lib/mastodon/feeds_cli.rb b/lib/mastodon/feeds_cli.rb
index fe11c3df4..ea7c90dff 100644
--- a/lib/mastodon/feeds_cli.rb
+++ b/lib/mastodon/feeds_cli.rb
@@ -6,55 +6,33 @@ require_relative 'cli_helper'
 
 module Mastodon
   class FeedsCLI < Thor
+    include CLIHelper
+
     def self.exit_on_failure?
       true
     end
 
     option :all, type: :boolean, default: false
-    option :background, type: :boolean, default: false
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
     option :dry_run, type: :boolean, default: false
-    option :verbose, type: :boolean, default: false
     desc 'build [USERNAME]', 'Build home and list feeds for one or all users'
     long_desc <<-LONG_DESC
       Build home and list feeds that are stored in Redis from the database.
 
       With the --all option, all active users will be processed.
       Otherwise, a single user specified by USERNAME.
-
-      With the --background option, regeneration will be queued into Sidekiq,
-      and the command will exit as soon as possible.
-
-      With the --dry-run option, no work will be done.
-
-      With the --verbose option, when accounts are processed sequentially in the
-      foreground, the IDs of the accounts will be printed.
     LONG_DESC
     def build(username = nil)
       dry_run = options[:dry_run] ? '(DRY RUN)' : ''
 
       if options[:all] || username.nil?
-        processed = 0
-        queued    = 0
 
-        User.active.select(:id, :account_id).reorder(nil).find_in_batches do |users|
-          if options[:background]
-            RegenerationWorker.push_bulk(users.map(&:account_id)) unless options[:dry_run]
-            queued += users.size
-          else
-            users.each do |user|
-              RegenerationWorker.new.perform(user.account_id) unless options[:dry_run]
-              options[:verbose] ? say(user.account_id) : say('.', :green, false)
-              processed += 1
-            end
-          end
+        processed, = parallelize_with_progress(Account.joins(:user).merge(User.active)) do |account|
+          PrecomputeFeedService.new.call(account) unless options[:dry_run]
         end
 
-        if options[:background]
-          say("Scheduled feed regeneration for #{queued} accounts #{dry_run}", :green, true)
-        else
-          say
-          say("Regenerated feeds for #{processed} accounts #{dry_run}", :green, true)
-        end
+        say("Regenerated feeds for #{processed} accounts #{dry_run}", :green, true)
       elsif username.present?
         account = Account.find_local(username)
 
@@ -63,11 +41,7 @@ module Mastodon
           exit(1)
         end
 
-        if options[:background]
-          RegenerationWorker.perform_async(account.id) unless options[:dry_run]
-        else
-          RegenerationWorker.new.perform(account.id) unless options[:dry_run]
-        end
+        PrecomputeFeedService.new.call(account) unless options[:dry_run]
 
         say("OK #{dry_run}", :green, true)
       else
diff --git a/lib/mastodon/media_cli.rb b/lib/mastodon/media_cli.rb
index 08e646d4a..0659b6b65 100644
--- a/lib/mastodon/media_cli.rb
+++ b/lib/mastodon/media_cli.rb
@@ -7,14 +7,15 @@ require_relative 'cli_helper'
 module Mastodon
   class MediaCLI < Thor
     include ActionView::Helpers::NumberHelper
+    include CLIHelper
 
     def self.exit_on_failure?
       true
     end
 
-    option :days, type: :numeric, default: 7
-    option :background, type: :boolean, default: false
-    option :verbose, type: :boolean, default: false
+    option :days, type: :numeric, default: 7, aliases: [:d]
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, default: false, aliases: [:v]
     option :dry_run, type: :boolean, default: false
     desc 'remove', 'Remove remote media files'
     long_desc <<-DESC
@@ -42,141 +43,5 @@ module Mastodon
 
       say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)}) #{dry_run}", :green, true)
     end
-
-    option :start_after
-    option :dry_run, type: :boolean, default: false
-    desc 'remove-orphans', 'Scan storage and check for files that do not belong to existing media attachments'
-    long_desc <<~LONG_DESC
-      Scans file storage for files that do not belong to existing media attachments. Because this operation
-      requires iterating over every single file individually, it will be slow.
-
-      Please mind that some storage providers charge for the necessary API requests to list objects.
-    LONG_DESC
-    def remove_orphans
-      progress        = create_progress_bar(nil)
-      reclaimed_bytes = 0
-      removed         = 0
-      dry_run         = options[:dry_run] ? ' (DRY RUN)' : ''
-
-      case Paperclip::Attachment.default_options[:storage]
-      when :s3
-        paperclip_instance = MediaAttachment.new.file
-        s3_interface       = paperclip_instance.s3_interface
-        bucket             = s3_interface.bucket(Paperclip::Attachment.default_options[:s3_credentials][:bucket])
-        last_key           = options[:start_after]
-
-        loop do
-          objects = bucket.objects(start_after: last_key, prefix: 'media_attachments/files/').limit(1000).map { |x| x }
-
-          break if objects.empty?
-
-          last_key        = objects.last.key
-          attachments_map = MediaAttachment.where(id: objects.map { |object| object.key.split('/')[2..-2].join.to_i }).each_with_object({}) { |attachment, map| map[attachment.id] = attachment }
-
-          objects.each do |object|
-            attachment_id = object.key.split('/')[2..-2].join.to_i
-            filename      = object.key.split('/').last
-
-            progress.increment
-
-            next unless attachments_map[attachment_id].nil? || !attachments_map[attachment_id].variant?(filename)
-
-            reclaimed_bytes += object.size
-            removed += 1
-            object.delete unless options[:dry_run]
-            progress.log("Found and removed orphan: #{object.key}")
-          end
-        end
-      when :fog
-        say('The fog storage driver is not supported for this operation at this time', :red)
-        exit(1)
-      when :filesystem
-        require 'find'
-
-        root_path = ENV.fetch('RAILS_ROOT_PATH', File.join(':rails_root', 'public', 'system')).gsub(':rails_root', Rails.root.to_s)
-
-        Find.find(File.join(root_path, 'media_attachments', 'files')) do |path|
-          next if File.directory?(path)
-
-          key           = path.gsub("#{root_path}#{File::SEPARATOR}", '')
-          attachment_id = key.split(File::SEPARATOR)[2..-2].join.to_i
-          filename      = key.split(File::SEPARATOR).last
-          attachment    = MediaAttachment.find_by(id: attachment_id)
-
-          progress.increment
-
-          next unless attachment.nil? || !attachment.variant?(filename)
-
-          reclaimed_bytes += File.size(path)
-          removed += 1
-          File.delete(path) unless options[:dry_run]
-          progress.log("Found and removed orphan: #{key}")
-        end
-      end
-
-      progress.total = progress.progress
-      progress.finish
-
-      say("Removed #{removed} orphans (approx. #{number_to_human_size(reclaimed_bytes)})#{dry_run}", :green, true)
-    end
-
-    option :account, type: :string
-    option :domain, type: :string
-    option :status, type: :numeric
-    option :concurrency, type: :numeric, default: 5, aliases: [:c]
-    option :verbose, type: :boolean, default: false, aliases: [:v]
-    option :dry_run, type: :boolean, default: false
-    option :force, type: :boolean, default: false
-    desc 'refresh', 'Fetch remote media files'
-    long_desc <<-DESC
-      Re-downloads media attachments from other servers. You must specify the
-      source of media attachments with one of the following options:
-
-      Use the --status option to download attachments from a specific status,
-      using the status local numeric ID.
-
-      With the --background option, instead of deleting the files sequentially,
-      they will be queued into Sidekiq and the command will exit as soon as
-      possible. In Sidekiq they will be processed with higher concurrency, but
-      it may impact other operations of the Mastodon server, and it may overload
-      the underlying file storage.
-
-      With the --dry-run option, no work will be done.
-
-      With the --verbose option, when media attachments are processed sequentially in the
-      foreground, the IDs of the media attachments will be printed.
-    DESC
-    def remove
-      time_ago  = options[:days].days.ago
-      queued    = 0
-      processed = 0
-      size      = 0
-      dry_run   = options[:dry_run] ? '(DRY RUN)' : ''
-
-      if options[:background]
-        MediaAttachment.where.not(remote_url: '').where.not(file_file_name: nil).where('created_at < ?', time_ago).select(:id, :file_file_size).reorder(nil).find_in_batches do |media_attachments|
-          queued += media_attachments.size
-          size   += media_attachments.reduce(0) { |sum, m| sum + (m.file_file_size || 0) }
-          Maintenance::UncacheMediaWorker.push_bulk(media_attachments.map(&:id)) unless options[:dry_run]
-        end
-      else
-        MediaAttachment.where.not(remote_url: '').where.not(file_file_name: nil).where('created_at < ?', time_ago).reorder(nil).find_in_batches do |media_attachments|
-          media_attachments.each do |m|
-            size += m.file_file_size || 0
-            Maintenance::UncacheMediaWorker.new.perform(m) unless options[:dry_run]
-            options[:verbose] ? say(m.id) : say('.', :green, false)
-            processed += 1
-          end
-        end
-      end
-
-      say
-
-      if options[:background]
-        say("Scheduled the deletion of #{queued} media attachments (approx. #{number_to_human_size(size)}) #{dry_run}", :green, true)
-      else
-        say("Removed #{processed} media attachments (approx. #{number_to_human_size(size)}) #{dry_run}", :green, true)
-      end
-    end
   end
 end
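The body of the rewritten `remove` task is elided by the hunk above; only its new options and the final summary line are visible. Based on those, on the preview card counterpart below, and on the new MediaAttachment.cached scope, the task plausibly takes a shape like the following sketch (an approximation, not the literal committed code):

    def remove
      time_ago = options[:days].days.ago
      dry_run  = options[:dry_run] ? '(DRY RUN)' : ''
      # `cached` already restricts to remote attachments with a local file
      scope    = MediaAttachment.cached.where('created_at < ?', time_ago)

      processed, aggregate = parallelize_with_progress(scope) do |media_attachment|
        next if media_attachment.file.blank?

        size = media_attachment.file_file_size

        unless options[:dry_run]
          media_attachment.file.destroy
          media_attachment.save
        end

        size # summed into `aggregate` for the size report below
      end

      say("Removed #{processed} media attachments (approx. #{number_to_human_size(aggregate)}) #{dry_run}", :green, true)
    end
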
diff --git a/lib/mastodon/preview_cards_cli.rb b/lib/mastodon/preview_cards_cli.rb
new file mode 100644
index 000000000..cf4407250
--- /dev/null
+++ b/lib/mastodon/preview_cards_cli.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'tty-prompt'
+require_relative '../../config/boot'
+require_relative '../../config/environment'
+require_relative 'cli_helper'
+
+module Mastodon
+  class PreviewCardsCLI < Thor
+    include ActionView::Helpers::NumberHelper
+    include CLIHelper
+
+    def self.exit_on_failure?
+      true
+    end
+
+    option :days, type: :numeric, default: 180
+    option :concurrency, type: :numeric, default: 5, aliases: [:c]
+    option :verbose, type: :boolean, aliases: [:v]
+    option :dry_run, type: :boolean, default: false
+    option :link, type: :boolean, default: false
+    desc 'remove', 'Remove preview cards'
+    long_desc <<-DESC
+      Removes local thumbnails for preview cards.
+
+      The --days option specifies how old preview cards have to be before
+      they are removed. It defaults to 180 days. Since preview cards are
+      only re-fetched when a link is re-posted more than two weeks after
+      the last fetch, it is not recommended to delete preview cards newer
+      than 14 days.
+
+      With the --link option, only link-type preview cards will be deleted,
+      leaving video and photo cards untouched.
+    DESC
+    def remove
+      time_ago = options[:days].days.ago
+      dry_run  = options[:dry_run] ? ' (DRY RUN)' : ''
+      link     = options[:link] ? 'link-type ' : ''
+      scope    = PreviewCard.cached
+      scope    = scope.where(type: :link) if options[:link]
+      scope    = scope.where('updated_at < ?', time_ago)
+
+      processed, aggregate = parallelize_with_progress(scope) do |preview_card|
+        next if preview_card.image.blank?
+
+        size = preview_card.image_file_size
+
+        unless options[:dry_run]
+          preview_card.image.destroy
+          preview_card.save
+        end
+
+        size
+      end
+
+      say("Removed #{processed} #{link}preview cards (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
+    end
+  end
+end
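One piece not shown in this excerpt is how PreviewCardsCLI becomes reachable from the command line. Following the pattern of the other CLI classes, it would be registered in lib/mastodon/cli.rb as a Thor subcommand, roughly like the hypothetical snippet below (not part of this diff):

    require_relative 'mastodon/preview_cards_cli'

    module Mastodon
      class CLI < Thor
        desc 'preview_cards SUBCOMMAND ...ARGS', 'Manage preview cards'
        subcommand 'preview_cards', Mastodon::PreviewCardsCLI
      end
    end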