Diffstat (limited to 'app/services/batched_remove_status_service.rb')
-rw-r--r--  app/services/batched_remove_status_service.rb  24
1 file changed, 7 insertions(+), 17 deletions(-)
diff --git a/app/services/batched_remove_status_service.rb b/app/services/batched_remove_status_service.rb
index 3ec000110..61617d958 100644
--- a/app/services/batched_remove_status_service.rb
+++ b/app/services/batched_remove_status_service.rb
@@ -8,7 +8,7 @@ class BatchedRemoveStatusService < BaseService
   # @param [Hash] options
   # @option [Boolean] :skip_side_effects Do not modify feeds and send updates to streaming API
   def call(statuses, **options)
-    ActiveRecord::Associations::Preloader.new.preload(statuses, options[:skip_side_effects] ? :reblogs : [:account, reblogs: :account])
+    ActiveRecord::Associations::Preloader.new.preload(statuses, options[:skip_side_effects] ? :reblogs : [:account, :tags, reblogs: :account])
 
     statuses_and_reblogs = statuses.flat_map { |status| [status] + status.reblogs }
 
@@ -27,7 +27,7 @@ class BatchedRemoveStatusService < BaseService
     # transaction lock the database, but we use the delete method instead
     # of destroy to avoid all callbacks. We rely on foreign keys to
     # cascade the delete faster without loading the associations.
-    statuses_and_reblogs.each(&:delete)
+    statuses_and_reblogs.each_slice(50) { |slice| Status.where(id: slice.map(&:id)).delete_all }
 
     # Since we skipped all callbacks, we also need to manually
     # deindex the statuses
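Note: the hunk above swaps the per-record delete (one DELETE statement per status) for a batched delete_all over slices of 50 ids, still skipping ActiveRecord callbacks and still relying on the database's cascading foreign keys for dependent rows. A minimal sketch of that pattern, assuming an ActiveRecord-backed Status model; the helper name below is made up for illustration:

    # Issue one DELETE ... WHERE id IN (...) per slice instead of one per
    # record; delete_all skips callbacks and validations, so dependent rows
    # are removed by the database via ON DELETE CASCADE foreign keys.
    def delete_statuses_in_batches(records, batch_size: 50)
      records.each_slice(batch_size) do |slice|
        Status.where(id: slice.map(&:id)).delete_all
      end
    end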
@@ -35,11 +35,6 @@ class BatchedRemoveStatusService < BaseService
 
     return if options[:skip_side_effects]
 
-    ActiveRecord::Associations::Preloader.new.preload(statuses_and_reblogs, :tags)
-
-    @tags          = statuses_and_reblogs.each_with_object({}) { |s, h| h[s.id] = s.tags.map { |tag| tag.name.mb_chars.downcase } }
-    @json_payloads = statuses_and_reblogs.each_with_object({}) { |s, h| h[s.id] = Oj.dump(event: :delete, payload: s.id.to_s) }
-
     # Batch by source account
     statuses_and_reblogs.group_by(&:account_id).each_value do |account_statuses|
       account = account_statuses.first.account
@@ -51,8 +46,9 @@ class BatchedRemoveStatusService < BaseService
     end
 
     # Cannot be batched
+    @status_id_cutoff = Mastodon::Snowflake.id_at(2.weeks.ago)
     redis.pipelined do
-      statuses_and_reblogs.each do |status|
+      statuses.each do |status|
         unpush_from_public_timelines(status)
       end
     end
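Note: Mastodon status ids are snowflake-style, with the creation timestamp encoded in the high bits, so comparing an id against Mastodon::Snowflake.id_at(2.weeks.ago) selects statuses created in the last two weeks without consulting a timestamp column; the pipelined loop also iterates only over the original statuses, presumably because reblogs are not surfaced on the public timelines. A rough sketch of the cutoff check this hunk introduces; the local variable names are illustrative only:

    # Ids greater than the cutoff belong to statuses newer than two weeks;
    # only those are unpushed from the public/hashtag streaming timelines.
    cutoff = Mastodon::Snowflake.id_at(2.weeks.ago)
    recent_statuses = statuses.select { |status| status.id > cutoff }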
@@ -66,12 +62,6 @@ class BatchedRemoveStatusService < BaseService
         FeedManager.instance.unpush_from_home(follower, status)
       end
     end
-
-    return unless account.local?
-
-    statuses.each do |status|
-      FeedManager.instance.unpush_from_home(account, status)
-    end
   end
 
   def unpush_from_list_timelines(account, statuses)
@@ -83,9 +73,9 @@ class BatchedRemoveStatusService < BaseService
   end
 
   def unpush_from_public_timelines(status)
-    return unless status.public_visibility?
+    return unless status.public_visibility? && status.id > @status_id_cutoff
 
-    payload = @json_payloads[status.id]
+    payload = Oj.dump(event: :delete, payload: status.id.to_s)
 
     redis.publish('timeline:public', payload)
     redis.publish(status.local? ? 'timeline:public:local' : 'timeline:public:remote', payload)
@@ -95,7 +85,7 @@ class BatchedRemoveStatusService < BaseService
       redis.publish(status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', payload)
     end
 
-    @tags[status.id].each do |hashtag|
+    status.tags.map { |tag| tag.name.mb_chars.downcase }.each do |hashtag|
       redis.publish("timeline:hashtag:#{hashtag}", payload)
       redis.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local?
     end
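Note: with the precomputed @tags and @json_payloads hashes gone, the delete payload and the lowercased hashtag names are now built inline per status; the tags are already in memory thanks to the :tags preload added at the top of call. Collected in one place, the publishing flow for a single public, recent status looks roughly like this (channel names as in the hunks above; the media-specific channels are handled the same way and omitted here):

    payload = Oj.dump(event: :delete, payload: status.id.to_s)
    redis.publish('timeline:public', payload)
    redis.publish(status.local? ? 'timeline:public:local' : 'timeline:public:remote', payload)
    status.tags.map { |tag| tag.name.mb_chars.downcase }.each do |hashtag|
      redis.publish("timeline:hashtag:#{hashtag}", payload)
      redis.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local?
    end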