author     Starfall <us@starfall.systems>  2022-01-31 12:50:14 -0600
committer  Starfall <us@starfall.systems>  2022-01-31 12:50:14 -0600
commit     17265f47f8f931e70699088dd8bd2a1c7b78112b (patch)
tree       a1dde2630cd8e481cc4c5d047c4af241a251def0 /app/services
parent     129962006c2ebcd195561ac556887dc87d32081c (diff)
parent     d6f3261c6cb810ea4eb6f74b9ee62af0d94cbd52 (diff)
Merge branch 'glitchsoc'
Diffstat (limited to 'app/services')
25 files changed, 606 insertions, 288 deletions
diff --git a/app/services/account_statuses_cleanup_service.rb b/app/services/account_statuses_cleanup_service.rb
new file mode 100644
index 000000000..3918b5ba4
--- /dev/null
+++ b/app/services/account_statuses_cleanup_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class AccountStatusesCleanupService < BaseService
+  # @param [AccountStatusesCleanupPolicy] account_policy
+  # @param [Integer] budget
+  # @return [Integer]
+  def call(account_policy, budget = 50)
+    return 0 unless account_policy.enabled?
+
+    cutoff_id = account_policy.compute_cutoff_id
+    return 0 if cutoff_id.blank?
+
+    num_deleted = 0
+    last_deleted = nil
+
+    account_policy.statuses_to_delete(budget, cutoff_id, account_policy.last_inspected).reorder(nil).find_each(order: :asc) do |status|
+      status.discard
+      RemovalWorker.perform_async(status.id, { 'redraft' => false })
+      num_deleted += 1
+      last_deleted = status.id
+    end
+
+    account_policy.record_last_inspected(last_deleted.presence || cutoff_id)
+
+    num_deleted
+  end
+end
diff --git a/app/services/activitypub/fetch_featured_collection_service.rb b/app/services/activitypub/fetch_featured_collection_service.rb
index 72352aca6..780741feb 100644
--- a/app/services/activitypub/fetch_featured_collection_service.rb
+++ b/app/services/activitypub/fetch_featured_collection_service.rb
@@ -23,7 +23,7 @@ class ActivityPub::FetchFeaturedCollectionService < BaseService
   def process_items(items)
     status_ids = items.map { |item| value_or_id(item) }
-                      .filter_map { |uri| ActivityPub::FetchRemoteStatusService.new.call(uri) unless ActivityPub::TagManager.instance.local_uri?(uri) }
+                      .filter_map { |uri| ActivityPub::FetchRemoteStatusService.new.call(uri, on_behalf_of: local_follower) unless ActivityPub::TagManager.instance.local_uri?(uri) }
                       .filter_map { |status| status.id if status.account_id == @account.id }
 
     to_remove = []
     to_add    = status_ids
@@ -46,4 +46,8 @@ class ActivityPub::FetchFeaturedCollectionService < BaseService
   def supported_context?
     super(@json)
   end
+
+  def local_follower
+    @local_follower ||= @account.followers.local.without_suspended.first
+  end
 end
diff --git a/app/services/activitypub/fetch_remote_poll_service.rb b/app/services/activitypub/fetch_remote_poll_service.rb
index 1c79ecf11..1829e791c 100644
--- a/app/services/activitypub/fetch_remote_poll_service.rb
+++ b/app/services/activitypub/fetch_remote_poll_service.rb
@@ -8,6 +8,6 @@ class ActivityPub::FetchRemotePollService < BaseService
 
     return unless supported_context?(json)
 
-    ActivityPub::ProcessPollService.new.call(poll, json)
+    ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
   end
 end
diff --git a/app/services/activitypub/fetch_remote_status_service.rb b/app/services/activitypub/fetch_remote_status_service.rb
index cf4f62899..4f789d50b 100644
--- a/app/services/activitypub/fetch_remote_status_service.rb
+++ b/app/services/activitypub/fetch_remote_status_service.rb
@@ -13,7 +13,20 @@ class ActivityPub::FetchRemoteStatusService < BaseService
       end
     end
 
-    return if !(supported_context? && expected_type?) || actor_id.nil? || !trustworthy_attribution?(@json['id'], actor_id)
+    return unless supported_context?
+
+    actor_id = nil
+    activity_json = nil
+
+    if expected_object_type?
+      actor_id = value_or_id(first_of_value(@json['attributedTo']))
+      activity_json = { 'type' => 'Create', 'actor' => actor_id, 'object' => @json }
+    elsif expected_activity_type?
+      actor_id = value_or_id(first_of_value(@json['actor']))
+      activity_json = @json
+    end
+
+    return if activity_json.nil? || !trustworthy_attribution?(@json['id'], actor_id)
 
     actor = ActivityPub::TagManager.instance.uri_to_resource(actor_id, Account)
     actor = ActivityPub::FetchRemoteAccountService.new.call(actor_id, id: true) if actor.nil? || needs_update?(actor)
@@ -25,14 +38,6 @@ class ActivityPub::FetchRemoteStatusService < BaseService
 
   private
 
-  def activity_json
-    { 'type' => 'Create', 'actor' => actor_id, 'object' => @json }
-  end
-
-  def actor_id
-    value_or_id(first_of_value(@json['attributedTo']))
-  end
-
   def trustworthy_attribution?(uri, attributed_to)
     return false if uri.nil? || attributed_to.nil?
     Addressable::URI.parse(uri).normalized_host.casecmp(Addressable::URI.parse(attributed_to).normalized_host).zero?
@@ -42,7 +47,11 @@ class ActivityPub::FetchRemoteStatusService < BaseService
     super(@json)
   end
 
-  def expected_type?
+  def expected_activity_type?
+    equals_or_includes_any?(@json['type'], %w(Create Announce))
+  end
+
+  def expected_object_type?
     equals_or_includes_any?(@json['type'], ActivityPub::Activity::Create::SUPPORTED_TYPES + ActivityPub::Activity::Create::CONVERTED_TYPES)
   end
diff --git a/app/services/activitypub/process_account_service.rb b/app/services/activitypub/process_account_service.rb
index 4ab6912e5..ec5140720 100644
--- a/app/services/activitypub/process_account_service.rb
+++ b/app/services/activitypub/process_account_service.rb
@@ -27,7 +27,6 @@ class ActivityPub::ProcessAccountService < BaseService
       create_account if @account.nil?
       update_account
       process_tags
-      process_attachments
 
       process_duplicate_accounts! if @options[:verified_webfinger]
     else
@@ -301,23 +300,6 @@ class ActivityPub::ProcessAccountService < BaseService
     end
   end
 
-  def process_attachments
-    return if @json['attachment'].blank?
-
-    previous_proofs = @account.identity_proofs.to_a
-    current_proofs = []
-
-    as_array(@json['attachment']).each do |attachment|
-      next unless equals_or_includes?(attachment['type'], 'IdentityProof')
-      current_proofs << process_identity_proof(attachment)
-    end
-
-    previous_proofs.each do |previous_proof|
-      next if current_proofs.any? { |current_proof| current_proof.id == previous_proof.id }
-      previous_proof.delete
-    end
-  end
-
   def process_emoji(tag)
     return if skip_download?
     return if tag['name'].blank? || tag['icon'].blank? || tag['icon']['url'].blank?
@@ -334,12 +316,4 @@ class ActivityPub::ProcessAccountService < BaseService
     emoji.image_remote_url = image_url
     emoji.save
   end
-
-  def process_identity_proof(attachment)
-    provider = attachment['signatureAlgorithm']
-    provider_username = attachment['name']
-    token = attachment['signatureValue']
-
-    @account.identity_proofs.where(provider: provider, provider_username: provider_username).find_or_create_by(provider: provider, provider_username: provider_username, token: token)
-  end
 end
diff --git a/app/services/activitypub/process_poll_service.rb b/app/services/activitypub/process_poll_service.rb
deleted file mode 100644
index d83e614d8..000000000
--- a/app/services/activitypub/process_poll_service.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-class ActivityPub::ProcessPollService < BaseService
-  include JsonLdHelper
-
-  def call(poll, json)
-    @json = json
-
-    return unless expected_type?
-
-    previous_expires_at = poll.expires_at
-
-    expires_at = begin
-      if @json['closed'].is_a?(String)
-        @json['closed']
-      elsif !@json['closed'].nil? && !@json['closed'].is_a?(FalseClass)
-        Time.now.utc
-      else
-        @json['endTime']
-      end
-    end
-
-    items = begin
-      if @json['anyOf'].is_a?(Array)
-        @json['anyOf']
-      else
-        @json['oneOf']
-      end
-    end
-
-    voters_count = @json['votersCount']
-
-    latest_options = items.filter_map { |item| item['name'].presence || item['content'] }
-
-    # If for some reasons the options were changed, it invalidates all previous
-    # votes, so we need to remove them
-    poll.votes.delete_all if latest_options != poll.options
-
-    begin
-      poll.update!(
-        last_fetched_at: Time.now.utc,
-        expires_at: expires_at,
-        options: latest_options,
-        cached_tallies: items.map { |item| item.dig('replies', 'totalItems') || 0 },
-        voters_count: voters_count
-      )
-    rescue ActiveRecord::StaleObjectError
-      poll.reload
-      retry
-    end
-
-    # If the poll had no expiration date set but now has, and people have voted,
-    # schedule a notification.
-    if previous_expires_at.nil? && poll.expires_at.present? && poll.votes.exists?
-      PollExpirationNotifyWorker.perform_at(poll.expires_at + 5.minutes, poll.id)
-    end
-  end
-
-  private
-
-  def expected_type?
-    equals_or_includes_any?(@json['type'], %w(Question))
-  end
-end
diff --git a/app/services/activitypub/process_status_update_service.rb b/app/services/activitypub/process_status_update_service.rb
new file mode 100644
index 000000000..977928127
--- /dev/null
+++ b/app/services/activitypub/process_status_update_service.rb
@@ -0,0 +1,283 @@
+# frozen_string_literal: true
+
+class ActivityPub::ProcessStatusUpdateService < BaseService
+  include JsonLdHelper
+
+  def call(status, json)
+    @json = json
+    @status_parser = ActivityPub::Parser::StatusParser.new(@json)
+    @uri = @status_parser.uri
+    @status = status
+    @account = status.account
+    @media_attachments_changed = false
+    @poll_changed = false
+
+    # Only native types can be updated at the moment
+    return if !expected_type? || already_updated_more_recently?
+
+    # Only allow processing one create/update per status at a time
+    RedisLock.acquire(lock_options) do |lock|
+      if lock.acquired?
+        Status.transaction do
+          create_previous_edit!
+          update_media_attachments!
+          update_poll!
+          update_immediate_attributes!
+          update_metadata!
+          create_edit!
+        end
+
+        queue_poll_notifications!
+
+        next unless significant_changes?
+
+        reset_preview_card!
+        broadcast_updates!
+      else
+        raise Mastodon::RaceConditionError
+      end
+    end
+  end
+
+  private
+
+  def update_media_attachments!
+    previous_media_attachments = @status.media_attachments.to_a
+    next_media_attachments = []
+
+    as_array(@json['attachment']).each do |attachment|
+      media_attachment_parser = ActivityPub::Parser::MediaAttachmentParser.new(attachment)
+
+      next if media_attachment_parser.remote_url.blank? || next_media_attachments.size > 4
+
+      begin
+        media_attachment = previous_media_attachments.find { |previous_media_attachment| previous_media_attachment.remote_url == media_attachment_parser.remote_url }
+        media_attachment ||= MediaAttachment.new(account: @account, remote_url: media_attachment_parser.remote_url)
+
+        # If a previously existing media attachment was significantly updated, mark
+        # media attachments as changed even if none were added or removed
+        if media_attachment_parser.significantly_changes?(media_attachment)
+          @media_attachments_changed = true
+        end
+
+        media_attachment.description = media_attachment_parser.description
+        media_attachment.focus = media_attachment_parser.focus
+        media_attachment.thumbnail_remote_url = media_attachment_parser.thumbnail_remote_url
+        media_attachment.blurhash = media_attachment_parser.blurhash
+        media_attachment.save!
+
+        next_media_attachments << media_attachment
+
+        next if unsupported_media_type?(media_attachment_parser.file_content_type) || skip_download?
+
+        RedownloadMediaWorker.perform_async(media_attachment.id) if media_attachment.remote_url_previously_changed? || media_attachment.thumbnail_remote_url_previously_changed?
+      rescue Addressable::URI::InvalidURIError => e
+        Rails.logger.debug "Invalid URL in attachment: #{e}"
+      end
+    end
+
+    removed_media_attachments = previous_media_attachments - next_media_attachments
+    added_media_attachments = next_media_attachments - previous_media_attachments
+
+    MediaAttachment.where(id: removed_media_attachments.map(&:id)).update_all(status_id: nil)
+    MediaAttachment.where(id: added_media_attachments.map(&:id)).update_all(status_id: @status.id)
+
+    @media_attachments_changed = true if removed_media_attachments.any? || added_media_attachments.any?
+  end
+
+  def update_poll!
+    previous_poll = @status.preloadable_poll
+    @previous_expires_at = previous_poll&.expires_at
+    poll_parser = ActivityPub::Parser::PollParser.new(@json)
+
+    if poll_parser.valid?
+      poll = previous_poll || @account.polls.new(status: @status)
+
+      # If for some reasons the options were changed, it invalidates all previous
+      # votes, so we need to remove them
+      if poll_parser.significantly_changes?(poll)
+        @poll_changed = true
+        poll.votes.delete_all unless poll.new_record?
+      end
+
+      poll.last_fetched_at = Time.now.utc
+      poll.options = poll_parser.options
+      poll.multiple = poll_parser.multiple
+      poll.expires_at = poll_parser.expires_at
+      poll.voters_count = poll_parser.voters_count
+      poll.cached_tallies = poll_parser.cached_tallies
+      poll.save!
+
+      @status.poll_id = poll.id
+    elsif previous_poll.present?
+      previous_poll.destroy!
+      @poll_changed = true
+      @status.poll_id = nil
+    end
+  end
+
+  def update_immediate_attributes!
+    @status.text = @status_parser.text || ''
+    @status.spoiler_text = @status_parser.spoiler_text || ''
+    @status.sensitive = @account.sensitized? || @status_parser.sensitive || false
+    @status.language = @status_parser.language || detected_language
+    @status.edited_at = @status_parser.edited_at || Time.now.utc if significant_changes?
+
+    @status.save!
+  end
+
+  def update_metadata!
+    @raw_tags = []
+    @raw_mentions = []
+    @raw_emojis = []
+
+    as_array(@json['tag']).each do |tag|
+      if equals_or_includes?(tag['type'], 'Hashtag')
+        @raw_tags << tag['name']
+      elsif equals_or_includes?(tag['type'], 'Mention')
+        @raw_mentions << tag['href']
+      elsif equals_or_includes?(tag['type'], 'Emoji')
+        @raw_emojis << tag
+      end
+    end
+
+    update_tags!
+    update_mentions!
+    update_emojis!
+  end
+
+  def update_tags!
+    @status.tags = Tag.find_or_create_by_names(@raw_tags)
+  end
+
+  def update_mentions!
+    previous_mentions = @status.active_mentions.includes(:account).to_a
+    current_mentions = []
+
+    @raw_mentions.each do |href|
+      next if href.blank?
+
+      account = ActivityPub::TagManager.instance.uri_to_resource(href, Account)
+      account ||= ActivityPub::FetchRemoteAccountService.new.call(href)
+
+      next if account.nil?
+
+      mention = previous_mentions.find { |x| x.account_id == account.id }
+      mention ||= account.mentions.new(status: @status)
+
+      current_mentions << mention
+    end
+
+    current_mentions.each do |mention|
+      mention.save if mention.new_record?
+    end
+
+    # If previous mentions are no longer contained in the text, convert them
+    # to silent mentions, since withdrawing access from someone who already
+    # received a notification might be more confusing
+    removed_mentions = previous_mentions - current_mentions
+
+    Mention.where(id: removed_mentions.map(&:id)).update_all(silent: true) unless removed_mentions.empty?
+  end
+
+  def update_emojis!
+    return if skip_download?
+
+    @raw_emojis.each do |raw_emoji|
+      custom_emoji_parser = ActivityPub::Parser::CustomEmojiParser.new(raw_emoji)
+
+      next if custom_emoji_parser.shortcode.blank? || custom_emoji_parser.image_remote_url.blank?
+
+      emoji = CustomEmoji.find_by(shortcode: custom_emoji_parser.shortcode, domain: @account.domain)
+
+      next unless emoji.nil? || custom_emoji_parser.image_remote_url != emoji.image_remote_url || (custom_emoji_parser.updated_at && custom_emoji_parser.updated_at >= emoji.updated_at)
+
+      begin
+        emoji ||= CustomEmoji.new(domain: @account.domain, shortcode: custom_emoji_parser.shortcode, uri: custom_emoji_parser.uri)
+        emoji.image_remote_url = custom_emoji_parser.image_remote_url
+        emoji.save
+      rescue Seahorse::Client::NetworkingError => e
+        Rails.logger.warn "Error storing emoji: #{e}"
+      end
+    end
+  end
+
+  def expected_type?
+    equals_or_includes_any?(@json['type'], %w(Note Question))
+  end
+
+  def lock_options
+    { redis: Redis.current, key: "create:#{@uri}", autorelease: 15.minutes.seconds }
+  end
+
+  def detected_language
+    LanguageDetector.instance.detect(@status_parser.text, @account)
+  end
+
+  def create_previous_edit!
+    # We only need to create a previous edit when no previous edits exist, e.g.
+    # when the status has never been edited. For other cases, we always create
+    # an edit, so the step can be skipped
+
+    return if @status.edits.any?
+
+    @status.edits.create(
+      text: @status.text,
+      spoiler_text: @status.spoiler_text,
+      media_attachments_changed: false,
+      account_id: @account.id,
+      created_at: @status.created_at
+    )
+  end
+
+  def create_edit!
+    return unless significant_changes?
+
+    @status_edit = @status.edits.create(
+      text: @status.text,
+      spoiler_text: @status.spoiler_text,
+      media_attachments_changed: @media_attachments_changed || @poll_changed,
+      account_id: @account.id,
+      created_at: @status.edited_at
+    )
+  end
+
+  def skip_download?
+    return @skip_download if defined?(@skip_download)
+
+    @skip_download ||= DomainBlock.reject_media?(@account.domain)
+  end
+
+  def unsupported_media_type?(mime_type)
+    mime_type.present? && !MediaAttachment.supported_mime_types.include?(mime_type)
+  end
+
+  def significant_changes?
+    @status.text_changed? || @status.text_previously_changed? || @status.spoiler_text_changed? || @status.spoiler_text_previously_changed? || @media_attachments_changed || @poll_changed
+  end
+
+  def already_updated_more_recently?
+    @status.edited_at.present? && @status_parser.edited_at.present? && @status.edited_at > @status_parser.edited_at
+  end
+
+  def reset_preview_card!
+    @status.preview_cards.clear
+    LinkCrawlWorker.perform_in(rand(1..59).seconds, @status.id)
+  end
+
+  def broadcast_updates!
+    ::DistributionWorker.perform_async(@status.id, { 'update' => true })
+  end
+
+  def queue_poll_notifications!
+    poll = @status.preloadable_poll
+
+    # If the poll had no expiration date set but now has, or now has a sooner
+    # expiration date, and people have voted, schedule a notification
+
+    return unless poll.present? && poll.expires_at.present? && poll.votes.exists?
+
+    PollExpirationNotifyWorker.remove_from_scheduled(poll.id) if @previous_expires_at.present? && @previous_expires_at > poll.expires_at
+    PollExpirationNotifyWorker.perform_at(poll.expires_at + 5.minutes, poll.id)
+  end
+end
diff --git a/app/services/backup_service.rb b/app/services/backup_service.rb
index 749c84736..f07f407d8 100644
--- a/app/services/backup_service.rb
+++ b/app/services/backup_service.rb
@@ -168,7 +168,7 @@ class BackupService < BaseService
           io.write(buffer)
         end
       end
-    rescue Errno::ENOENT, Seahorse::Client::NetworkingError
-      Rails.logger.warn "Could not backup file #{filename}: file not found"
+    rescue Errno::ENOENT, Seahorse::Client::NetworkingError => e
+      Rails.logger.warn "Could not backup file #{filename}: #{e}"
     end
   end
diff --git a/app/services/batched_remove_status_service.rb b/app/services/batched_remove_status_service.rb
index 363aa5ccf..2b649ee22 100644
--- a/app/services/batched_remove_status_service.rb
+++ b/app/services/batched_remove_status_service.rb
@@ -32,7 +32,7 @@ class BatchedRemoveStatusService < BaseService
 
     # Since we skipped all callbacks, we also need to manually
    # deindex the statuses
-    Chewy.strategy.current.update(StatusesIndex::Status, statuses_and_reblogs) if Chewy.enabled?
+    Chewy.strategy.current.update(StatusesIndex, statuses_and_reblogs) if Chewy.enabled?
 
     return if options[:skip_side_effects]
 
diff --git a/app/services/delete_account_service.rb b/app/services/delete_account_service.rb
index 182f0e127..0e3fedfe7 100644
--- a/app/services/delete_account_service.rb
+++ b/app/services/delete_account_service.rb
@@ -4,6 +4,7 @@ class DeleteAccountService < BaseService
   include Payloadable
 
   ASSOCIATIONS_ON_SUSPEND = %w(
+    account_notes
     account_pins
     active_relationships
     aliases
@@ -16,7 +17,6 @@ class DeleteAccountService < BaseService
     domain_blocks
     featured_tags
     follow_requests
-    identity_proofs
     list_accounts
     migrations
     mute_relationships
@@ -34,6 +34,7 @@ class DeleteAccountService < BaseService
   # by foreign keys, making them safe to delete without loading
   # into memory
   ASSOCIATIONS_WITHOUT_SIDE_EFFECTS = %w(
+    account_notes
     account_pins
     aliases
     conversation_mutes
@@ -43,7 +44,6 @@ class DeleteAccountService < BaseService
     domain_blocks
     featured_tags
     follow_requests
-    identity_proofs
     list_accounts
     migrations
     mute_relationships
@@ -187,7 +187,7 @@ class DeleteAccountService < BaseService
     @account.favourites.in_batches do |favourites|
       ids = favourites.pluck(:status_id)
       StatusStat.where(status_id: ids).update_all('favourites_count = GREATEST(0, favourites_count - 1)')
-      Chewy.strategy.current.update(StatusesIndex::Status, ids) if Chewy.enabled?
+      Chewy.strategy.current.update(StatusesIndex, ids) if Chewy.enabled?
       Rails.cache.delete_multi(ids.map { |id| "statuses/#{id}" })
       favourites.delete_all
     end
@@ -195,7 +195,7 @@ class DeleteAccountService < BaseService
 
   def purge_bookmarks!
     @account.bookmarks.in_batches do |bookmarks|
-      Chewy.strategy.current.update(StatusesIndex::Status, bookmarks.pluck(:status_id)) if Chewy.enabled?
+      Chewy.strategy.current.update(StatusesIndex, bookmarks.pluck(:status_id)) if Chewy.enabled?
       bookmarks.delete_all
     end
   end
diff --git a/app/services/fan_out_on_write_service.rb b/app/services/fan_out_on_write_service.rb
index 6fa98ce12..46feec5aa 100644
--- a/app/services/fan_out_on_write_service.rb
+++ b/app/services/fan_out_on_write_service.rb
@@ -3,118 +3,134 @@
 class FanOutOnWriteService < BaseService
   # Push a status into home and mentions feeds
   # @param [Status] status
-  def call(status)
-    raise Mastodon::RaceConditionError if status.visibility.nil?
-
-    deliver_to_self(status) if status.account.local?
-
-    if status.direct_visibility?
-      deliver_to_mentioned_followers(status)
-      deliver_to_direct_timelines(status)
-      deliver_to_own_conversation(status)
-    elsif status.limited_visibility?
-      deliver_to_mentioned_followers(status)
-    else
-      deliver_to_followers(status)
-      deliver_to_lists(status)
-    end
+  # @param [Hash] options
+  # @option options [Boolean] update
+  # @option options [Array<Integer>] silenced_account_ids
+  def call(status, options = {})
+    @status  = status
+    @account = status.account
+    @options = options
+
+    check_race_condition!
+
+    fan_out_to_local_recipients!
+    fan_out_to_public_streams! if broadcastable?
+  end
 
-    return if status.account.silenced? || !status.public_visibility?
-    return if status.reblog? && !Setting.show_reblogs_in_public_timelines
+  private
 
-    render_anonymous_payload(status)
+  def check_race_condition!
+    # I don't know why but at some point we had an issue where
+    # this service was being executed with status objects
+    # that had a null visibility - which should not be possible
+    # since the column in the database is not nullable.
+    #
+    # This check re-queues the service to be run at a later time
+    # with the full object, if something like it occurs
 
-    deliver_to_hashtags(status)
+    raise Mastodon::RaceConditionError if @status.visibility.nil?
+  end
 
-    return if status.reply? && status.in_reply_to_account_id != status.account_id && !Setting.show_replies_in_public_timelines
+  def fan_out_to_local_recipients!
+    deliver_to_self!
+    notify_mentioned_accounts!
 
-    deliver_to_public(status)
-    deliver_to_media(status) if status.media_attachments.any?
+    case @status.visibility.to_sym
+    when :public, :unlisted, :private
+      deliver_to_all_followers!
+      deliver_to_lists!
+    when :limited
+      deliver_to_mentioned_followers!
+    else
+      deliver_to_mentioned_followers!
+      deliver_to_conversation!
+      deliver_to_direct_timelines!
+    end
   end
 
-  private
+  def fan_out_to_public_streams!
+    broadcast_to_hashtag_streams!
+    broadcast_to_public_streams!
+  end
 
-  def deliver_to_self(status)
-    Rails.logger.debug "Delivering status #{status.id} to author"
-    FeedManager.instance.push_to_home(status.account, status)
-    FeedManager.instance.push_to_direct(status.account, status) if status.direct_visibility?
+  def deliver_to_self!
+    FeedManager.instance.push_to_home(@account, @status, update: update?) if @account.local?
+    FeedManager.instance.push_to_direct(@account, @status, update: update?) if @account.local? && @status.direct_visibility?
   end
 
-  def deliver_to_followers(status)
-    Rails.logger.debug "Delivering status #{status.id} to followers"
+  def notify_mentioned_accounts!
+    @status.active_mentions.where.not(id: @options[:silenced_account_ids] || []).joins(:account).merge(Account.local).select(:id, :account_id).reorder(nil).find_in_batches do |mentions|
+      LocalNotificationWorker.push_bulk(mentions) do |mention|
+        [mention.account_id, mention.id, 'Mention', 'mention']
+      end
+    end
+  end
 
-    status.account.followers_for_local_distribution.select(:id).reorder(nil).find_in_batches do |followers|
+  def deliver_to_all_followers!
+    @account.followers_for_local_distribution.select(:id).reorder(nil).find_in_batches do |followers|
       FeedInsertWorker.push_bulk(followers) do |follower|
-        [status.id, follower.id, :home]
+        [@status.id, follower.id, 'home', { 'update' => update? }]
       end
     end
   end
 
-  def deliver_to_lists(status)
-    Rails.logger.debug "Delivering status #{status.id} to lists"
-
-    status.account.lists_for_local_distribution.select(:id).reorder(nil).find_in_batches do |lists|
+  def deliver_to_lists!
+    @account.lists_for_local_distribution.select(:id).reorder(nil).find_in_batches do |lists|
       FeedInsertWorker.push_bulk(lists) do |list|
-        [status.id, list.id, :list]
+        [@status.id, list.id, 'list', { 'update' => update? }]
       end
     end
   end
 
-  def deliver_to_mentioned_followers(status)
-    Rails.logger.debug "Delivering status #{status.id} to limited followers"
-
-    status.mentions.joins(:account).merge(status.account.followers_for_local_distribution).select(:id, :account_id).reorder(nil).find_in_batches do |mentions|
+  def deliver_to_mentioned_followers!
+    @status.mentions.joins(:account).merge(@account.followers_for_local_distribution).select(:id, :account_id).reorder(nil).find_in_batches do |mentions|
       FeedInsertWorker.push_bulk(mentions) do |mention|
-        [status.id, mention.account_id, :home]
+        [@status.id, mention.account_id, 'home', { 'update' => update? }]
      end
     end
   end
 
-  def render_anonymous_payload(status)
-    @payload = InlineRenderer.render(status, nil, :status)
-    @payload = Oj.dump(event: :update, payload: @payload)
+  def deliver_to_direct_timelines!
+    FeedInsertWorker.push_bulk(@status.mentions.includes(:account).map(&:account).select { |mentioned_account| mentioned_account.local? }) do |account|
+      [@status.id, account.id, 'direct', { 'update' => update? }]
+    end
   end
 
-  def deliver_to_hashtags(status)
-    Rails.logger.debug "Delivering status #{status.id} to hashtags"
-
-    status.tags.pluck(:name).each do |hashtag|
-      Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload)
-      Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if status.local?
+  def broadcast_to_hashtag_streams!
+    @status.tags.pluck(:name).each do |hashtag|
+      Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", anonymous_payload)
+      Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", anonymous_payload) if @status.local?
     end
   end
 
-  def deliver_to_public(status)
-    Rails.logger.debug "Delivering status #{status.id} to public timeline"
+  def broadcast_to_public_streams!
+    return if @status.reply? && @status.in_reply_to_account_id != @account.id && !Setting.show_replies_in_public_timelines
 
-    Redis.current.publish('timeline:public', @payload)
-    if status.local?
-      Redis.current.publish('timeline:public:local', @payload)
-    else
-      Redis.current.publish('timeline:public:remote', @payload)
+    Redis.current.publish('timeline:public', anonymous_payload)
+    Redis.current.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', anonymous_payload)
+
+    if @status.media_attachments.any?
+      Redis.current.publish('timeline:public:media', anonymous_payload)
+      Redis.current.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', anonymous_payload)
     end
   end
 
-  def deliver_to_media(status)
-    Rails.logger.debug "Delivering status #{status.id} to media timeline"
-
-    Redis.current.publish('timeline:public:media', @payload)
-    if status.local?
-      Redis.current.publish('timeline:public:local:media', @payload)
-    else
-      Redis.current.publish('timeline:public:remote:media', @payload)
-    end
+  def deliver_to_conversation!
+    AccountConversation.add_status(@account, @status) unless update?
   end
 
-  def deliver_to_direct_timelines(status)
-    Rails.logger.debug "Delivering status #{status.id} to direct timelines"
+  def anonymous_payload
+    @anonymous_payload ||= Oj.dump(
+      event: update? ? :'status.update' : :update,
+      payload: InlineRenderer.render(@status, nil, :status)
+    )
+  end
 
-    FeedInsertWorker.push_bulk(status.mentions.includes(:account).map(&:account).select { |mentioned_account| mentioned_account.local? }) do |account|
-      [status.id, account.id, :direct]
-    end
+  def update?
+    @options[:update]
   end
 
-  def deliver_to_own_conversation(status)
-    AccountConversation.add_status(status.account, status)
+  def broadcastable?
+    @status.public_visibility? && !@account.silenced? && (!@status.reblog? || Setting.show_reblogs_in_public_timelines)
   end
 end
diff --git a/app/services/fetch_link_card_service.rb b/app/services/fetch_link_card_service.rb
index 5732ce8ac..94dc6389f 100644
--- a/app/services/fetch_link_card_service.rb
+++ b/app/services/fetch_link_card_service.rb
@@ -13,12 +13,12 @@ class FetchLinkCardService < BaseService
   }iox
 
   def call(status)
-    @status = status
-    @url    = parse_urls
+    @status       = status
+    @original_url = parse_urls
 
-    return if @url.nil? || @status.preview_cards.any?
+    return if @original_url.nil? || @status.preview_cards.any?
 
-    @url = @url.to_s
+    @url = @original_url.to_s
 
     RedisLock.acquire(lock_options) do |lock|
       if lock.acquired?
@@ -31,7 +31,7 @@ class FetchLinkCardService < BaseService
 
     attach_card if @card&.persisted?
   rescue HTTP::Error, OpenSSL::SSL::SSLError, Addressable::URI::InvalidURIError, Mastodon::HostValidationError, Mastodon::LengthValidationError => e
-    Rails.logger.debug "Error fetching link #{@url}: #{e}"
+    Rails.logger.debug "Error fetching link #{@original_url}: #{e}"
     nil
   end
 
@@ -47,6 +47,12 @@ class FetchLinkCardService < BaseService
     return @html if defined?(@html)
 
     Request.new(:get, @url).add_headers('Accept' => 'text/html', 'User-Agent' => Mastodon::Version.user_agent + ' Bot').perform do |res|
+      # We follow redirects, and ideally we want to save the preview card for
+      # the destination URL and not any link shortener in-between, so here
+      # we set the URL to the one of the last response in the redirect chain
+      @url  = res.request.uri.to_s
+      @card = PreviewCard.find_or_initialize_by(url: @url) if @card.url != @url
+
       if res.code == 200 && res.mime_type == 'text/html'
         @html_charset = res.charset
         @html = res.body_with_limit
@@ -60,15 +66,19 @@ class FetchLinkCardService < BaseService
   def attach_card
     @status.preview_cards << @card
     Rails.cache.delete(@status)
+    Trends.links.register(@status)
   end
 
   def parse_urls
-    if @status.local?
-      urls = @status.text.scan(URL_PATTERN).map { |array| Addressable::URI.parse(array[1]).normalize }
-    else
-      html  = Nokogiri::HTML(@status.text)
-      links = html.css('a')
-      urls  = links.filter_map { |a| Addressable::URI.parse(a['href']) unless skip_link?(a) }.filter_map(&:normalize)
+    urls = begin
+      if @status.local?
+        @status.text.scan(URL_PATTERN).map { |array| Addressable::URI.parse(array[1]).normalize }
+      else
+        document = Nokogiri::HTML(@status.text)
+        links    = document.css('a')
+
+        links.filter_map { |a| Addressable::URI.parse(a['href']) unless skip_link?(a) }.filter_map(&:normalize)
+      end
     end
 
     urls.reject { |uri| bad_url?(uri) }.first
@@ -79,18 +89,16 @@ class FetchLinkCardService < BaseService
     uri.host.blank? || TagManager.instance.local_url?(uri.to_s) || !%w(http https).include?(uri.scheme)
   end
 
-  # rubocop:disable Naming/MethodParameterName
-  def mention_link?(a)
+  def mention_link?(anchor)
     @status.mentions.any? do |mention|
-      a['href'] == ActivityPub::TagManager.instance.url_for(mention.account)
+      anchor['href'] == ActivityPub::TagManager.instance.url_for(mention.account)
     end
   end
 
-  def skip_link?(a)
+  def skip_link?(anchor)
     # Avoid links for hashtags and mentions (microformats)
-    a['rel']&.include?('tag') || a['class']&.match?(/u-url|h-card/) || mention_link?(a)
+    anchor['rel']&.include?('tag') || anchor['class']&.match?(/u-url|h-card/) || mention_link?(anchor)
   end
-  # rubocop:enable Naming/MethodParameterName
 
   def attempt_oembed
     service = FetchOEmbedService.new
@@ -139,42 +147,14 @@ class FetchLinkCardService < BaseService
   def attempt_opengraph
     return if html.nil?
 
-    detector = CharlockHolmes::EncodingDetector.new
-    detector.strip_tags = true
-
-    guess      = detector.detect(@html, @html_charset)
-    encoding   = guess&.fetch(:confidence, 0).to_i > 60 ? guess&.fetch(:encoding, nil) : nil
-    page       = Nokogiri::HTML(@html, nil, encoding)
-    player_url = meta_property(page, 'twitter:player')
-
-    if player_url && !bad_url?(Addressable::URI.parse(player_url))
-      @card.type   = :video
-      @card.width  = meta_property(page, 'twitter:player:width') || 0
-      @card.height = meta_property(page, 'twitter:player:height') || 0
-      @card.html   = content_tag(:iframe, nil, src: player_url,
-                                               width: @card.width,
-                                               height: @card.height,
-                                               allowtransparency: 'true',
-                                               scrolling: 'no',
-                                               frameborder: '0')
-    else
-      @card.type = :link
-    end
-
-    @card.title            = meta_property(page, 'og:title').presence || page.at_xpath('//title')&.content || ''
-    @card.description      = meta_property(page, 'og:description').presence || meta_property(page, 'description') || ''
-    @card.image_remote_url = (Addressable::URI.parse(@url) + meta_property(page, 'og:image')).to_s if meta_property(page, 'og:image')
-
-    return if @card.title.blank? && @card.html.blank?
-
-    @card.save_with_optional_image!
-  end
+    link_details_extractor = LinkDetailsExtractor.new(@url, @html, @html_charset)
 
-  def meta_property(page, property)
-    page.at_xpath("//meta[contains(concat(' ', normalize-space(@property), ' '), ' #{property} ')]")&.attribute('content')&.value || page.at_xpath("//meta[@name=\"#{property}\"]")&.attribute('content')&.value
+    @card = PreviewCard.find_or_initialize_by(url: link_details_extractor.canonical_url) if link_details_extractor.canonical_url != @card.url
+    @card.assign_attributes(link_details_extractor.to_preview_card_attributes)
+    @card.save_with_optional_image! unless @card.title.blank? && @card.html.blank?
   end
 
   def lock_options
-    { redis: Redis.current, key: "fetch:#{@url}", autorelease: 15.minutes.seconds }
+    { redis: Redis.current, key: "fetch:#{@original_url}", autorelease: 15.minutes.seconds }
  end
 end
diff --git a/app/services/fetch_oembed_service.rb b/app/services/fetch_oembed_service.rb
index 60be9b9dc..4cbaa04c6 100644
--- a/app/services/fetch_oembed_service.rb
+++ b/app/services/fetch_oembed_service.rb
@@ -2,6 +2,7 @@
 
 class FetchOEmbedService
   ENDPOINT_CACHE_EXPIRES_IN = 24.hours.freeze
+  URL_REGEX = /(=(http[s]?(%3A|:)(\/\/|%2F%2F)))([^&]*)/i.freeze
 
   attr_reader :url, :options, :format, :endpoint_url
 
@@ -65,10 +66,12 @@ class FetchOEmbedService
   end
 
   def cache_endpoint!
+    return unless URL_REGEX.match?(@endpoint_url)
+
     url_domain = Addressable::URI.parse(@url).normalized_host
 
     endpoint_hash = {
-      endpoint: @endpoint_url.gsub(/(=(http[s]?(%3A|:)(\/\/|%2F%2F)))([^&]*)/i, '={url}'),
+      endpoint: @endpoint_url.gsub(URL_REGEX, '={url}'),
      format: @format,
     }
 
diff --git a/app/services/follow_service.rb b/app/services/follow_service.rb
index 329262cca..ed28e1371 100644
--- a/app/services/follow_service.rb
+++ b/app/services/follow_service.rb
@@ -68,7 +68,7 @@ class FollowService < BaseService
     follow_request = @source_account.request_follow!(@target_account, reblogs: @options[:reblogs], notify: @options[:notify], rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit])
 
     if @target_account.local?
-      LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, :follow_request)
+      LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
     elsif @target_account.activitypub?
       ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url)
     end
@@ -79,7 +79,7 @@ class FollowService < BaseService
   def direct_follow!
     follow = @source_account.follow!(@target_account, reblogs: @options[:reblogs], notify: @options[:notify], rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit])
 
-    LocalNotificationWorker.perform_async(@target_account.id, follow.id, follow.class.name, :follow)
+    LocalNotificationWorker.perform_async(@target_account.id, follow.id, follow.class.name, 'follow')
     MergeWorker.perform_async(@target_account.id, @source_account.id)
 
     follow
diff --git a/app/services/import_service.rb b/app/services/import_service.rb
index 74ad5b79f..8e6640b9d 100644
--- a/app/services/import_service.rb
+++ b/app/services/import_service.rb
@@ -76,7 +76,7 @@ class ImportService < BaseService
       if presence_hash[target_account.acct]
         items.delete(target_account.acct)
         extra = presence_hash[target_account.acct][1]
-        Import::RelationshipWorker.perform_async(@account.id, target_account.acct, action, extra)
+        Import::RelationshipWorker.perform_async(@account.id, target_account.acct, action, extra.stringify_keys)
       else
         Import::RelationshipWorker.perform_async(@account.id, target_account.acct, undo_action)
       end
@@ -87,7 +87,7 @@ class ImportService < BaseService
     tail_items = items - head_items
 
     Import::RelationshipWorker.push_bulk(head_items + tail_items) do |acct, extra|
-      [@account.id, acct, action, extra]
+      [@account.id, acct, action, extra.stringify_keys]
     end
   end
 
diff --git a/app/services/notify_service.rb b/app/services/notify_service.rb
index fc187db40..09e28b76b 100644
--- a/app/services/notify_service.rb
+++ b/app/services/notify_service.rb
@@ -67,8 +67,49 @@ class NotifyService < BaseService
     message? && @notification.target_status.direct_visibility?
   end
 
+  # Returns true if the sender has been mentionned by the recipient up the thread
   def response_to_recipient?
-    @notification.target_status.in_reply_to_account_id == @recipient.id && @notification.target_status.thread&.direct_visibility?
+    return false if @notification.target_status.in_reply_to_id.nil?
+
+    # Using an SQL CTE to avoid unneeded back-and-forth with SQL server in case of long threads
+    !Status.count_by_sql([<<-SQL.squish, id: @notification.target_status.in_reply_to_id, recipient_id: @recipient.id, sender_id: @notification.from_account.id]).zero?
+      WITH RECURSIVE ancestors(id, in_reply_to_id, replying_to_sender) AS (
+          SELECT
+            s.id, s.in_reply_to_id, (CASE
+              WHEN s.account_id = :recipient_id THEN
+                EXISTS (
+                  SELECT *
+                  FROM mentions m
+                  WHERE m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
+                )
+              ELSE
+                FALSE
+            END)
+          FROM statuses s
+          WHERE s.id = :id
+        UNION ALL
+          SELECT
+            s.id,
+            s.in_reply_to_id,
+            (CASE
+              WHEN s.account_id = :recipient_id THEN
+                EXISTS (
+                  SELECT *
+                  FROM mentions m
+                  WHERE m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
+                )
+              ELSE
+                FALSE
+            END)
+          FROM ancestors st
+          JOIN statuses s ON s.id = st.in_reply_to_id
+          WHERE st.replying_to_sender IS FALSE
+      )
+      SELECT COUNT(*)
+      FROM ancestors st
+      JOIN statuses s ON s.id = st.id
+      WHERE st.replying_to_sender IS TRUE AND s.visibility = 3
+    SQL
   end
 
   def from_staff?
@@ -127,7 +168,7 @@ class NotifyService < BaseService
   def push_notification!
     return if @notification.activity.nil?
 
-    Redis.current.publish("timeline:#{@recipient.id}", Oj.dump(event: :notification, payload: InlineRenderer.render(@notification, @recipient, :notification)))
+    Redis.current.publish("timeline:#{@recipient.id}:notifications", Oj.dump(event: :notification, payload: InlineRenderer.render(@notification, @recipient, :notification)))
     send_push_notifications!
   end
 
diff --git a/app/services/post_status_service.rb b/app/services/post_status_service.rb
index 250d0e8ed..9d26e0f5b 100644
--- a/app/services/post_status_service.rb
+++ b/app/services/post_status_service.rb
@@ -83,6 +83,9 @@ class PostStatusService < BaseService
     status_for_validation = @account.statuses.build(status_attributes)
 
     if status_for_validation.valid?
+      # Marking the status as destroyed is necessary to prevent the status from being
+      # persisted when the associated media attachments get updated when creating the
+      # scheduled status.
       status_for_validation.destroy
 
       # The following transaction block is needed to wrap the UPDATEs to
@@ -97,7 +100,8 @@ class PostStatusService < BaseService
   end
 
   def postprocess_status!
-    LinkCrawlWorker.perform_async(@status.id) unless @status.spoiler_text?
+    Trends.tags.register(@status)
+    LinkCrawlWorker.perform_async(@status.id)
     DistributionWorker.perform_async(@status.id)
     ActivityPub::DistributionWorker.perform_async(@status.id) unless @status.local_only?
     PollExpirationNotifyWorker.perform_at(@status.poll.expires_at, @status.poll.id) if @status.poll
diff --git a/app/services/process_hashtags_service.rb b/app/services/process_hashtags_service.rb
index c42b79db8..47277c56c 100644
--- a/app/services/process_hashtags_service.rb
+++ b/app/services/process_hashtags_service.rb
@@ -8,7 +8,7 @@ class ProcessHashtagsService < BaseService
     Tag.find_or_create_by_names(tags) do |tag|
       status.tags << tag
       records << tag
-      tag.use!(status.account, status: status, at_time: status.created_at) if status.public_visibility?
+      tag.update(last_status_at: status.created_at) if tag.last_status_at.nil? || (tag.last_status_at < status.created_at && tag.last_status_at < 12.hours.ago)
     end
 
     return unless status.distributable?
diff --git a/app/services/process_mentions_service.rb b/app/services/process_mentions_service.rb
index ec4cb11f9..9d239fc65 100644
--- a/app/services/process_mentions_service.rb
+++ b/app/services/process_mentions_service.rb
@@ -8,12 +8,23 @@ class ProcessMentionsService < BaseService
   # remote users
   # @param [Status] status
   def call(status)
-    return unless status.local?
+    @status = status
 
-    @status  = status
-    mentions = []
+    return unless @status.local?
 
-    status.text = status.text.gsub(Account::MENTION_RE) do |match|
+    @previous_mentions = @status.active_mentions.includes(:account).to_a
+    @current_mentions  = []
+
+    Status.transaction do
+      scan_text!
+      assign_mentions!
+    end
+  end
+
+  private
+
+  def scan_text!
+    @status.text = @status.text.gsub(Account::MENTION_RE) do |match|
       username, domain = Regexp.last_match(1).split('@')
 
       domain = begin
@@ -26,49 +37,45 @@ class ProcessMentionsService < BaseService
 
       mentioned_account = Account.find_remote(username, domain)
 
+      # If the account cannot be found or isn't the right protocol,
+      # first try to resolve it
       if mention_undeliverable?(mentioned_account)
         begin
-          mentioned_account = resolve_account_service.call(Regexp.last_match(1))
+          mentioned_account = ResolveAccountService.new.call(Regexp.last_match(1))
         rescue Webfinger::Error, HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::UnexpectedResponseError
           mentioned_account = nil
         end
       end
 
+      # If after resolving it still isn't found or isn't the right
+      # protocol, then give up
       next match if mention_undeliverable?(mentioned_account) || mentioned_account&.suspended?
 
-      mention = mentioned_account.mentions.new(status: status)
-      mentions << mention if mention.save
+      mention   = @previous_mentions.find { |x| x.account_id == mentioned_account.id }
+      mention ||= mentioned_account.mentions.new(status: @status)
+
+      @current_mentions << mention
 
       "@#{mentioned_account.acct}"
     end
 
-    status.save!
-
-    mentions.each { |mention| create_notification(mention) }
+    @status.save!
   end
 
-  private
-
-  def mention_undeliverable?(mentioned_account)
-    mentioned_account.nil? || (!mentioned_account.local? && mentioned_account.ostatus?)
-  end
-
-  def create_notification(mention)
-    mentioned_account = mention.account
-
-    if mentioned_account.local?
-      LocalNotificationWorker.perform_async(mentioned_account.id, mention.id, mention.class.name, :mention)
-    elsif mentioned_account.activitypub? && !@status.local_only?
-      ActivityPub::DeliveryWorker.perform_async(activitypub_json, mention.status.account_id, mentioned_account.inbox_url, { synchronize_followers: !mention.status.distributable? })
+  def assign_mentions!
+    @current_mentions.each do |mention|
+      mention.save if mention.new_record?
     end
-  end
 
-  def activitypub_json
-    return @activitypub_json if defined?(@activitypub_json)
-    @activitypub_json = Oj.dump(serialize_payload(ActivityPub::ActivityPresenter.from_status(@status), ActivityPub::ActivitySerializer, signer: @status.account))
+    # If previous mentions are no longer contained in the text, convert them
+    # to silent mentions, since withdrawing access from someone who already
+    # received a notification might be more confusing
+    removed_mentions = @previous_mentions - @current_mentions
+
+    Mention.where(id: removed_mentions.map(&:id)).update_all(silent: true) unless removed_mentions.empty?
   end
 
-  def resolve_account_service
-    ResolveAccountService.new
+  def mention_undeliverable?(mentioned_account)
+    mentioned_account.nil? || (!mentioned_account.local? && !mentioned_account.activitypub?)
   end
 end
diff --git a/app/services/purge_domain_service.rb b/app/services/purge_domain_service.rb
new file mode 100644
index 000000000..9df81f13e
--- /dev/null
+++ b/app/services/purge_domain_service.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class PurgeDomainService < BaseService
+  def call(domain)
+    Account.remote.where(domain: domain).reorder(nil).find_each do |account|
+      DeleteAccountService.new.call(account, reserve_username: false, skip_side_effects: true)
+    end
+    CustomEmoji.remote.where(domain: domain).reorder(nil).find_each(&:destroy)
+    Instance.refresh
+  end
+end
diff --git a/app/services/reblog_service.rb b/app/services/reblog_service.rb
index f41276de0..6556fbff7 100644
--- a/app/services/reblog_service.rb
+++ b/app/services/reblog_service.rb
@@ -30,12 +30,13 @@ class ReblogService < BaseService
 
     reblog = account.statuses.create!(reblog: reblogged_status, text: '', visibility: visibility, rate_limit: options[:with_rate_limit])
 
+    Trends.tags.register(reblog)
+    Trends.links.register(reblog)
+
     DistributionWorker.perform_async(reblog.id)
     ActivityPub::DistributionWorker.perform_async(reblog.id) unless reblogged_status.local_only?
 
     create_notification(reblog)
     bump_potential_friendship(account, reblog)
-    record_use(account, reblog)
 
     reblog
   end
@@ -46,7 +47,7 @@ class ReblogService < BaseService
     reblogged_status = reblog.reblog
 
     if reblogged_status.account.local?
-      LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, :reblog)
+      LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog')
     elsif reblogged_status.account.activitypub? && !reblogged_status.account.following?(reblog.account)
       ActivityPub::DeliveryWorker.perform_async(build_json(reblog), reblog.account_id, reblogged_status.account.inbox_url)
     end
@@ -60,16 +61,6 @@ class ReblogService < BaseService
     PotentialFriendshipTracker.record(account.id, reblog.reblog.account_id, :reblog)
   end
 
-  def record_use(account, reblog)
-    return unless reblog.public_visibility?
-
-    original_status = reblog.reblog
-
-    original_status.tags.each do |tag|
-      tag.use!(account)
-    end
-  end
-
   def build_json(reblog)
     Oj.dump(serialize_payload(ActivityPub::ActivityPresenter.from_status(reblog), ActivityPub::ActivitySerializer, signer: reblog.account))
   end
diff --git a/app/services/remove_from_followers_service.rb b/app/services/remove_from_followers_service.rb
new file mode 100644
index 000000000..3dac5467f
--- /dev/null
+++ b/app/services/remove_from_followers_service.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class RemoveFromFollowersService < BaseService
+  include Payloadable
+
+  def call(source_account, target_accounts)
+    source_account.passive_relationships.where(account_id: target_accounts).find_each do |follow|
+      follow.destroy
+
+      if source_account.local? && !follow.account.local? && follow.account.activitypub?
+        create_notification(follow)
+      end
+    end
+  end
+
+  private
+
+  def create_notification(follow)
+    ActivityPub::DeliveryWorker.perform_async(build_json(follow), follow.target_account_id, follow.account.inbox_url)
+  end
+
+  def build_json(follow)
+    Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer))
+  end
+end
diff --git a/app/services/remove_status_service.rb b/app/services/remove_status_service.rb
index 20c17e6df..e41ad2b0a 100644
--- a/app/services/remove_status_service.rb
+++ b/app/services/remove_status_service.rb
@@ -9,6 +9,7 @@ class RemoveStatusService < BaseService
   # @param [Hash] options
   # @option [Boolean] :redraft
   # @option [Boolean] :immediate
+  # @option [Boolean] :preserve
   # @option [Boolean] :original_removed
   def call(status, **options)
     @payload = Oj.dump(event: :delete, payload: status.id.to_s)
@@ -44,7 +45,7 @@ class RemoveStatusService < BaseService
         remove_media
       end
 
-      @status.destroy! if @options[:immediate] || !@status.reported?
+      @status.destroy! if permanently?
     else
       raise Mastodon::RaceConditionError
     end
@@ -88,7 +89,7 @@ class RemoveStatusService < BaseService
     # the author and wouldn't normally receive the delete
    # notification - so here, we explicitly send it to them
 
-    status_reach_finder = StatusReachFinder.new(@status)
+    status_reach_finder = StatusReachFinder.new(@status, unsafe: true)
 
     ActivityPub::DeliveryWorker.push_bulk(status_reach_finder.inboxes) do |inbox_url|
       [signed_activity_json, @account.id, inbox_url]
@@ -104,7 +105,7 @@ class RemoveStatusService < BaseService
     # because once original status is gone, reblogs will disappear
     # without us being able to do all the fancy stuff
 
-    @status.reblogs.includes(:account).find_each do |reblog|
+    @status.reblogs.includes(:account).reorder(nil).find_each do |reblog|
       RemoveStatusService.new.call(reblog, original_removed: true)
     end
   end
@@ -143,11 +144,15 @@ class RemoveStatusService < BaseService
   end
 
   def remove_media
-    return if @options[:redraft] || (!@options[:immediate] && @status.reported?)
+    return if @options[:redraft] || !permanently?
 
     @status.media_attachments.destroy_all
   end
 
+  def permanently?
+    @options[:immediate] || !(@options[:preserve] || @status.reported?)
+  end
+
   def lock_options
     { redis: Redis.current, key: "distribute:#{@status.id}", autorelease: 5.minutes.seconds }
   end
diff --git a/app/services/resolve_account_service.rb b/app/services/resolve_account_service.rb
index 5400612bf..3a372ef2a 100644
--- a/app/services/resolve_account_service.rb
+++ b/app/services/resolve_account_service.rb
@@ -142,7 +142,8 @@ class ResolveAccountService < BaseService
   end
 
   def queue_deletion!
-    AccountDeletionWorker.perform_async(@account.id, reserve_username: false, skip_activitypub: true)
+    @account.suspend!(origin: :remote)
+    AccountDeletionWorker.perform_async(@account.id, { 'reserve_username' => false, 'skip_activitypub' => true })
   end
 
   def lock_options
diff --git a/app/services/unsuspend_account_service.rb b/app/services/unsuspend_account_service.rb
index 949c670aa..39d8a6ba7 100644
--- a/app/services/unsuspend_account_service.rb
+++ b/app/services/unsuspend_account_service.rb
@@ -1,13 +1,14 @@
 # frozen_string_literal: true
 
 class UnsuspendAccountService < BaseService
+  include Payloadable
 
   def call(account)
     @account = account
 
     unsuspend!
     refresh_remote_account!
 
-    return if @account.nil?
+    return if @account.nil? || @account.suspended?
 
     merge_into_home_timelines!
     merge_into_list_timelines!