Fix “Scoped order is ignored, it's forced to be batch order.” warnings (#26793)

lolsob-rspec
Claire 2023-09-05 15:37:23 +02:00 committed by GitHub
parent 7b181d2005
commit 96f51d6c1f
15 changed files with 25 additions and 22 deletions

View File

@@ -262,7 +262,7 @@ class FeedManager
add_to_feed(:home, account.id, status, aggregate_reblogs: aggregate) add_to_feed(:home, account.id, status, aggregate_reblogs: aggregate)
end end
account.following.includes(:account_stat).find_each do |target_account| account.following.includes(:account_stat).reorder(nil).find_each do |target_account|
if redis.zcard(timeline_key) >= limit if redis.zcard(timeline_key) >= limit
oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true).first.last.to_i oldest_home_score = redis.zrange(timeline_key, 0, 0, with_scores: true).first.last.to_i
last_status_score = Mastodon::Snowflake.id_at(target_account.last_status_at) last_status_score = Mastodon::Snowflake.id_at(target_account.last_status_at)

View File

@@ -27,6 +27,6 @@ class Importer::PublicStatusesIndexImporter < Importer::BaseImporter
end end
def scope def scope
Status.indexable Status.indexable.reorder(nil)
end end
end end

View File

@@ -11,7 +11,7 @@ class Importer::StatusesIndexImporter < Importer::BaseImporter
# from a different scope to avoid indexing them multiple times, but that # from a different scope to avoid indexing them multiple times, but that
# could end up being a very large array # could end up being a very large array
scope.find_in_batches(batch_size: @batch_size) do |tmp| scope.reorder(nil).find_in_batches(batch_size: @batch_size) do |tmp|
in_work_unit(tmp.map(&:status_id)) do |status_ids| in_work_unit(tmp.map(&:status_id)) do |status_ids|
deleted = 0 deleted = 0

View File

@@ -22,7 +22,7 @@ class Admin::StatusBatchAction
private private
def statuses def statuses
Status.with_discarded.where(id: status_ids) Status.with_discarded.where(id: status_ids).reorder(nil)
end end
def process_action! def process_action!

View File

@@ -20,7 +20,7 @@ module AccountMerging
] ]
owned_classes.each do |klass| owned_classes.each do |klass|
klass.where(account_id: other_account.id).find_each do |record| klass.where(account_id: other_account.id).reorder(nil).find_each do |record|
record.update_attribute(:account_id, id) record.update_attribute(:account_id, id)
rescue ActiveRecord::RecordNotUnique rescue ActiveRecord::RecordNotUnique
next next
@@ -33,7 +33,7 @@ module AccountMerging
] ]
target_classes.each do |klass| target_classes.each do |klass|
klass.where(target_account_id: other_account.id).find_each do |record| klass.where(target_account_id: other_account.id).reorder(nil).find_each do |record|
record.update_attribute(:target_account_id, id) record.update_attribute(:target_account_id, id)
rescue ActiveRecord::RecordNotUnique rescue ActiveRecord::RecordNotUnique
next next

View File

@@ -31,7 +31,7 @@ module AccountStatusesSearch
def add_to_public_statuses_index! def add_to_public_statuses_index!
return unless Chewy.enabled? return unless Chewy.enabled?
statuses.without_reblogs.where(visibility: :public).find_in_batches do |batch| statuses.without_reblogs.where(visibility: :public).reorder(nil).find_in_batches do |batch|
PublicStatusesIndex.import(batch) PublicStatusesIndex.import(batch)
end end
end end

View File

@@ -62,13 +62,13 @@ class Trends::Statuses < Trends::Base
def refresh(at_time = Time.now.utc) def refresh(at_time = Time.now.utc)
# First, recalculate scores for statuses that were trending previously. We split the queries # First, recalculate scores for statuses that were trending previously. We split the queries
# to avoid having to load all of the IDs into Ruby just to send them back into Postgres # to avoid having to load all of the IDs into Ruby just to send them back into Postgres
Status.where(id: StatusTrend.select(:status_id)).includes(:status_stat, :account).find_in_batches(batch_size: BATCH_SIZE) do |statuses| Status.where(id: StatusTrend.select(:status_id)).includes(:status_stat, :account).reorder(nil).find_in_batches(batch_size: BATCH_SIZE) do |statuses|
calculate_scores(statuses, at_time) calculate_scores(statuses, at_time)
end end
# Then, calculate scores for statuses that were used today. There are potentially some # Then, calculate scores for statuses that were used today. There are potentially some
# duplicate items here that we might process one more time, but that should be fine # duplicate items here that we might process one more time, but that should be fine
Status.where(id: recently_used_ids(at_time)).includes(:status_stat, :account).find_in_batches(batch_size: BATCH_SIZE) do |statuses| Status.where(id: recently_used_ids(at_time)).includes(:status_stat, :account).reorder(nil).find_in_batches(batch_size: BATCH_SIZE) do |statuses|
calculate_scores(statuses, at_time) calculate_scores(statuses, at_time)
end end

View File

@@ -38,7 +38,7 @@ class BulkImportService < BaseService
rows_by_acct = extract_rows_by_acct rows_by_acct = extract_rows_by_acct
if @import.overwrite? if @import.overwrite?
@account.following.find_each do |followee| @account.following.reorder(nil).find_each do |followee|
row = rows_by_acct.delete(followee.acct) row = rows_by_acct.delete(followee.acct)
if row.nil? if row.nil?
@@ -67,7 +67,7 @@ class BulkImportService < BaseService
rows_by_acct = extract_rows_by_acct rows_by_acct = extract_rows_by_acct
if @import.overwrite? if @import.overwrite?
@account.blocking.find_each do |blocked_account| @account.blocking.reorder(nil).find_each do |blocked_account|
row = rows_by_acct.delete(blocked_account.acct) row = rows_by_acct.delete(blocked_account.acct)
if row.nil? if row.nil?
@@ -93,7 +93,7 @@ class BulkImportService < BaseService
rows_by_acct = extract_rows_by_acct rows_by_acct = extract_rows_by_acct
if @import.overwrite? if @import.overwrite?
@account.muting.find_each do |muted_account| @account.muting.reorder(nil).find_each do |muted_account|
row = rows_by_acct.delete(muted_account.acct) row = rows_by_acct.delete(muted_account.acct)
if row.nil? if row.nil?

View File

@@ -75,7 +75,7 @@ class ImportService < BaseService
if @import.overwrite? if @import.overwrite?
presence_hash = items.each_with_object({}) { |(id, extra), mapping| mapping[id] = [true, extra] } presence_hash = items.each_with_object({}) { |(id, extra), mapping| mapping[id] = [true, extra] }
overwrite_scope.find_each do |target_account| overwrite_scope.reorder(nil).find_each do |target_account|
if presence_hash[target_account.acct] if presence_hash[target_account.acct]
items.delete(target_account.acct) items.delete(target_account.acct)
extra = presence_hash[target_account.acct][1] extra = presence_hash[target_account.acct][1]

View File

@@ -51,13 +51,13 @@ class SuspendAccountService < BaseService
end end
def unmerge_from_home_timelines! def unmerge_from_home_timelines!
@account.followers_for_local_distribution.find_each do |follower| @account.followers_for_local_distribution.reorder(nil).find_each do |follower|
FeedManager.instance.unmerge_from_home(@account, follower) FeedManager.instance.unmerge_from_home(@account, follower)
end end
end end
def unmerge_from_list_timelines! def unmerge_from_list_timelines!
@account.lists_for_local_distribution.find_each do |list| @account.lists_for_local_distribution.reorder(nil).find_each do |list|
FeedManager.instance.unmerge_from_list(@account, list) FeedManager.instance.unmerge_from_list(@account, list)
end end
end end
@@ -65,7 +65,7 @@ class SuspendAccountService < BaseService
def privatize_media_attachments! def privatize_media_attachments!
attachment_names = MediaAttachment.attachment_definitions.keys attachment_names = MediaAttachment.attachment_definitions.keys
@account.media_attachments.find_each do |media_attachment| @account.media_attachments.reorder(nil).find_each do |media_attachment|
attachment_names.each do |attachment_name| attachment_names.each do |attachment_name|
attachment = media_attachment.public_send(attachment_name) attachment = media_attachment.public_send(attachment_name)
styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys

View File

@@ -47,13 +47,13 @@ class UnsuspendAccountService < BaseService
end end
def merge_into_home_timelines! def merge_into_home_timelines!
@account.followers_for_local_distribution.find_each do |follower| @account.followers_for_local_distribution.reorder(nil).find_each do |follower|
FeedManager.instance.merge_into_home(@account, follower) FeedManager.instance.merge_into_home(@account, follower)
end end
end end
def merge_into_list_timelines! def merge_into_list_timelines!
@account.lists_for_local_distribution.find_each do |list| @account.lists_for_local_distribution.reorder(nil).find_each do |list|
FeedManager.instance.merge_into_list(@account, list) FeedManager.instance.merge_into_list(@account, list)
end end
end end
@@ -61,7 +61,7 @@ class UnsuspendAccountService < BaseService
def publish_media_attachments! def publish_media_attachments!
attachment_names = MediaAttachment.attachment_definitions.keys attachment_names = MediaAttachment.attachment_definitions.keys
@account.media_attachments.find_each do |media_attachment| @account.media_attachments.reorder(nil).find_each do |media_attachment|
attachment_names.each do |attachment_name| attachment_names.each do |attachment_name|
attachment = media_attachment.public_send(attachment_name) attachment = media_attachment.public_send(attachment_name)
styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys styles = MediaAttachment::DEFAULT_STYLES | attachment.styles.keys

View File

@@ -72,7 +72,7 @@ class MoveWorker
def queue_follow_unfollows! def queue_follow_unfollows!
bypass_locked = @target_account.local? bypass_locked = @target_account.local?
@source_account.followers.local.select(:id).find_in_batches do |accounts| @source_account.followers.local.select(:id).reorder(nil).find_in_batches do |accounts|
UnfollowFollowWorker.push_bulk(accounts.map(&:id)) { |follower_id| [follower_id, @source_account.id, @target_account.id, bypass_locked] } UnfollowFollowWorker.push_bulk(accounts.map(&:id)) { |follower_id| [follower_id, @source_account.id, @target_account.id, bypass_locked] }
rescue => e rescue => e
@deferred_error = e @deferred_error = e

View File

@@ -50,6 +50,9 @@ Rails.application.configure do
config.x.vapid_private_key = vapid_key.private_key config.x.vapid_private_key = vapid_key.private_key
config.x.vapid_public_key = vapid_key.public_key config.x.vapid_public_key = vapid_key.public_key
# Raise exceptions when a reorder occurs in in_batches
config.active_record.error_on_ignored_order = true
# Raise exceptions for disallowed deprecations. # Raise exceptions for disallowed deprecations.
config.active_support.disallowed_deprecation = :raise config.active_support.disallowed_deprecation = :raise

View File

@@ -90,7 +90,7 @@ class BackfillAdminActionLogs < ActiveRecord::Migration[6.1]
log.update_attribute('route_param', log.user.account_id) log.update_attribute('route_param', log.user.account_id)
end end
Admin::ActionLog.where(target_type: 'Report', human_identifier: nil).in_batches.update_all('human_identifier = target_id::text') AdminActionLog.where(target_type: 'Report', human_identifier: nil).in_batches.update_all('human_identifier = target_id::text')
AdminActionLog.includes(:domain_block).where(target_type: 'DomainBlock').find_each do |log| AdminActionLog.includes(:domain_block).where(target_type: 'DomainBlock').find_each do |log|
next if log.domain_block.nil? next if log.domain_block.nil?

View File

@@ -90,7 +90,7 @@ class BackfillAdminActionLogsAgain < ActiveRecord::Migration[6.1]
log.update_attribute('route_param', log.user.account_id) log.update_attribute('route_param', log.user.account_id)
end end
Admin::ActionLog.where(target_type: 'Report', human_identifier: nil).in_batches.update_all('human_identifier = target_id::text') AdminActionLog.where(target_type: 'Report', human_identifier: nil).in_batches.update_all('human_identifier = target_id::text')
AdminActionLog.includes(:domain_block).where(target_type: 'DomainBlock').find_each do |log| AdminActionLog.includes(:domain_block).where(target_type: 'DomainBlock').find_each do |log|
next if log.domain_block.nil? next if log.domain_block.nil?