From 32a030dd74f9e0beed830daecb4dc6a4842aa14b Mon Sep 17 00:00:00 2001 From: Claire Date: Tue, 2 May 2023 12:08:48 +0200 Subject: [PATCH 01/96] Rewrite import feature (#21054) --- .rubocop.yml | 2 + .rubocop_todo.yml | 2 + .../settings/imports_controller.rb | 92 +++- app/lib/vacuum/imports_vacuum.rb | 18 + app/models/bulk_import.rb | 53 +++ app/models/bulk_import_row.rb | 15 + app/models/concerns/account_associations.rb | 3 + app/models/form/import.rb | 151 +++++++ app/models/import.rb | 4 +- app/services/bulk_import_row_service.rb | 60 +++ app/services/bulk_import_service.rb | 160 +++++++ app/services/import_service.rb | 3 + app/validators/import_validator.rb | 46 -- app/views/settings/imports/index.html.haml | 49 ++ app/views/settings/imports/show.html.haml | 20 +- app/workers/bulk_import_worker.rb | 13 + app/workers/import/relationship_worker.rb | 3 + app/workers/import/row_worker.rb | 33 ++ app/workers/import_worker.rb | 3 + app/workers/scheduler/vacuum_scheduler.rb | 5 + config/i18n-tasks.yml | 2 + config/locales/en.yml | 38 ++ config/navigation.rb | 2 +- config/routes.rb | 8 +- .../20230330135507_create_bulk_imports.rb | 22 + .../20230330140036_create_bulk_import_rows.rb | 12 + db/schema.rb | 29 +- .../settings/imports_controller_spec.rb | 316 +++++++++++-- spec/fabricators/bulk_import_fabricator.rb | 12 + .../fabricators/bulk_import_row_fabricator.rb | 6 + spec/fixtures/files/empty.csv | 0 spec/fixtures/files/following_accounts.csv | 5 + spec/fixtures/files/muted_accounts.csv | 5 + spec/lib/vacuum/imports_vacuum_spec.rb | 19 + spec/models/form/import_spec.rb | 281 ++++++++++++ spec/models/import_spec.rb | 15 - spec/services/bulk_import_row_service_spec.rb | 95 ++++ spec/services/bulk_import_service_spec.rb | 417 ++++++++++++++++++ spec/workers/bulk_import_worker_spec.rb | 26 ++ spec/workers/import/row_worker_spec.rb | 127 ++++++ 40 files changed, 2059 insertions(+), 113 deletions(-) create mode 100644 app/lib/vacuum/imports_vacuum.rb create mode 100644 app/models/bulk_import.rb create mode 100644 app/models/bulk_import_row.rb create mode 100644 app/models/form/import.rb create mode 100644 app/services/bulk_import_row_service.rb create mode 100644 app/services/bulk_import_service.rb delete mode 100644 app/validators/import_validator.rb create mode 100644 app/views/settings/imports/index.html.haml create mode 100644 app/workers/bulk_import_worker.rb create mode 100644 app/workers/import/row_worker.rb create mode 100644 db/migrate/20230330135507_create_bulk_imports.rb create mode 100644 db/migrate/20230330140036_create_bulk_import_rows.rb create mode 100644 spec/fabricators/bulk_import_fabricator.rb create mode 100644 spec/fabricators/bulk_import_row_fabricator.rb create mode 100644 spec/fixtures/files/empty.csv create mode 100644 spec/fixtures/files/following_accounts.csv create mode 100644 spec/fixtures/files/muted_accounts.csv create mode 100644 spec/lib/vacuum/imports_vacuum_spec.rb create mode 100644 spec/models/form/import_spec.rb create mode 100644 spec/services/bulk_import_row_service_spec.rb create mode 100644 spec/services/bulk_import_service_spec.rb create mode 100644 spec/workers/bulk_import_worker_spec.rb create mode 100644 spec/workers/import/row_worker_spec.rb diff --git a/.rubocop.yml b/.rubocop.yml index b510c43031..16a04ae958 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -65,6 +65,7 @@ Metrics/AbcSize: Metrics/BlockLength: CountAsOne: ['array', 'hash', 'heredoc', 'method_call'] Exclude: + - 'config/routes.rb' - 'lib/mastodon/*_cli.rb' - 'lib/tasks/*.rake' - 
'app/models/concerns/account_associations.rb' @@ -130,6 +131,7 @@ Metrics/ClassLength: - 'app/services/activitypub/process_account_service.rb' - 'app/services/activitypub/process_status_update_service.rb' - 'app/services/backup_service.rb' + - 'app/services/bulk_import_service.rb' - 'app/services/delete_account_service.rb' - 'app/services/fan_out_on_write_service.rb' - 'app/services/fetch_link_card_service.rb' diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index dec4f38528..446e38b0a7 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -741,6 +741,7 @@ RSpec/LetSetup: - 'spec/controllers/following_accounts_controller_spec.rb' - 'spec/controllers/oauth/authorized_applications_controller_spec.rb' - 'spec/controllers/oauth/tokens_controller_spec.rb' + - 'spec/controllers/settings/imports_controller_spec.rb' - 'spec/lib/activitypub/activity/delete_spec.rb' - 'spec/lib/vacuum/preview_cards_vacuum_spec.rb' - 'spec/models/account_spec.rb' @@ -755,6 +756,7 @@ RSpec/LetSetup: - 'spec/services/activitypub/process_collection_service_spec.rb' - 'spec/services/batched_remove_status_service_spec.rb' - 'spec/services/block_domain_service_spec.rb' + - 'spec/services/bulk_import_service_spec.rb' - 'spec/services/delete_account_service_spec.rb' - 'spec/services/import_service_spec.rb' - 'spec/services/notify_service_spec.rb' diff --git a/app/controllers/settings/imports_controller.rb b/app/controllers/settings/imports_controller.rb index d4516526ee..bdbf8796fe 100644 --- a/app/controllers/settings/imports_controller.rb +++ b/app/controllers/settings/imports_controller.rb @@ -1,31 +1,97 @@ # frozen_string_literal: true -class Settings::ImportsController < Settings::BaseController - before_action :set_account +require 'csv' - def show - @import = Import.new +class Settings::ImportsController < Settings::BaseController + before_action :set_bulk_import, only: [:show, :confirm, :destroy] + before_action :set_recent_imports, only: [:index] + + TYPE_TO_FILENAME_MAP = { + following: 'following_accounts_failures.csv', + blocking: 'blocked_accounts_failures.csv', + muting: 'muted_accounts_failures.csv', + domain_blocking: 'blocked_domains_failures.csv', + bookmarks: 'bookmarks_failures.csv', + }.freeze + + TYPE_TO_HEADERS_MAP = { + following: ['Account address', 'Show boosts', 'Notify on new posts', 'Languages'], + blocking: false, + muting: ['Account address', 'Hide notifications'], + domain_blocking: false, + bookmarks: false, + }.freeze + + def index + @import = Form::Import.new(current_account: current_account) + end + + def show; end + + def failures + @bulk_import = current_account.bulk_imports.where(state: :finished).find(params[:id]) + + respond_to do |format| + format.csv do + filename = TYPE_TO_FILENAME_MAP[@bulk_import.type.to_sym] + headers = TYPE_TO_HEADERS_MAP[@bulk_import.type.to_sym] + + export_data = CSV.generate(headers: headers, write_headers: true) do |csv| + @bulk_import.rows.find_each do |row| + case @bulk_import.type.to_sym + when :following + csv << [row.data['acct'], row.data.fetch('show_reblogs', true), row.data.fetch('notify', false), row.data['languages']&.join(', ')] + when :blocking + csv << [row.data['acct']] + when :muting + csv << [row.data['acct'], row.data.fetch('hide_notifications', true)] + when :domain_blocking + csv << [row.data['domain']] + when :bookmarks + csv << [row.data['uri']] + end + end + end + + send_data export_data, filename: filename + end + end + end + + def confirm + @bulk_import.update!(state: :scheduled) + 
BulkImportWorker.perform_async(@bulk_import.id) + redirect_to settings_imports_path, notice: I18n.t('imports.success') end def create - @import = Import.new(import_params) - @import.account = @account + @import = Form::Import.new(import_params.merge(current_account: current_account)) if @import.save - ImportWorker.perform_async(@import.id) - redirect_to settings_import_path, notice: I18n.t('imports.success') + redirect_to settings_import_path(@import.bulk_import.id) else - render :show + # We need to set recent imports as we are displaying the index again + set_recent_imports + render :index end end + def destroy + @bulk_import.destroy! + redirect_to settings_imports_path + end + private - def set_account - @account = current_user.account + def import_params + params.require(:form_import).permit(:data, :type, :mode) end - def import_params - params.require(:import).permit(:data, :type, :mode) + def set_bulk_import + @bulk_import = current_account.bulk_imports.where(state: :unconfirmed).find(params[:id]) + end + + def set_recent_imports + @recent_imports = current_account.bulk_imports.reorder(id: :desc).limit(10) end end diff --git a/app/lib/vacuum/imports_vacuum.rb b/app/lib/vacuum/imports_vacuum.rb new file mode 100644 index 0000000000..ffb9449a42 --- /dev/null +++ b/app/lib/vacuum/imports_vacuum.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class Vacuum::ImportsVacuum + def perform + clean_unconfirmed_imports! + clean_old_imports! + end + + private + + def clean_unconfirmed_imports! + BulkImport.where(state: :unconfirmed).where('created_at <= ?', 10.minutes.ago).reorder(nil).in_batches.delete_all + end + + def clean_old_imports! + BulkImport.where('created_at <= ?', 1.week.ago).reorder(nil).in_batches.delete_all + end +end diff --git a/app/models/bulk_import.rb b/app/models/bulk_import.rb new file mode 100644 index 0000000000..af9a9670bf --- /dev/null +++ b/app/models/bulk_import.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +# == Schema Information +# +# Table name: bulk_imports +# +# id :bigint(8) not null, primary key +# type :integer not null +# state :integer not null +# total_items :integer default(0), not null +# imported_items :integer default(0), not null +# processed_items :integer default(0), not null +# finished_at :datetime +# overwrite :boolean default(FALSE), not null +# likely_mismatched :boolean default(FALSE), not null +# original_filename :string default(""), not null +# account_id :bigint(8) not null +# created_at :datetime not null +# updated_at :datetime not null +# +class BulkImport < ApplicationRecord + self.inheritance_column = false + + belongs_to :account + has_many :rows, class_name: 'BulkImportRow', inverse_of: :bulk_import, dependent: :delete_all + + enum type: { + following: 0, + blocking: 1, + muting: 2, + domain_blocking: 3, + bookmarks: 4, + } + + enum state: { + unconfirmed: 0, + scheduled: 1, + in_progress: 2, + finished: 3, + } + + validates :type, presence: true + + def self.progress!(bulk_import_id, imported: false) + # Use `increment_counter` so that the incrementation is done atomically in the database + BulkImport.increment_counter(:processed_items, bulk_import_id) # rubocop:disable Rails/SkipsModelValidations + BulkImport.increment_counter(:imported_items, bulk_import_id) if imported # rubocop:disable Rails/SkipsModelValidations + + # Since the incrementation has been done atomically, concurrent access to `bulk_import` is now benign + bulk_import = BulkImport.find(bulk_import_id) + bulk_import.update!(state: :finished, 
finished_at: Time.now.utc) if bulk_import.processed_items == bulk_import.total_items + end +end diff --git a/app/models/bulk_import_row.rb b/app/models/bulk_import_row.rb new file mode 100644 index 0000000000..dd7190c970 --- /dev/null +++ b/app/models/bulk_import_row.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +# == Schema Information +# +# Table name: bulk_import_rows +# +# id :bigint(8) not null, primary key +# bulk_import_id :bigint(8) not null +# data :jsonb +# created_at :datetime not null +# updated_at :datetime not null +# +class BulkImportRow < ApplicationRecord + belongs_to :bulk_import +end diff --git a/app/models/concerns/account_associations.rb b/app/models/concerns/account_associations.rb index bbe269e8f0..592812e960 100644 --- a/app/models/concerns/account_associations.rb +++ b/app/models/concerns/account_associations.rb @@ -68,5 +68,8 @@ module AccountAssociations # Account statuses cleanup policy has_one :statuses_cleanup_policy, class_name: 'AccountStatusesCleanupPolicy', inverse_of: :account, dependent: :destroy + + # Imports + has_many :bulk_imports, inverse_of: :account, dependent: :delete_all end end diff --git a/app/models/form/import.rb b/app/models/form/import.rb new file mode 100644 index 0000000000..750ef84be4 --- /dev/null +++ b/app/models/form/import.rb @@ -0,0 +1,151 @@ +# frozen_string_literal: true + +require 'csv' + +# A non-ActiveRecord helper class for CSV uploads. +# Handles saving contents to database. +class Form::Import + include ActiveModel::Model + + MODES = %i(merge overwrite).freeze + + FILE_SIZE_LIMIT = 20.megabytes + ROWS_PROCESSING_LIMIT = 20_000 + + EXPECTED_HEADERS_BY_TYPE = { + following: ['Account address', 'Show boosts', 'Notify on new posts', 'Languages'], + blocking: ['Account address'], + muting: ['Account address', 'Hide notifications'], + domain_blocking: ['#domain'], + bookmarks: ['#uri'], + }.freeze + + KNOWN_FIRST_HEADERS = EXPECTED_HEADERS_BY_TYPE.values.map(&:first).uniq.freeze + + ATTRIBUTE_BY_HEADER = { + 'Account address' => 'acct', + 'Show boosts' => 'show_reblogs', + 'Notify on new posts' => 'notify', + 'Languages' => 'languages', + 'Hide notifications' => 'hide_notifications', + '#domain' => 'domain', + '#uri' => 'uri', + }.freeze + + class EmptyFileError < StandardError; end + + attr_accessor :current_account, :data, :type, :overwrite, :bulk_import + + validates :type, presence: true + validates :data, presence: true + validate :validate_data + + def guessed_type + return :muting if csv_data.headers.include?('Hide notifications') + return :following if csv_data.headers.include?('Show boosts') || csv_data.headers.include?('Notify on new posts') || csv_data.headers.include?('Languages') + return :following if data.original_filename&.start_with?('follows') || data.original_filename&.start_with?('following_accounts') + return :blocking if data.original_filename&.start_with?('blocks') || data.original_filename&.start_with?('blocked_accounts') + return :muting if data.original_filename&.start_with?('mutes') || data.original_filename&.start_with?('muted_accounts') + return :domain_blocking if data.original_filename&.start_with?('domain_blocks') || data.original_filename&.start_with?('blocked_domains') + return :bookmarks if data.original_filename&.start_with?('bookmarks') + end + + # Whether the uploaded CSV file seems to correspond to a different import type than the one selected + def likely_mismatched? + guessed_type.present? && guessed_type != type.to_sym + end + + def save + return false unless valid? 
+ + ApplicationRecord.transaction do + now = Time.now.utc + @bulk_import = current_account.bulk_imports.create(type: type, overwrite: overwrite || false, state: :unconfirmed, original_filename: data.original_filename, likely_mismatched: likely_mismatched?) + nb_items = BulkImportRow.insert_all(parsed_rows.map { |row| { bulk_import_id: bulk_import.id, data: row, created_at: now, updated_at: now } }).length # rubocop:disable Rails/SkipsModelValidations + @bulk_import.update(total_items: nb_items) + end + end + + def mode + overwrite ? :overwrite : :merge + end + + def mode=(str) + self.overwrite = str.to_sym == :overwrite + end + + private + + def default_csv_header + case type.to_sym + when :following, :blocking, :muting + 'Account address' + when :domain_blocking + '#domain' + when :bookmarks + '#uri' + end + end + + def csv_data + return @csv_data if defined?(@csv_data) + + csv_converter = lambda do |field, field_info| + case field_info.header + when 'Show boosts', 'Notify on new posts', 'Hide notifications' + ActiveModel::Type::Boolean.new.cast(field) + when 'Languages' + field&.split(',')&.map(&:strip)&.presence + when 'Account address' + field.strip.gsub(/\A@/, '') + when '#domain', '#uri' + field.strip + else + field + end + end + + @csv_data = CSV.open(data.path, encoding: 'UTF-8', skip_blanks: true, headers: true, converters: csv_converter) + @csv_data.take(1) # Ensure the headers are read + raise EmptyFileError if @csv_data.headers == true + + @csv_data = CSV.open(data.path, encoding: 'UTF-8', skip_blanks: true, headers: [default_csv_header], converters: csv_converter) unless KNOWN_FIRST_HEADERS.include?(@csv_data.headers&.first) + @csv_data + end + + def csv_row_count + return @csv_row_count if defined?(@csv_row_count) + + csv_data.rewind + @csv_row_count = csv_data.take(ROWS_PROCESSING_LIMIT + 2).count + end + + def parsed_rows + csv_data.rewind + + expected_headers = EXPECTED_HEADERS_BY_TYPE[type.to_sym] + + csv_data.take(ROWS_PROCESSING_LIMIT + 1).map do |row| + row.to_h.slice(*expected_headers).transform_keys { |key| ATTRIBUTE_BY_HEADER[key] } + end + end + + def validate_data + return if data.nil? + return errors.add(:data, I18n.t('imports.errors.too_large')) if data.size > FILE_SIZE_LIMIT + return errors.add(:data, I18n.t('imports.errors.incompatible_type')) unless csv_data.headers.include?(default_csv_header) + + errors.add(:data, I18n.t('imports.errors.over_rows_processing_limit', count: ROWS_PROCESSING_LIMIT)) if csv_row_count > ROWS_PROCESSING_LIMIT + + if type.to_sym == :following + base_limit = FollowLimitValidator.limit_for_account(current_account) + limit = base_limit + limit -= current_account.following_count unless overwrite + errors.add(:data, I18n.t('users.follow_limit_reached', limit: base_limit)) if csv_row_count > limit + end + rescue CSV::MalformedCSVError => e + errors.add(:data, I18n.t('imports.errors.invalid_csv_file', error: e.message)) + rescue EmptyFileError + errors.add(:data, I18n.t('imports.errors.empty')) + end +end diff --git a/app/models/import.rb b/app/models/import.rb index 21634005ed..7cd6cccf7c 100644 --- a/app/models/import.rb +++ b/app/models/import.rb @@ -17,6 +17,9 @@ # overwrite :boolean default(FALSE), not null # +# NOTE: This is a deprecated model, only kept to not break ongoing imports +# on upgrade. See `BulkImport` and `Form::Import` for its replacements. 
+ class Import < ApplicationRecord FILE_TYPES = %w(text/plain text/csv application/csv).freeze MODES = %i(merge overwrite).freeze @@ -28,7 +31,6 @@ class Import < ApplicationRecord enum type: { following: 0, blocking: 1, muting: 2, domain_blocking: 3, bookmarks: 4 } validates :type, presence: true - validates_with ImportValidator, on: :create has_attached_file :data validates_attachment_content_type :data, content_type: FILE_TYPES diff --git a/app/services/bulk_import_row_service.rb b/app/services/bulk_import_row_service.rb new file mode 100644 index 0000000000..4046ef4eed --- /dev/null +++ b/app/services/bulk_import_row_service.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +class BulkImportRowService + def call(row) + @account = row.bulk_import.account + @data = row.data + @type = row.bulk_import.type.to_sym + + case @type + when :following, :blocking, :muting + target_acct = @data['acct'] + target_domain = domain(target_acct) + @target_account = stoplight_wrap_request(target_domain) { ResolveAccountService.new.call(target_acct, { check_delivery_availability: true }) } + return false if @target_account.nil? + when :bookmarks + target_uri = @data['uri'] + target_domain = Addressable::URI.parse(target_uri).normalized_host + @target_status = ActivityPub::TagManager.instance.uri_to_resource(target_uri, Status) + return false if @target_status.nil? && ActivityPub::TagManager.instance.local_uri?(target_uri) + + @target_status ||= stoplight_wrap_request(target_domain) { ActivityPub::FetchRemoteStatusService.new.call(target_uri) } + return false if @target_status.nil? + end + + case @type + when :following + FollowService.new.call(@account, @target_account, reblogs: @data['show_reblogs'], notify: @data['notify'], languages: @data['languages']) + when :blocking + BlockService.new.call(@account, @target_account) + when :muting + MuteService.new.call(@account, @target_account, notifications: @data['hide_notifications']) + when :bookmarks + return false unless StatusPolicy.new(@account, @target_status).show? + + @account.bookmarks.find_or_create_by!(status: @target_status) + end + + true + rescue ActiveRecord::RecordNotFound + false + end + + def domain(uri) + domain = uri.is_a?(Account) ? uri.domain : uri.split('@')[1] + TagManager.instance.local_domain?(domain) ? nil : TagManager.instance.normalize_domain(domain) + end + + def stoplight_wrap_request(domain, &block) + if domain.present? + Stoplight("source:#{domain}", &block) + .with_fallback { nil } + .with_threshold(1) + .with_cool_off_time(5.minutes.seconds) + .with_error_handler { |error, handle| error.is_a?(HTTP::Error) || error.is_a?(OpenSSL::SSL::SSLError) ? handle.call(error) : raise(error) } + .run + else + yield + end + end +end diff --git a/app/services/bulk_import_service.rb b/app/services/bulk_import_service.rb new file mode 100644 index 0000000000..2701b0c7e0 --- /dev/null +++ b/app/services/bulk_import_service.rb @@ -0,0 +1,160 @@ +# frozen_string_literal: true + +class BulkImportService < BaseService + def call(import) + @import = import + @account = @import.account + + case @import.type.to_sym + when :following + import_follows! + when :blocking + import_blocks! + when :muting + import_mutes! + when :domain_blocking + import_domain_blocks! + when :bookmarks + import_bookmarks! 
+ end + + @import.update!(state: :finished, finished_at: Time.now.utc) if @import.processed_items == @import.total_items + rescue + @import.update!(state: :finished, finished_at: Time.now.utc) + + raise + end + + private + + def extract_rows_by_acct + local_domain_suffix = "@#{Rails.configuration.x.local_domain}" + @import.rows.to_a.index_by { |row| row.data['acct'].delete_suffix(local_domain_suffix) } + end + + def import_follows! + rows_by_acct = extract_rows_by_acct + + if @import.overwrite? + @account.following.find_each do |followee| + row = rows_by_acct.delete(followee.acct) + + if row.nil? + UnfollowService.new.call(@account, followee) + else + row.destroy + @import.processed_items += 1 + @import.imported_items += 1 + + # Since we're updating the settings of an existing relationship, we can safely call + # FollowService directly + FollowService.new.call(@account, followee, reblogs: row.data['show_reblogs'], notify: row.data['notify'], languages: row.data['languages']) + end + end + + # Save pending infos due to `overwrite?` handling + @import.save! + end + + Import::RowWorker.push_bulk(rows_by_acct.values) do |row| + [row.id] + end + end + + def import_blocks! + rows_by_acct = extract_rows_by_acct + + if @import.overwrite? + @account.blocking.find_each do |blocked_account| + row = rows_by_acct.delete(blocked_account.acct) + + if row.nil? + UnblockService.new.call(@account, blocked_account) + else + row.destroy + @import.processed_items += 1 + @import.imported_items += 1 + BlockService.new.call(@account, blocked_account) + end + end + + # Save pending infos due to `overwrite?` handling + @import.save! + end + + Import::RowWorker.push_bulk(rows_by_acct.values) do |row| + [row.id] + end + end + + def import_mutes! + rows_by_acct = extract_rows_by_acct + + if @import.overwrite? + @account.muting.find_each do |muted_account| + row = rows_by_acct.delete(muted_account.acct) + + if row.nil? + UnmuteService.new.call(@account, muted_account) + else + row.destroy + @import.processed_items += 1 + @import.imported_items += 1 + MuteService.new.call(@account, muted_account, notifications: row.data['hide_notifications']) + end + end + + # Save pending infos due to `overwrite?` handling + @import.save! + end + + Import::RowWorker.push_bulk(rows_by_acct.values) do |row| + [row.id] + end + end + + def import_domain_blocks! + domains = @import.rows.map { |row| row.data['domain'] } + + if @import.overwrite? + @account.domain_blocks.find_each do |domain_block| + domain = domains.delete(domain_block.domain) + + @account.unblock_domain!(domain_block.domain) if domain.nil? + end + end + + @import.rows.delete_all + domains.each { |domain| @account.block_domain!(domain) } + @import.update!(processed_items: @import.total_items, imported_items: @import.total_items) + + AfterAccountDomainBlockWorker.push_bulk(domains) do |domain| + [@account.id, domain] + end + end + + def import_bookmarks! + rows_by_uri = @import.rows.index_by { |row| row.data['uri'] } + + if @import.overwrite? + @account.bookmarks.includes(:status).find_each do |bookmark| + row = rows_by_uri.delete(ActivityPub::TagManager.instance.uri_for(bookmark.status)) + + if row.nil? + bookmark.destroy! + else + row.destroy + @import.processed_items += 1 + @import.imported_items += 1 + end + end + + # Save pending infos due to `overwrite?` handling + @import.save! 
+ end + + Import::RowWorker.push_bulk(rows_by_uri.values) do |row| + [row.id] + end + end +end diff --git a/app/services/import_service.rb b/app/services/import_service.rb index f6c94cbb6f..133c081be5 100644 --- a/app/services/import_service.rb +++ b/app/services/import_service.rb @@ -2,6 +2,9 @@ require 'csv' +# NOTE: This is a deprecated service, only kept to not break ongoing imports +# on upgrade. See `BulkImportService` for its replacement. + class ImportService < BaseService ROWS_PROCESSING_LIMIT = 20_000 diff --git a/app/validators/import_validator.rb b/app/validators/import_validator.rb deleted file mode 100644 index 782baf5d6b..0000000000 --- a/app/validators/import_validator.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'csv' - -class ImportValidator < ActiveModel::Validator - KNOWN_HEADERS = [ - 'Account address', - '#domain', - '#uri', - ].freeze - - def validate(import) - return if import.type.blank? || import.data.blank? - - # We parse because newlines could be part of individual rows. This - # runs on create so we should be reading the local file here before - # it is uploaded to object storage or moved anywhere... - csv_data = CSV.parse(import.data.queued_for_write[:original].read) - - row_count = csv_data.size - row_count -= 1 if KNOWN_HEADERS.include?(csv_data.first&.first) - - import.errors.add(:data, I18n.t('imports.errors.over_rows_processing_limit', count: ImportService::ROWS_PROCESSING_LIMIT)) if row_count > ImportService::ROWS_PROCESSING_LIMIT - - case import.type - when 'following' - validate_following_import(import, row_count) - end - rescue CSV::MalformedCSVError - import.errors.add(:data, :malformed) - end - - private - - def validate_following_import(import, row_count) - base_limit = FollowLimitValidator.limit_for_account(import.account) - - limit = if import.overwrite? - base_limit - else - base_limit - import.account.following_count - end - - import.errors.add(:data, I18n.t('users.follow_limit_reached', limit: base_limit)) if row_count > limit - end -end diff --git a/app/views/settings/imports/index.html.haml b/app/views/settings/imports/index.html.haml new file mode 100644 index 0000000000..f55d850791 --- /dev/null +++ b/app/views/settings/imports/index.html.haml @@ -0,0 +1,49 @@ +- content_for :page_title do + = t('settings.import') + += simple_form_for @import, url: settings_imports_path do |f| + .field-group + = f.input :type, as: :grouped_select, collection: { constructive: %i(following bookmarks), destructive: %i(muting blocking domain_blocking) }, wrapper: :with_block_label, include_blank: false, label_method: ->(type) { I18n.t("imports.types.#{type}") }, group_label_method: ->(group) { I18n.t("imports.type_groups.#{group.first}") }, group_method: :last, hint: t('imports.preface') + + .fields-row + .fields-group.fields-row__column.fields-row__column-6 + = f.input :data, wrapper: :with_block_label, hint: t('simple_form.hints.imports.data') + .fields-group.fields-row__column.fields-row__column-6 + = f.input :mode, as: :radio_buttons, collection: Import::MODES, label_method: ->(mode) { safe_join([I18n.t("imports.modes.#{mode}"), content_tag(:span, I18n.t("imports.modes.#{mode}_long"), class: 'hint')]) }, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li' + + .actions + = f.button :button, t('imports.upload'), type: :submit + +- unless @recent_imports.empty? 
+ %hr.spacer/ + + %h3= t('imports.recent_imports') + + .table-wrapper + %table.table + %thead + %tr + %th= t('imports.type') + %th= t('imports.status') + %th= t('imports.imported') + %th= t('imports.time_started') + %th= t('imports.failures') + %tbody + - @recent_imports.each do |import| + %tr + %td= t("imports.types.#{import.type}") + %td + - if import.unconfirmed? + = link_to t("imports.states.#{import.state}"), settings_import_path(import) + - else + = t("imports.states.#{import.state}") + %td + #{import.imported_items} / #{import.total_items} + %td= l(import.created_at) + %td + - num_failed = import.processed_items - import.imported_items + - if num_failed.positive? + - if import.finished? + = link_to num_failed, failures_settings_import_path(import, format: 'csv') + - else + = num_failed diff --git a/app/views/settings/imports/show.html.haml b/app/views/settings/imports/show.html.haml index 7bb4beb01c..65954e3e1e 100644 --- a/app/views/settings/imports/show.html.haml +++ b/app/views/settings/imports/show.html.haml @@ -1,15 +1,15 @@ - content_for :page_title do - = t('settings.import') + = t("imports.titles.#{@bulk_import.type.to_s}") -= simple_form_for @import, url: settings_import_path do |f| - .field-group - = f.input :type, collection: Import.types.keys, wrapper: :with_block_label, include_blank: false, label_method: lambda { |type| I18n.t("imports.types.#{type}") }, hint: t('imports.preface') +- if @bulk_import.likely_mismatched? + .flash-message.warning= t("imports.mismatched_types_warning") - .fields-row - .fields-group.fields-row__column.fields-row__column-6 - = f.input :data, wrapper: :with_block_label, hint: t('simple_form.hints.imports.data') - .fields-group.fields-row__column.fields-row__column-6 - = f.input :mode, as: :radio_buttons, collection: Import::MODES, label_method: lambda { |mode| safe_join([I18n.t("imports.modes.#{mode}"), content_tag(:span, I18n.t("imports.modes.#{mode}_long"), class: 'hint')]) }, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li' +- if @bulk_import.overwrite? + %p.hint= t("imports.overwrite_preambles.#{@bulk_import.type.to_s}_html", filename: @bulk_import.original_filename, total_items: @bulk_import.total_items) +- else + %p.hint= t("imports.preambles.#{@bulk_import.type.to_s}_html", filename: @bulk_import.original_filename, total_items: @bulk_import.total_items) +.simple_form .actions - = f.button :button, t('imports.upload'), type: :submit + = link_to t('generic.cancel'), settings_import_path(@bulk_import), method: :delete, class: 'button button-tertiary' + = link_to t('generic.confirm'), confirm_settings_import_path(@bulk_import), method: :post, class: 'button' diff --git a/app/workers/bulk_import_worker.rb b/app/workers/bulk_import_worker.rb new file mode 100644 index 0000000000..54571a95c0 --- /dev/null +++ b/app/workers/bulk_import_worker.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +class BulkImportWorker + include Sidekiq::Worker + + sidekiq_options queue: 'pull', retry: false + + def perform(import_id) + import = BulkImport.find(import_id) + import.update!(state: :in_progress) + BulkImportService.new.call(import) + end +end diff --git a/app/workers/import/relationship_worker.rb b/app/workers/import/relationship_worker.rb index c2728c1961..a411ab5876 100644 --- a/app/workers/import/relationship_worker.rb +++ b/app/workers/import/relationship_worker.rb @@ -1,5 +1,8 @@ # frozen_string_literal: true +# NOTE: This is a deprecated worker, only kept to not break ongoing imports +# on upgrade. 
See `Import::RowWorker` for its replacement. + + class Import::RelationshipWorker include Sidekiq::Worker diff --git a/app/workers/import/row_worker.rb b/app/workers/import/row_worker.rb new file mode 100644 index 0000000000..09dd6ce736 --- /dev/null +++ b/app/workers/import/row_worker.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +class Import::RowWorker + include Sidekiq::Worker + + sidekiq_options queue: 'pull', retry: 6, dead: false + + sidekiq_retries_exhausted do |msg, _exception| + ActiveRecord::Base.connection_pool.with_connection do + # Increment the total number of processed items, and bump the state of the import if needed + bulk_import_id = BulkImportRow.where(id: msg['args'][0]).pick(:id) + BulkImport.progress!(bulk_import_id) unless bulk_import_id.nil? + end + end + + def perform(row_id) + row = BulkImportRow.eager_load(bulk_import: :account).find_by(id: row_id) + return true if row.nil? + + imported = BulkImportRowService.new.call(row) + + mark_as_processed!(row, imported) + end + + private + + def mark_as_processed!(row, imported) + bulk_import_id = row.bulk_import_id + row.destroy! if imported + + BulkImport.progress!(bulk_import_id, imported: imported) + end +end diff --git a/app/workers/import_worker.rb b/app/workers/import_worker.rb index dfa71b29ec..b6afb972a9 100644 --- a/app/workers/import_worker.rb +++ b/app/workers/import_worker.rb @@ -1,5 +1,8 @@ # frozen_string_literal: true +# NOTE: This is a deprecated worker, only kept to not break ongoing imports +# on upgrade. See `BulkImportWorker` for its replacement. + class ImportWorker include Sidekiq::Worker diff --git a/app/workers/scheduler/vacuum_scheduler.rb b/app/workers/scheduler/vacuum_scheduler.rb index 9544f808bf..9e884caefd 100644 --- a/app/workers/scheduler/vacuum_scheduler.rb +++ b/app/workers/scheduler/vacuum_scheduler.rb @@ -23,6 +23,7 @@ class Scheduler::VacuumScheduler backups_vacuum, access_tokens_vacuum, feeds_vacuum, + imports_vacuum, ] end @@ -50,6 +51,10 @@ class Scheduler::VacuumScheduler Vacuum::FeedsVacuum.new end + def imports_vacuum + Vacuum::ImportsVacuum.new + end + def content_retention_policy ContentRetentionPolicy.current end diff --git a/config/i18n-tasks.yml b/config/i18n-tasks.yml index 46dd3124bf..ddf503197c 100644 --- a/config/i18n-tasks.yml +++ b/config/i18n-tasks.yml @@ -64,6 +64,8 @@ ignore_unused: - 'statuses.attached.*' - 'move_handler.carry_{mutes,blocks}_over_text' - 'notification_mailer.*' + - 'imports.overwrite_preambles.{following,blocking,muting,domain_blocking,bookmarks}_html' + - 'imports.preambles.{following,blocking,muting,domain_blocking,bookmarks}_html' ignore_inconsistent_interpolations: - '*.one' diff --git a/config/locales/en.yml b/config/locales/en.yml index ddefbc49b1..0188519c26 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -1218,7 +1218,9 @@ en: all_matching_items_selected_html: one: "%{count} item matching your search is selected." other: All %{count} items matching your search are selected. + cancel: Cancel changes_saved_msg: Changes successfully saved! + confirm: Confirm copy: Copy delete: Delete deselect: Deselect all @@ -1234,15 +1236,51 @@ en: other: Something isn't quite right yet! Please review %{count} errors below imports: errors: + empty: Empty CSV file + incompatible_type: Incompatible with the selected import type invalid_csv_file: 'Invalid CSV file. 
Error: %{error}' over_rows_processing_limit: contains more than %{count} rows + too_large: File is too large + failures: Failures + imported: Imported + mismatched_types_warning: It appears you may have selected the wrong type for this import, please double-check. modes: merge: Merge merge_long: Keep existing records and add new ones overwrite: Overwrite overwrite_long: Replace current records with the new ones + overwrite_preambles: + blocking_html: You are about to replace your block list with up to %{total_items} accounts from %{filename}. + bookmarks_html: You are about to replace your bookmarks with up to %{total_items} posts from %{filename}. + domain_blocking_html: You are about to replace your domain block list with up to %{total_items} domains from %{filename}. + following_html: You are about to follow up to %{total_items} accounts from %{filename} and stop following anyone else. + muting_html: You are about to replace your list of muted accounts with up to %{total_items} accounts from %{filename}. + preambles: + blocking_html: You are about to block up to %{total_items} accounts from %{filename}. + bookmarks_html: You are about to add up to %{total_items} posts from %{filename} to your bookmarks. + domain_blocking_html: You are about to block up to %{total_items} domains from %{filename}. + following_html: You are about to follow up to %{total_items} accounts from %{filename}. + muting_html: You are about to mute up to %{total_items} accounts from %{filename}. preface: You can import data that you have exported from another server, such as a list of the people you are following or blocking. + recent_imports: Recent imports + states: + finished: Finished + in_progress: In progress + scheduled: Scheduled + unconfirmed: Unconfirmed + status: Status success: Your data was successfully uploaded and will be processed in due time + time_started: Started at + titles: + blocking: Importing blocked accounts + bookmarks: Importing bookmarks + domain_blocking: Importing blocked domains + following: Importing followed accounts + muting: Importing muted accounts + type: Import type + type_groups: + constructive: Follows & Bookmarks + destructive: Blocks & mutes types: blocking: Blocking list bookmarks: Bookmarks diff --git a/config/navigation.rb b/config/navigation.rb index 30817d0252..4b829c151a 100644 --- a/config/navigation.rb +++ b/config/navigation.rb @@ -26,7 +26,7 @@ SimpleNavigation::Configuration.run do |navigation| end n.item :data, safe_join([fa_icon('cloud-download fw'), t('settings.import_and_export')]), settings_export_path do |s| - s.item :import, safe_join([fa_icon('cloud-upload fw'), t('settings.import')]), settings_import_path, if: -> { current_user.functional? } + s.item :import, safe_join([fa_icon('cloud-upload fw'), t('settings.import')]), settings_imports_path, if: -> { current_user.functional? 
} s.item :export, safe_join([fa_icon('cloud-download fw'), t('settings.export')]), settings_export_path end diff --git a/config/routes.rb b/config/routes.rb index c09c5175a2..f177a1fdb9 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -144,7 +144,13 @@ Rails.application.routes.draw do resource :other, only: [:show, :update], controller: :other end - resource :import, only: [:show, :create] + resources :imports, only: [:index, :show, :destroy, :create] do + member do + post :confirm + get :failures + end + end + resource :export, only: [:show, :create] namespace :exports, constraints: { format: :csv } do diff --git a/db/migrate/20230330135507_create_bulk_imports.rb b/db/migrate/20230330135507_create_bulk_imports.rb new file mode 100644 index 0000000000..117a135086 --- /dev/null +++ b/db/migrate/20230330135507_create_bulk_imports.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +class CreateBulkImports < ActiveRecord::Migration[6.1] + def change + create_table :bulk_imports do |t| + t.integer :type, null: false + t.integer :state, null: false + t.integer :total_items, null: false, default: 0 + t.integer :imported_items, null: false, default: 0 + t.integer :processed_items, null: false, default: 0 + t.datetime :finished_at + t.boolean :overwrite, null: false, default: false + t.boolean :likely_mismatched, null: false, default: false + t.string :original_filename, null: false, default: '' + t.references :account, null: false, foreign_key: { on_delete: :cascade } + + t.timestamps + end + + add_index :bulk_imports, [:id], name: :index_bulk_imports_unconfirmed, where: 'state = 0' + end +end diff --git a/db/migrate/20230330140036_create_bulk_import_rows.rb b/db/migrate/20230330140036_create_bulk_import_rows.rb new file mode 100644 index 0000000000..ef6ad7069b --- /dev/null +++ b/db/migrate/20230330140036_create_bulk_import_rows.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +class CreateBulkImportRows < ActiveRecord::Migration[6.1] + def change + create_table :bulk_import_rows do |t| + t.references :bulk_import, null: false, foreign_key: { on_delete: :cascade } + t.jsonb :data + + t.timestamps + end + end +end diff --git a/db/schema.rb b/db/schema.rb index 620bed2bc9..7bd6f4d602 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. 
-ActiveRecord::Schema.define(version: 2023_02_15_074423) do +ActiveRecord::Schema.define(version: 2023_03_30_140036) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -294,6 +294,31 @@ ActiveRecord::Schema.define(version: 2023_02_15_074423) do t.index ["status_id"], name: "index_bookmarks_on_status_id" end + create_table "bulk_import_rows", force: :cascade do |t| + t.bigint "bulk_import_id", null: false + t.jsonb "data" + t.datetime "created_at", precision: 6, null: false + t.datetime "updated_at", precision: 6, null: false + t.index ["bulk_import_id"], name: "index_bulk_import_rows_on_bulk_import_id" + end + + create_table "bulk_imports", force: :cascade do |t| + t.integer "type", null: false + t.integer "state", null: false + t.integer "total_items", default: 0, null: false + t.integer "imported_items", default: 0, null: false + t.integer "processed_items", default: 0, null: false + t.datetime "finished_at" + t.boolean "overwrite", default: false, null: false + t.boolean "likely_mismatched", default: false, null: false + t.string "original_filename", default: "", null: false + t.bigint "account_id", null: false + t.datetime "created_at", precision: 6, null: false + t.datetime "updated_at", precision: 6, null: false + t.index ["account_id"], name: "index_bulk_imports_on_account_id" + t.index ["id"], name: "index_bulk_imports_unconfirmed", where: "(state = 0)" + end + create_table "canonical_email_blocks", force: :cascade do |t| t.string "canonical_email_hash", default: "", null: false t.bigint "reference_account_id" @@ -1146,6 +1171,8 @@ ActiveRecord::Schema.define(version: 2023_02_15_074423) do add_foreign_key "blocks", "accounts", name: "fk_4269e03e65", on_delete: :cascade add_foreign_key "bookmarks", "accounts", on_delete: :cascade add_foreign_key "bookmarks", "statuses", on_delete: :cascade + add_foreign_key "bulk_import_rows", "bulk_imports", on_delete: :cascade + add_foreign_key "bulk_imports", "accounts", on_delete: :cascade add_foreign_key "canonical_email_blocks", "accounts", column: "reference_account_id", on_delete: :cascade add_foreign_key "conversation_mutes", "accounts", name: "fk_225b4212bb", on_delete: :cascade add_foreign_key "conversation_mutes", "conversations", on_delete: :cascade diff --git a/spec/controllers/settings/imports_controller_spec.rb b/spec/controllers/settings/imports_controller_spec.rb index 98ba897e41..500b442396 100644 --- a/spec/controllers/settings/imports_controller_spec.rb +++ b/spec/controllers/settings/imports_controller_spec.rb @@ -5,13 +5,22 @@ require 'rails_helper' RSpec.describe Settings::ImportsController, type: :controller do render_views + let(:user) { Fabricate(:user) } + before do - sign_in Fabricate(:user), scope: :user + sign_in user, scope: :user end - describe 'GET #show' do + describe 'GET #index' do + let!(:import) { Fabricate(:bulk_import, account: user.account) } + let!(:other_import) { Fabricate(:bulk_import) } + before do - get :show + get :index + end + + it 'assigns the expected imports' do + expect(assigns(:recent_imports)).to eq [import] end it 'returns http success' do @@ -23,31 +32,288 @@ RSpec.describe Settings::ImportsController, type: :controller do end end - describe 'POST #create' do - it 'redirects to settings path with successful following import' do - service = double(call: nil) - allow(ResolveAccountService).to receive(:new).and_return(service) - post :create, params: { - import: { - type: 'following', - data: fixture_file_upload('imports.txt'), - }, - 
} - - expect(response).to redirect_to(settings_import_path) + describe 'GET #show' do + before do + get :show, params: { id: bulk_import.id } end - it 'redirects to settings path with successful blocking import' do - service = double(call: nil) - allow(ResolveAccountService).to receive(:new).and_return(service) - post :create, params: { - import: { - type: 'blocking', - data: fixture_file_upload('imports.txt'), - }, - } + context 'with someone else\'s import' do + let(:bulk_import) { Fabricate(:bulk_import, state: :unconfirmed) } - expect(response).to redirect_to(settings_import_path) + it 'returns http not found' do + expect(response).to have_http_status(404) + end + end + + context 'with an already-confirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :in_progress) } + + it 'returns http not found' do + expect(response).to have_http_status(404) + end + end + + context 'with an unconfirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :unconfirmed) } + + it 'returns http success' do + expect(response).to have_http_status(200) + end end end + + describe 'POST #confirm' do + subject { post :confirm, params: { id: bulk_import.id } } + + before do + allow(BulkImportWorker).to receive(:perform_async) + end + + context 'with someone else\'s import' do + let(:bulk_import) { Fabricate(:bulk_import, state: :unconfirmed) } + + it 'does not change the import\'s state' do + expect { subject }.to_not(change { bulk_import.reload.state }) + end + + it 'does not fire the import worker' do + subject + expect(BulkImportWorker).to_not have_received(:perform_async) + end + + it 'returns http not found' do + subject + expect(response).to have_http_status(404) + end + end + + context 'with an already-confirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :in_progress) } + + it 'does not change the import\'s state' do + expect { subject }.to_not(change { bulk_import.reload.state }) + end + + it 'does not fire the import worker' do + subject + expect(BulkImportWorker).to_not have_received(:perform_async) + end + + it 'returns http not found' do + subject + expect(response).to have_http_status(404) + end + end + + context 'with an unconfirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :unconfirmed) } + + it 'changes the import\'s state to scheduled' do + expect { subject }.to change { bulk_import.reload.state.to_sym }.from(:unconfirmed).to(:scheduled) + end + + it 'fires the import worker on the expected import' do + subject + expect(BulkImportWorker).to have_received(:perform_async).with(bulk_import.id) + end + + it 'redirects to imports path' do + subject + expect(response).to redirect_to(settings_imports_path) + end + end + end + + describe 'DELETE #destroy' do + subject { delete :destroy, params: { id: bulk_import.id } } + + context 'with someone else\'s import' do + let(:bulk_import) { Fabricate(:bulk_import, state: :unconfirmed) } + + it 'does not delete the import' do + expect { subject }.to_not(change { BulkImport.exists?(bulk_import.id) }) + end + + it 'returns http not found' do + subject + expect(response).to have_http_status(404) + end + end + + context 'with an already-confirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :in_progress) } + + it 'does not delete the import' do + expect { subject }.to_not(change { BulkImport.exists?(bulk_import.id) }) + end + + it 'returns http not found' do + 
subject + expect(response).to have_http_status(404) + end + end + + context 'with an unconfirmed import' do + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, state: :unconfirmed) } + + it 'deletes the import' do + expect { subject }.to change { BulkImport.exists?(bulk_import.id) }.from(true).to(false) + end + + it 'redirects to imports path' do + subject + expect(response).to redirect_to(settings_imports_path) + end + end + end + + describe 'GET #failures' do + subject { get :failures, params: { id: bulk_import.id }, format: :csv } + + shared_examples 'export failed rows' do |expected_contents| + let(:bulk_import) { Fabricate(:bulk_import, account: user.account, type: import_type, state: :finished) } + + before do + bulk_import.update(total_items: bulk_import.rows.count, processed_items: bulk_import.rows.count, imported_items: 0) + end + + it 'returns http success' do + subject + expect(response).to have_http_status(200) + end + + it 'returns expected contents' do + subject + expect(response.body).to eq expected_contents + end + end + + context 'with follows' do + let(:import_type) { 'following' } + + let!(:rows) do + [ + { 'acct' => 'foo@bar' }, + { 'acct' => 'user@bar', 'show_reblogs' => false, 'notify' => true, 'languages' => ['fr', 'de'] }, + ].map { |data| Fabricate(:bulk_import_row, bulk_import: bulk_import, data: data) } + end + + include_examples 'export failed rows', "Account address,Show boosts,Notify on new posts,Languages\nfoo@bar,true,false,\nuser@bar,false,true,\"fr, de\"\n" + end + + context 'with blocks' do + let(:import_type) { 'blocking' } + + let!(:rows) do + [ + { 'acct' => 'foo@bar' }, + { 'acct' => 'user@bar' }, + ].map { |data| Fabricate(:bulk_import_row, bulk_import: bulk_import, data: data) } + end + + include_examples 'export failed rows', "foo@bar\nuser@bar\n" + end + + context 'with mutes' do + let(:import_type) { 'muting' } + + let!(:rows) do + [ + { 'acct' => 'foo@bar' }, + { 'acct' => 'user@bar', 'hide_notifications' => false }, + ].map { |data| Fabricate(:bulk_import_row, bulk_import: bulk_import, data: data) } + end + + include_examples 'export failed rows', "Account address,Hide notifications\nfoo@bar,true\nuser@bar,false\n" + end + + context 'with domain blocks' do + let(:import_type) { 'domain_blocking' } + + let!(:rows) do + [ + { 'domain' => 'bad.domain' }, + { 'domain' => 'evil.domain' }, + ].map { |data| Fabricate(:bulk_import_row, bulk_import: bulk_import, data: data) } + end + + include_examples 'export failed rows', "bad.domain\nevil.domain\n" + end + + context 'with bookmarks' do + let(:import_type) { 'bookmarks' } + + let!(:rows) do + [ + { 'uri' => 'https://foo.com/1' }, + { 'uri' => 'https://foo.com/2' }, + ].map { |data| Fabricate(:bulk_import_row, bulk_import: bulk_import, data: data) } + end + + include_examples 'export failed rows', "https://foo.com/1\nhttps://foo.com/2\n" + end + end + + describe 'POST #create' do + subject do + post :create, params: { + form_import: { + type: import_type, + mode: import_mode, + data: fixture_file_upload(import_file), + }, + } + end + + shared_examples 'successful import' do |type, file, mode| + let(:import_type) { type } + let(:import_file) { file } + let(:import_mode) { mode } + + it 'creates an unconfirmed bulk_import with expected type' do + expect { subject }.to change { user.account.bulk_imports.pluck(:state, :type) }.from([]).to([['unconfirmed', import_type]]) + end + + it 'redirects to confirmation page for the import' do + subject + expect(response).to 
redirect_to(settings_import_path(user.account.bulk_imports.first)) + end + end + + shared_examples 'unsuccessful import' do |type, file, mode| + let(:import_type) { type } + let(:import_file) { file } + let(:import_mode) { mode } + + it 'does not creates an unconfirmed bulk_import' do + expect { subject }.to_not(change { user.account.bulk_imports.count }) + end + + it 'sets error to the import' do + subject + expect(assigns(:import).errors).to_not be_empty + end + end + + it_behaves_like 'successful import', 'following', 'imports.txt', 'merge' + it_behaves_like 'successful import', 'following', 'imports.txt', 'overwrite' + it_behaves_like 'successful import', 'blocking', 'imports.txt', 'merge' + it_behaves_like 'successful import', 'blocking', 'imports.txt', 'overwrite' + it_behaves_like 'successful import', 'muting', 'imports.txt', 'merge' + it_behaves_like 'successful import', 'muting', 'imports.txt', 'overwrite' + it_behaves_like 'successful import', 'domain_blocking', 'domain_blocks.csv', 'merge' + it_behaves_like 'successful import', 'domain_blocking', 'domain_blocks.csv', 'overwrite' + it_behaves_like 'successful import', 'bookmarks', 'bookmark-imports.txt', 'merge' + it_behaves_like 'successful import', 'bookmarks', 'bookmark-imports.txt', 'overwrite' + + it_behaves_like 'unsuccessful import', 'following', 'domain_blocks.csv', 'merge' + it_behaves_like 'unsuccessful import', 'following', 'domain_blocks.csv', 'overwrite' + it_behaves_like 'unsuccessful import', 'blocking', 'domain_blocks.csv', 'merge' + it_behaves_like 'unsuccessful import', 'blocking', 'domain_blocks.csv', 'overwrite' + it_behaves_like 'unsuccessful import', 'muting', 'domain_blocks.csv', 'merge' + it_behaves_like 'unsuccessful import', 'muting', 'domain_blocks.csv', 'overwrite' + + it_behaves_like 'unsuccessful import', 'following', 'empty.csv', 'merge' + it_behaves_like 'unsuccessful import', 'following', 'empty.csv', 'overwrite' + end end diff --git a/spec/fabricators/bulk_import_fabricator.rb b/spec/fabricators/bulk_import_fabricator.rb new file mode 100644 index 0000000000..673b7960d9 --- /dev/null +++ b/spec/fabricators/bulk_import_fabricator.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +Fabricator(:bulk_import) do + type 1 + state 1 + total_items 1 + processed_items 1 + imported_items 1 + finished_at '2022-11-18 14:55:07' + overwrite false + account +end diff --git a/spec/fabricators/bulk_import_row_fabricator.rb b/spec/fabricators/bulk_import_row_fabricator.rb new file mode 100644 index 0000000000..f8358e734d --- /dev/null +++ b/spec/fabricators/bulk_import_row_fabricator.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +Fabricator(:bulk_import_row) do + bulk_import + data '' +end diff --git a/spec/fixtures/files/empty.csv b/spec/fixtures/files/empty.csv new file mode 100644 index 0000000000..e69de29bb2 diff --git a/spec/fixtures/files/following_accounts.csv b/spec/fixtures/files/following_accounts.csv new file mode 100644 index 0000000000..c7917443f8 --- /dev/null +++ b/spec/fixtures/files/following_accounts.csv @@ -0,0 +1,5 @@ +Account address,Show boosts,Notify on new posts,Languages + +user@example.com,true,false, + +user@test.com,true,true,"en,fr" diff --git a/spec/fixtures/files/muted_accounts.csv b/spec/fixtures/files/muted_accounts.csv new file mode 100644 index 0000000000..66f4315bce --- /dev/null +++ b/spec/fixtures/files/muted_accounts.csv @@ -0,0 +1,5 @@ +Account address,Hide notifications + +user@example.com,true + +user@test.com,false diff --git 
a/spec/lib/vacuum/imports_vacuum_spec.rb b/spec/lib/vacuum/imports_vacuum_spec.rb new file mode 100644 index 0000000000..1e0abc5e01 --- /dev/null +++ b/spec/lib/vacuum/imports_vacuum_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Vacuum::ImportsVacuum do + subject { described_class.new } + + let!(:old_unconfirmed) { Fabricate(:bulk_import, state: :unconfirmed, created_at: 2.days.ago) } + let!(:new_unconfirmed) { Fabricate(:bulk_import, state: :unconfirmed, created_at: 10.seconds.ago) } + let!(:recent_ongoing) { Fabricate(:bulk_import, state: :in_progress, created_at: 20.minutes.ago) } + let!(:recent_finished) { Fabricate(:bulk_import, state: :finished, created_at: 1.day.ago) } + let!(:old_finished) { Fabricate(:bulk_import, state: :finished, created_at: 2.months.ago) } + + describe '#perform' do + it 'cleans up the expected imports' do + expect { subject.perform }.to change { BulkImport.all.pluck(:id) }.from([old_unconfirmed, new_unconfirmed, recent_ongoing, recent_finished, old_finished].map(&:id)).to([new_unconfirmed, recent_ongoing, recent_finished].map(&:id)) + end + end +end diff --git a/spec/models/form/import_spec.rb b/spec/models/form/import_spec.rb new file mode 100644 index 0000000000..e1fea4205c --- /dev/null +++ b/spec/models/form/import_spec.rb @@ -0,0 +1,281 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Form::Import do + subject { described_class.new(current_account: account, type: import_type, mode: import_mode, data: data) } + + let(:account) { Fabricate(:account) } + let(:data) { fixture_file_upload(import_file) } + let(:import_mode) { 'merge' } + + describe 'validations' do + shared_examples 'incompatible import type' do |type, file| + let(:import_file) { file } + let(:import_type) { type } + + it 'has errors' do + subject.validate + expect(subject.errors[:data]).to include(I18n.t('imports.errors.incompatible_type')) + end + end + + shared_examples 'too many CSV rows' do |type, file, allowed_rows| + let(:import_file) { file } + let(:import_type) { type } + + before do + stub_const 'Form::Import::ROWS_PROCESSING_LIMIT', allowed_rows + end + + it 'has errors' do + subject.validate + expect(subject.errors[:data]).to include(I18n.t('imports.errors.over_rows_processing_limit', count: Form::Import::ROWS_PROCESSING_LIMIT)) + end + end + + shared_examples 'valid import' do |type, file| + let(:import_file) { file } + let(:import_type) { type } + + it 'passes validation' do + expect(subject).to be_valid + end + end + + context 'when the file too large' do + let(:import_type) { 'following' } + let(:import_file) { 'imports.txt' } + + before do + stub_const 'Form::Import::FILE_SIZE_LIMIT', 5 + end + + it 'has errors' do + subject.validate + expect(subject.errors[:data]).to include(I18n.t('imports.errors.too_large')) + end + end + + context 'when the CSV file is malformed CSV' do + let(:import_type) { 'following' } + let(:import_file) { 'boop.ogg' } + + it 'has errors' do + # NOTE: not testing more specific error because we don't know the string to match + expect(subject).to model_have_error_on_field(:data) + end + end + + context 'when importing more follows than allowed' do + let(:import_type) { 'following' } + let(:import_file) { 'imports.txt' } + + before do + allow(FollowLimitValidator).to receive(:limit_for_account).with(account).and_return(1) + end + + it 'has errors' do + subject.validate + expect(subject.errors[:data]).to include(I18n.t('users.follow_limit_reached', limit: 1)) + end + end + + 
it_behaves_like 'too many CSV rows', 'following', 'imports.txt', 1 + it_behaves_like 'too many CSV rows', 'blocking', 'imports.txt', 1 + it_behaves_like 'too many CSV rows', 'muting', 'imports.txt', 1 + it_behaves_like 'too many CSV rows', 'domain_blocking', 'domain_blocks.csv', 2 + it_behaves_like 'too many CSV rows', 'bookmarks', 'bookmark-imports.txt', 3 + + # Importing list of addresses with no headers into various types + it_behaves_like 'valid import', 'following', 'imports.txt' + it_behaves_like 'valid import', 'blocking', 'imports.txt' + it_behaves_like 'valid import', 'muting', 'imports.txt' + + # Importing domain blocks with headers into expected type + it_behaves_like 'valid import', 'domain_blocking', 'domain_blocks.csv' + + # Importing bookmarks list with no headers into expected type + it_behaves_like 'valid import', 'bookmarks', 'bookmark-imports.txt' + + # Importing followed accounts with headers into various compatible types + it_behaves_like 'valid import', 'following', 'following_accounts.csv' + it_behaves_like 'valid import', 'blocking', 'following_accounts.csv' + it_behaves_like 'valid import', 'muting', 'following_accounts.csv' + + # Importing domain blocks with headers into incompatible types + it_behaves_like 'incompatible import type', 'following', 'domain_blocks.csv' + it_behaves_like 'incompatible import type', 'blocking', 'domain_blocks.csv' + it_behaves_like 'incompatible import type', 'muting', 'domain_blocks.csv' + it_behaves_like 'incompatible import type', 'bookmarks', 'domain_blocks.csv' + + # Importing followed accounts with headers into incompatible types + it_behaves_like 'incompatible import type', 'domain_blocking', 'following_accounts.csv' + it_behaves_like 'incompatible import type', 'bookmarks', 'following_accounts.csv' + end + + describe '#guessed_type' do + shared_examples 'with enough information' do |type, file, original_filename, expected_guess| + let(:import_file) { file } + let(:import_type) { type } + + before do + allow(data).to receive(:original_filename).and_return(original_filename) + end + + it 'guesses the expected type' do + expect(subject.guessed_type).to eq expected_guess + end + end + + context 'when the headers are enough to disambiguate' do + it_behaves_like 'with enough information', 'following', 'following_accounts.csv', 'import.csv', :following + it_behaves_like 'with enough information', 'blocking', 'following_accounts.csv', 'import.csv', :following + it_behaves_like 'with enough information', 'muting', 'following_accounts.csv', 'import.csv', :following + + it_behaves_like 'with enough information', 'following', 'muted_accounts.csv', 'imports.csv', :muting + it_behaves_like 'with enough information', 'blocking', 'muted_accounts.csv', 'imports.csv', :muting + it_behaves_like 'with enough information', 'muting', 'muted_accounts.csv', 'imports.csv', :muting + end + + context 'when the file name is enough to disambiguate' do + it_behaves_like 'with enough information', 'following', 'imports.txt', 'following_accounts.csv', :following + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'following_accounts.csv', :following + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'following_accounts.csv', :following + + it_behaves_like 'with enough information', 'following', 'imports.txt', 'follows.csv', :following + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'follows.csv', :following + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'follows.csv', 
:following + + it_behaves_like 'with enough information', 'following', 'imports.txt', 'blocked_accounts.csv', :blocking + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'blocked_accounts.csv', :blocking + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'blocked_accounts.csv', :blocking + + it_behaves_like 'with enough information', 'following', 'imports.txt', 'blocks.csv', :blocking + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'blocks.csv', :blocking + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'blocks.csv', :blocking + + it_behaves_like 'with enough information', 'following', 'imports.txt', 'muted_accounts.csv', :muting + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'muted_accounts.csv', :muting + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'muted_accounts.csv', :muting + + it_behaves_like 'with enough information', 'following', 'imports.txt', 'mutes.csv', :muting + it_behaves_like 'with enough information', 'blocking', 'imports.txt', 'mutes.csv', :muting + it_behaves_like 'with enough information', 'muting', 'imports.txt', 'mutes.csv', :muting + end + end + + describe '#likely_mismatched?' do + shared_examples 'with matching types' do |type, file, original_filename = nil| + let(:import_file) { file } + let(:import_type) { type } + + before do + allow(data).to receive(:original_filename).and_return(original_filename) if original_filename.present? + end + + it 'returns false' do + expect(subject.likely_mismatched?).to be false + end + end + + shared_examples 'with mismatching types' do |type, file, original_filename = nil| + let(:import_file) { file } + let(:import_type) { type } + + before do + allow(data).to receive(:original_filename).and_return(original_filename) if original_filename.present? 
+ end + + it 'returns true' do + expect(subject.likely_mismatched?).to be true + end + end + + it_behaves_like 'with matching types', 'following', 'following_accounts.csv' + it_behaves_like 'with matching types', 'following', 'following_accounts.csv', 'imports.txt' + it_behaves_like 'with matching types', 'following', 'imports.txt' + it_behaves_like 'with matching types', 'blocking', 'imports.txt', 'blocks.csv' + it_behaves_like 'with matching types', 'blocking', 'imports.txt' + it_behaves_like 'with matching types', 'muting', 'muted_accounts.csv' + it_behaves_like 'with matching types', 'muting', 'muted_accounts.csv', 'imports.txt' + it_behaves_like 'with matching types', 'muting', 'imports.txt' + it_behaves_like 'with matching types', 'domain_blocking', 'domain_blocks.csv' + it_behaves_like 'with matching types', 'domain_blocking', 'domain_blocks.csv', 'imports.txt' + it_behaves_like 'with matching types', 'bookmarks', 'bookmark-imports.txt' + it_behaves_like 'with matching types', 'bookmarks', 'bookmark-imports.txt', 'imports.txt' + + it_behaves_like 'with mismatching types', 'following', 'imports.txt', 'blocks.csv' + it_behaves_like 'with mismatching types', 'following', 'imports.txt', 'blocked_accounts.csv' + it_behaves_like 'with mismatching types', 'following', 'imports.txt', 'mutes.csv' + it_behaves_like 'with mismatching types', 'following', 'imports.txt', 'muted_accounts.csv' + it_behaves_like 'with mismatching types', 'following', 'muted_accounts.csv' + it_behaves_like 'with mismatching types', 'following', 'muted_accounts.csv', 'imports.txt' + it_behaves_like 'with mismatching types', 'blocking', 'following_accounts.csv' + it_behaves_like 'with mismatching types', 'blocking', 'following_accounts.csv', 'imports.txt' + it_behaves_like 'with mismatching types', 'blocking', 'muted_accounts.csv' + it_behaves_like 'with mismatching types', 'blocking', 'muted_accounts.csv', 'imports.txt' + it_behaves_like 'with mismatching types', 'blocking', 'imports.txt', 'follows.csv' + it_behaves_like 'with mismatching types', 'blocking', 'imports.txt', 'following_accounts.csv' + it_behaves_like 'with mismatching types', 'blocking', 'imports.txt', 'mutes.csv' + it_behaves_like 'with mismatching types', 'blocking', 'imports.txt', 'muted_accounts.csv' + it_behaves_like 'with mismatching types', 'muting', 'following_accounts.csv' + it_behaves_like 'with mismatching types', 'muting', 'following_accounts.csv', 'imports.txt' + it_behaves_like 'with mismatching types', 'muting', 'imports.txt', 'follows.csv' + it_behaves_like 'with mismatching types', 'muting', 'imports.txt', 'following_accounts.csv' + it_behaves_like 'with mismatching types', 'muting', 'imports.txt', 'blocks.csv' + it_behaves_like 'with mismatching types', 'muting', 'imports.txt', 'blocked_accounts.csv' + end + + describe 'save' do + shared_examples 'on successful import' do |type, mode, file, expected_rows| + let(:import_type) { type } + let(:import_file) { file } + let(:import_mode) { mode } + + before do + subject.save + end + + it 'creates the expected rows' do + expect(account.bulk_imports.first.rows.pluck(:data)).to match_array(expected_rows) + end + + it 'creates a BulkImport with expected attributes' do + bulk_import = account.bulk_imports.first + expect(bulk_import).to_not be_nil + expect(bulk_import.type.to_sym).to eq subject.type.to_sym + expect(bulk_import.original_filename).to eq subject.data.original_filename + expect(bulk_import.likely_mismatched?).to eq subject.likely_mismatched? 
+ expect(bulk_import.overwrite?).to eq !!subject.overwrite # rubocop:disable Style/DoubleNegation + expect(bulk_import.processed_items).to eq 0 + expect(bulk_import.imported_items).to eq 0 + expect(bulk_import.total_items).to eq bulk_import.rows.count + expect(bulk_import.unconfirmed?).to be true + end + end + + it_behaves_like 'on successful import', 'following', 'merge', 'imports.txt', (%w(user@example.com user@test.com).map { |acct| { 'acct' => acct } }) + it_behaves_like 'on successful import', 'following', 'overwrite', 'imports.txt', (%w(user@example.com user@test.com).map { |acct| { 'acct' => acct } }) + it_behaves_like 'on successful import', 'blocking', 'merge', 'imports.txt', (%w(user@example.com user@test.com).map { |acct| { 'acct' => acct } }) + it_behaves_like 'on successful import', 'blocking', 'overwrite', 'imports.txt', (%w(user@example.com user@test.com).map { |acct| { 'acct' => acct } }) + it_behaves_like 'on successful import', 'muting', 'merge', 'imports.txt', (%w(user@example.com user@test.com).map { |acct| { 'acct' => acct } }) + it_behaves_like 'on successful import', 'domain_blocking', 'merge', 'domain_blocks.csv', (%w(bad.domain worse.domain reject.media).map { |domain| { 'domain' => domain } }) + it_behaves_like 'on successful import', 'bookmarks', 'merge', 'bookmark-imports.txt', (%w(https://example.com/statuses/1312 https://local.com/users/foo/statuses/42 https://unknown-remote.com/users/bar/statuses/1 https://example.com/statuses/direct).map { |uri| { 'uri' => uri } }) + + it_behaves_like 'on successful import', 'following', 'merge', 'following_accounts.csv', [ + { 'acct' => 'user@example.com', 'show_reblogs' => true, 'notify' => false, 'languages' => nil }, + { 'acct' => 'user@test.com', 'show_reblogs' => true, 'notify' => true, 'languages' => ['en', 'fr'] }, + ] + + it_behaves_like 'on successful import', 'muting', 'merge', 'muted_accounts.csv', [ + { 'acct' => 'user@example.com', 'hide_notifications' => true }, + { 'acct' => 'user@test.com', 'hide_notifications' => false }, + ] + + # Based on the bug report 20571 where UTF-8 encoded domains were rejecting import of their users + # + # https://github.com/mastodon/mastodon/issues/20571 + it_behaves_like 'on successful import', 'following', 'merge', 'utf8-followers.txt', [{ 'acct' => 'nare@թութ.հայ' }] + end +end diff --git a/spec/models/import_spec.rb b/spec/models/import_spec.rb index 1c84744138..106af4e0f7 100644 --- a/spec/models/import_spec.rb +++ b/spec/models/import_spec.rb @@ -22,20 +22,5 @@ RSpec.describe Import, type: :model do import = Import.create(account: account, type: type) expect(import).to model_have_error_on_field(:data) end - - it 'is invalid with malformed data' do - import = Import.create(account: account, type: type, data: StringIO.new('\"test')) - expect(import).to model_have_error_on_field(:data) - end - - it 'is invalid with too many rows in data' do - import = Import.create(account: account, type: type, data: StringIO.new("foo@bar.com\n" * (ImportService::ROWS_PROCESSING_LIMIT + 10))) - expect(import).to model_have_error_on_field(:data) - end - - it 'is invalid when there are more rows when following limit' do - import = Import.create(account: account, type: type, data: StringIO.new("foo@bar.com\n" * (FollowLimitValidator.limit_for_account(account) + 10))) - expect(import).to model_have_error_on_field(:data) - end end end diff --git a/spec/services/bulk_import_row_service_spec.rb b/spec/services/bulk_import_row_service_spec.rb new file mode 100644 index 0000000000..5bbe6b0042 --- 
/dev/null +++ b/spec/services/bulk_import_row_service_spec.rb @@ -0,0 +1,95 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe BulkImportRowService do + subject { described_class.new } + + let(:account) { Fabricate(:account) } + let(:import) { Fabricate(:bulk_import, account: account, type: import_type) } + let(:import_row) { Fabricate(:bulk_import_row, bulk_import: import, data: data) } + + describe '#call' do + context 'when importing a follow' do + let(:import_type) { 'following' } + let(:target_account) { Fabricate(:account) } + let(:service_double) { instance_double(FollowService, call: nil) } + let(:data) do + { 'acct' => target_account.acct } + end + + before do + allow(FollowService).to receive(:new).and_return(service_double) + end + + it 'calls FollowService with the expected arguments and returns true' do + expect(subject.call(import_row)).to be true + + expect(service_double).to have_received(:call).with(account, target_account, { reblogs: nil, notify: nil, languages: nil }) + end + end + + context 'when importing a block' do + let(:import_type) { 'blocking' } + let(:target_account) { Fabricate(:account) } + let(:service_double) { instance_double(BlockService, call: nil) } + let(:data) do + { 'acct' => target_account.acct } + end + + before do + allow(BlockService).to receive(:new).and_return(service_double) + end + + it 'calls BlockService with the expected arguments and returns true' do + expect(subject.call(import_row)).to be true + + expect(service_double).to have_received(:call).with(account, target_account) + end + end + + context 'when importing a mute' do + let(:import_type) { 'muting' } + let(:target_account) { Fabricate(:account) } + let(:service_double) { instance_double(MuteService, call: nil) } + let(:data) do + { 'acct' => target_account.acct } + end + + before do + allow(MuteService).to receive(:new).and_return(service_double) + end + + it 'calls MuteService with the expected arguments and returns true' do + expect(subject.call(import_row)).to be true + + expect(service_double).to have_received(:call).with(account, target_account, { notifications: nil }) + end + end + + context 'when importing a bookmark' do + let(:import_type) { 'bookmarks' } + let(:data) do + { 'uri' => ActivityPub::TagManager.instance.uri_for(target_status) } + end + + context 'when the status is public' do + let(:target_status) { Fabricate(:status) } + + it 'bookmarks the status and returns true' do + expect(subject.call(import_row)).to be true + expect(account.bookmarked?(target_status)).to be true + end + end + + context 'when the status is not accessible to the user' do + let(:target_status) { Fabricate(:status, visibility: :direct) } + + it 'does not bookmark the status and returns false' do + expect(subject.call(import_row)).to be false + expect(account.bookmarked?(target_status)).to be false + end + end + end + end +end diff --git a/spec/services/bulk_import_service_spec.rb b/spec/services/bulk_import_service_spec.rb new file mode 100644 index 0000000000..09dfb0a0b6 --- /dev/null +++ b/spec/services/bulk_import_service_spec.rb @@ -0,0 +1,417 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe BulkImportService do + subject { described_class.new } + + let(:account) { Fabricate(:account) } + let(:import) { Fabricate(:bulk_import, account: account, type: import_type, overwrite: overwrite, state: :in_progress, imported_items: 0, processed_items: 0) } + + before do + import.update(total_items: import.rows.count) + end + + describe '#call' 
do + around do |example| + Sidekiq::Testing.fake! do + example.run + Sidekiq::Worker.clear_all + end + end + + context 'when importing follows' do + let(:import_type) { 'following' } + let(:overwrite) { false } + + let!(:rows) do + [ + { 'acct' => 'user@foo.bar' }, + { 'acct' => 'unknown@unknown.bar' }, + ].map { |data| import.rows.create!(data: data) } + end + + before do + account.follow!(Fabricate(:account)) + end + + it 'does not immediately change who the account follows' do + expect { subject.call(import) }.to_not(change { account.reload.active_relationships.to_a }) + end + + it 'enqueues workers for the expected rows' do + subject.call(import) + expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id)) + end + + it 'requests to follow all the listed users once the workers have run' do + subject.call(import) + + resolve_account_service_double = double + allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double) + allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) } + allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) } + + Import::RowWorker.drain + + expect(FollowRequest.includes(:target_account).where(account: account).map(&:target_account).map(&:acct)).to contain_exactly('user@foo.bar', 'unknown@unknown.bar') + end + end + + context 'when importing follows with overwrite' do + let(:import_type) { 'following' } + let(:overwrite) { true } + + let!(:followed) { Fabricate(:account, username: 'followed', domain: 'foo.bar', protocol: :activitypub) } + let!(:to_be_unfollowed) { Fabricate(:account, username: 'to_be_unfollowed', domain: 'foo.bar', protocol: :activitypub) } + + let!(:rows) do + [ + { 'acct' => 'followed@foo.bar', 'show_reblogs' => false, 'notify' => true, 'languages' => ['en'] }, + { 'acct' => 'user@foo.bar' }, + { 'acct' => 'unknown@unknown.bar' }, + ].map { |data| import.rows.create!(data: data) } + end + + before do + account.follow!(followed, reblogs: true, notify: false) + account.follow!(to_be_unfollowed) + end + + it 'unfollows user not present on list' do + subject.call(import) + expect(account.following?(to_be_unfollowed)).to be false + end + + it 'updates the existing follow relationship as expected' do + expect { subject.call(import) }.to change { Follow.where(account: account, target_account: followed).pick(:show_reblogs, :notify, :languages) }.from([true, false, nil]).to([false, true, ['en']]) + end + + it 'enqueues workers for the expected rows' do + subject.call(import) + expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id)) + end + + it 'requests to follow all the expected users once the workers have run' do + subject.call(import) + + resolve_account_service_double = double + allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double) + allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) } + allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) } + + Import::RowWorker.drain + + expect(FollowRequest.includes(:target_account).where(account: 
account).map(&:target_account).map(&:acct)).to contain_exactly('user@foo.bar', 'unknown@unknown.bar')
+      end
+    end
+
+    context 'when importing blocks' do
+      let(:import_type) { 'blocking' }
+      let(:overwrite) { false }
+
+      let!(:rows) do
+        [
+          { 'acct' => 'user@foo.bar' },
+          { 'acct' => 'unknown@unknown.bar' },
+        ].map { |data| import.rows.create!(data: data) }
+      end
+
+      before do
+        account.block!(Fabricate(:account, username: 'already_blocked', domain: 'remote.org'))
+      end
+
+      it 'does not immediately change who the account blocks' do
+        expect { subject.call(import) }.to_not(change { account.reload.blocking.to_a })
+      end
+
+      it 'enqueues workers for the expected rows' do
+        subject.call(import)
+        expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
+      end
+
+      it 'blocks all the listed users once the workers have run' do
+        subject.call(import)
+
+        resolve_account_service_double = double
+        allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
+        allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
+        allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
+
+        Import::RowWorker.drain
+
+        expect(account.blocking.map(&:acct)).to contain_exactly('already_blocked@remote.org', 'user@foo.bar', 'unknown@unknown.bar')
+      end
+    end
+
+    context 'when importing blocks with overwrite' do
+      let(:import_type) { 'blocking' }
+      let(:overwrite) { true }
+
+      let!(:blocked) { Fabricate(:account, username: 'blocked', domain: 'foo.bar', protocol: :activitypub) }
+      let!(:to_be_unblocked) { Fabricate(:account, username: 'to_be_unblocked', domain: 'foo.bar', protocol: :activitypub) }
+
+      let!(:rows) do
+        [
+          { 'acct' => 'blocked@foo.bar' },
+          { 'acct' => 'user@foo.bar' },
+          { 'acct' => 'unknown@unknown.bar' },
+        ].map { |data| import.rows.create!(data: data) }
+      end
+
+      before do
+        account.block!(blocked)
+        account.block!(to_be_unblocked)
+      end
+
+      it 'unblocks user not present on list' do
+        subject.call(import)
+        expect(account.blocking?(to_be_unblocked)).to be false
+      end
+
+      it 'enqueues workers for the expected rows' do
+        subject.call(import)
+        expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id))
+      end
+
+      it 'blocks all the expected users once the workers have run' do
+        subject.call(import)
+
+        resolve_account_service_double = double
+        allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
+        allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
+        allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
+
+        Import::RowWorker.drain
+
+        expect(account.blocking.map(&:acct)).to contain_exactly('blocked@foo.bar', 'user@foo.bar', 'unknown@unknown.bar')
+      end
+    end
+
+    context 'when importing mutes' do
+      let(:import_type) { 'muting' }
+      let(:overwrite) { false }
+
+      let!(:rows) do
+        [
+          { 'acct' => 'user@foo.bar' },
+          { 'acct' => 'unknown@unknown.bar' },
+        ].map { |data| import.rows.create!(data: data) }
+      end
+
+      before do
+        account.mute!(Fabricate(:account, username: 'already_muted', domain: 'remote.org'))
+      end
+
+      it 'does not immediately change who the account mutes' do
+        expect { subject.call(import) }.to_not(change { account.reload.muting.to_a })
+      end
+
+      it 'enqueues workers for the expected rows' do
+        subject.call(import)
+        expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id))
+      end
+
+      it 'mutes all the listed users once the workers have run' do
+        subject.call(import)
+
+        resolve_account_service_double = double
+        allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
+        allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
+        allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
+
+        Import::RowWorker.drain
+
+        expect(account.muting.map(&:acct)).to contain_exactly('already_muted@remote.org', 'user@foo.bar', 'unknown@unknown.bar')
+      end
+    end
+
+    context 'when importing mutes with overwrite' do
+      let(:import_type) { 'muting' }
+      let(:overwrite) { true }
+
+      let!(:muted) { Fabricate(:account, username: 'muted', domain: 'foo.bar', protocol: :activitypub) }
+      let!(:to_be_unmuted) { Fabricate(:account, username: 'to_be_unmuted', domain: 'foo.bar', protocol: :activitypub) }
+
+      let!(:rows) do
+        [
+          { 'acct' => 'muted@foo.bar', 'hide_notifications' => true },
+          { 'acct' => 'user@foo.bar' },
+          { 'acct' => 'unknown@unknown.bar' },
+        ].map { |data| import.rows.create!(data: data) }
+      end
+
+      before do
+        account.mute!(muted, notifications: false)
+        account.mute!(to_be_unmuted)
+      end
+
+      it 'updates the existing mute as expected' do
+        expect { subject.call(import) }.to change { Mute.where(account: account, target_account: muted).pick(:hide_notifications) }.from(false).to(true)
+      end
+
+      it 'unmutes user not present on list' do
+        subject.call(import)
+        expect(account.muting?(to_be_unmuted)).to be false
+      end
+
+      it 'enqueues workers for the expected rows' do
+        subject.call(import)
+        expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows[1..].map(&:id))
+      end
+
+      it 'mutes all the expected users once the workers have run' do
+        subject.call(import)
+
+        resolve_account_service_double = double
+        allow(ResolveAccountService).to receive(:new).and_return(resolve_account_service_double)
+        allow(resolve_account_service_double).to receive(:call).with('user@foo.bar', any_args) { Fabricate(:account, username: 'user', domain: 'foo.bar', protocol: :activitypub) }
+        allow(resolve_account_service_double).to receive(:call).with('unknown@unknown.bar', any_args) { Fabricate(:account, username: 'unknown', domain: 'unknown.bar', protocol: :activitypub) }
+
+        Import::RowWorker.drain
+
+        expect(account.muting.map(&:acct)).to contain_exactly('muted@foo.bar', 'user@foo.bar', 'unknown@unknown.bar')
+      end
+    end
+
+    context 'when importing domain blocks' do
+      let(:import_type) { 'domain_blocking' }
+      let(:overwrite) { false }
+
+      let!(:rows) do
+        [
+          { 'domain' => 'blocked.com' },
+          { 'domain' => 'to_block.com' },
+        ].map { |data| import.rows.create!(data: data) }
+      end
+
+      before do
+        account.block_domain!('alreadyblocked.com')
+        account.block_domain!('blocked.com')
+      end
+
+      it 'blocks all the new domains' do
+        subject.call(import)
+        expect(account.domain_blocks.pluck(:domain)).to contain_exactly('alreadyblocked.com', 'blocked.com', 'to_block.com')
+      end
+
+      it 'marks 
the import as finished' do + subject.call(import) + expect(import.reload.finished?).to be true + end + end + + context 'when importing domain blocks with overwrite' do + let(:import_type) { 'domain_blocking' } + let(:overwrite) { true } + + let!(:rows) do + [ + { 'domain' => 'blocked.com' }, + { 'domain' => 'to_block.com' }, + ].map { |data| import.rows.create!(data: data) } + end + + before do + account.block_domain!('alreadyblocked.com') + account.block_domain!('blocked.com') + end + + it 'blocks all the new domains' do + subject.call(import) + expect(account.domain_blocks.pluck(:domain)).to contain_exactly('blocked.com', 'to_block.com') + end + + it 'marks the import as finished' do + subject.call(import) + expect(import.reload.finished?).to be true + end + end + + context 'when importing bookmarks' do + let(:import_type) { 'bookmarks' } + let(:overwrite) { false } + + let!(:already_bookmarked) { Fabricate(:status, uri: 'https://already.bookmarked/1') } + let!(:status) { Fabricate(:status, uri: 'https://foo.bar/posts/1') } + let!(:inaccessible_status) { Fabricate(:status, uri: 'https://foo.bar/posts/inaccessible', visibility: :direct) } + let!(:bookmarked) { Fabricate(:status, uri: 'https://foo.bar/posts/already-bookmarked') } + + let!(:rows) do + [ + { 'uri' => status.uri }, + { 'uri' => inaccessible_status.uri }, + { 'uri' => bookmarked.uri }, + { 'uri' => 'https://domain.unknown/foo' }, + { 'uri' => 'https://domain.unknown/private' }, + ].map { |data| import.rows.create!(data: data) } + end + + before do + account.bookmarks.create!(status: already_bookmarked) + account.bookmarks.create!(status: bookmarked) + end + + it 'enqueues workers for the expected rows' do + subject.call(import) + expect(Import::RowWorker.jobs.pluck('args').flatten).to match_array(rows.map(&:id)) + end + + it 'updates the bookmarks as expected once the workers have run' do + subject.call(import) + + service_double = double + allow(ActivityPub::FetchRemoteStatusService).to receive(:new).and_return(service_double) + allow(service_double).to receive(:call).with('https://domain.unknown/foo') { Fabricate(:status, uri: 'https://domain.unknown/foo') } + allow(service_double).to receive(:call).with('https://domain.unknown/private') { Fabricate(:status, uri: 'https://domain.unknown/private', visibility: :direct) } + + Import::RowWorker.drain + + expect(account.bookmarks.map(&:status).map(&:uri)).to contain_exactly(already_bookmarked.uri, status.uri, bookmarked.uri, 'https://domain.unknown/foo') + end + end + + context 'when importing bookmarks with overwrite' do + let(:import_type) { 'bookmarks' } + let(:overwrite) { true } + + let!(:already_bookmarked) { Fabricate(:status, uri: 'https://already.bookmarked/1') } + let!(:status) { Fabricate(:status, uri: 'https://foo.bar/posts/1') } + let!(:inaccessible_status) { Fabricate(:status, uri: 'https://foo.bar/posts/inaccessible', visibility: :direct) } + let!(:bookmarked) { Fabricate(:status, uri: 'https://foo.bar/posts/already-bookmarked') } + + let!(:rows) do + [ + { 'uri' => status.uri }, + { 'uri' => inaccessible_status.uri }, + { 'uri' => bookmarked.uri }, + { 'uri' => 'https://domain.unknown/foo' }, + { 'uri' => 'https://domain.unknown/private' }, + ].map { |data| import.rows.create!(data: data) } + end + + before do + account.bookmarks.create!(status: already_bookmarked) + account.bookmarks.create!(status: bookmarked) + end + + it 'enqueues workers for the expected rows' do + subject.call(import) + expect(Import::RowWorker.jobs.pluck('args').flatten).to 
match_array(rows.map(&:id)) + end + + it 'updates the bookmarks as expected once the workers have run' do + subject.call(import) + + service_double = double + allow(ActivityPub::FetchRemoteStatusService).to receive(:new).and_return(service_double) + allow(service_double).to receive(:call).with('https://domain.unknown/foo') { Fabricate(:status, uri: 'https://domain.unknown/foo') } + allow(service_double).to receive(:call).with('https://domain.unknown/private') { Fabricate(:status, uri: 'https://domain.unknown/private', visibility: :direct) } + + Import::RowWorker.drain + + expect(account.bookmarks.map(&:status).map(&:uri)).to contain_exactly(status.uri, bookmarked.uri, 'https://domain.unknown/foo') + end + end + end +end diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb new file mode 100644 index 0000000000..91f51fbb42 --- /dev/null +++ b/spec/workers/bulk_import_worker_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'rails_helper' + +describe BulkImportWorker do + subject { described_class.new } + + let(:import) { Fabricate(:bulk_import, state: :scheduled) } + + describe '#perform' do + let(:service_double) { instance_double(BulkImportService, call: nil) } + + before do + allow(BulkImportService).to receive(:new).and_return(service_double) + end + + it 'changes the import\'s state as appropriate' do + expect { subject.perform(import.id) }.to change { import.reload.state.to_sym }.from(:scheduled).to(:in_progress) + end + + it 'calls BulkImportService' do + subject.perform(import.id) + expect(service_double).to have_received(:call).with(import) + end + end +end diff --git a/spec/workers/import/row_worker_spec.rb b/spec/workers/import/row_worker_spec.rb new file mode 100644 index 0000000000..0a71a838fc --- /dev/null +++ b/spec/workers/import/row_worker_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'rails_helper' + +describe Import::RowWorker do + subject { described_class.new } + + let(:row) { Fabricate(:bulk_import_row, bulk_import: import) } + + describe '#perform' do + before do + allow(BulkImportRowService).to receive(:new).and_return(service_double) + end + + shared_examples 'clean failure' do + let(:service_double) { instance_double(BulkImportRowService, call: false) } + + it 'calls BulkImportRowService' do + subject.perform(row.id) + expect(service_double).to have_received(:call).with(row) + end + + it 'increases the number of processed items' do + expect { subject.perform(row.id) }.to(change { import.reload.processed_items }.by(+1)) + end + + it 'does not increase the number of imported items' do + expect { subject.perform(row.id) }.to_not(change { import.reload.imported_items }) + end + + it 'does not delete the row' do + subject.perform(row.id) + expect(BulkImportRow.exists?(row.id)).to be true + end + end + + shared_examples 'unclean failure' do + let(:service_double) { instance_double(BulkImportRowService) } + + before do + allow(service_double).to receive(:call) do + raise 'dummy error' + end + end + + it 'raises an error and does not change processed items count' do + expect { subject.perform(row.id) }.to raise_error(StandardError, 'dummy error').and(not_change { import.reload.processed_items }) + end + + it 'does not delete the row' do + expect { subject.perform(row.id) }.to raise_error(StandardError, 'dummy error').and(not_change { BulkImportRow.exists?(row.id) }) + end + end + + shared_examples 'clean success' do + let(:service_double) { instance_double(BulkImportRowService, call: true) } + 
+ it 'calls BulkImportRowService' do + subject.perform(row.id) + expect(service_double).to have_received(:call).with(row) + end + + it 'increases the number of processed items' do + expect { subject.perform(row.id) }.to(change { import.reload.processed_items }.by(+1)) + end + + it 'increases the number of imported items' do + expect { subject.perform(row.id) }.to(change { import.reload.imported_items }.by(+1)) + end + + it 'deletes the row' do + expect { subject.perform(row.id) }.to change { BulkImportRow.exists?(row.id) }.from(true).to(false) + end + end + + context 'when there are multiple rows to process' do + let(:import) { Fabricate(:bulk_import, total_items: 2, processed_items: 0, imported_items: 0, state: :in_progress) } + + context 'with a clean failure' do + include_examples 'clean failure' + + it 'does not mark the import as finished' do + expect { subject.perform(row.id) }.to_not(change { import.reload.state.to_sym }) + end + end + + context 'with an unclean failure' do + include_examples 'unclean failure' + + it 'does not mark the import as finished' do + expect { subject.perform(row.id) }.to raise_error(StandardError).and(not_change { import.reload.state.to_sym }) + end + end + + context 'with a clean success' do + include_examples 'clean success' + + it 'does not mark the import as finished' do + expect { subject.perform(row.id) }.to_not(change { import.reload.state.to_sym }) + end + end + end + + context 'when this is the last row to process' do + let(:import) { Fabricate(:bulk_import, total_items: 2, processed_items: 1, imported_items: 0, state: :in_progress) } + + context 'with a clean failure' do + include_examples 'clean failure' + + it 'marks the import as finished' do + expect { subject.perform(row.id) }.to change { import.reload.state.to_sym }.from(:in_progress).to(:finished) + end + end + + # NOTE: sidekiq retry logic may be a bit too difficult to test, so leaving this blind spot for now + it_behaves_like 'unclean failure' + + context 'with a clean success' do + include_examples 'clean success' + + it 'marks the import as finished' do + expect { subject.perform(row.id) }.to change { import.reload.state.to_sym }.from(:in_progress).to(:finished) + end + end + end + end +end From 5a5975d7f7c470b8098ed687cdbae3261834284a Mon Sep 17 00:00:00 2001 From: fusagiko / takayamaki <24884114+takayamaki@users.noreply.github.com> Date: Tue, 2 May 2023 19:53:32 +0900 Subject: [PATCH 02/96] Add type annotation for IconButton component (#24753) --- .../{icon_button.jsx => icon_button.tsx} | 72 ++++++++++--------- 1 file changed, 37 insertions(+), 35 deletions(-) rename app/javascript/mastodon/components/{icon_button.jsx => icon_button.tsx} (68%) diff --git a/app/javascript/mastodon/components/icon_button.jsx b/app/javascript/mastodon/components/icon_button.tsx similarity index 68% rename from app/javascript/mastodon/components/icon_button.jsx rename to app/javascript/mastodon/components/icon_button.tsx index 989cae4401..ec11ab7011 100644 --- a/app/javascript/mastodon/components/icon_button.jsx +++ b/app/javascript/mastodon/components/icon_button.tsx @@ -1,34 +1,36 @@ import React from 'react'; -import PropTypes from 'prop-types'; import classNames from 'classnames'; -import Icon from 'mastodon/components/icon'; -import AnimatedNumber from 'mastodon/components/animated_number'; +import { Icon } from './icon'; +import { AnimatedNumber } from './animated_number'; -export default class IconButton extends React.PureComponent { - - static propTypes = { - className: PropTypes.string, - 
title: PropTypes.string.isRequired, - icon: PropTypes.string.isRequired, - onClick: PropTypes.func, - onMouseDown: PropTypes.func, - onKeyDown: PropTypes.func, - onKeyPress: PropTypes.func, - size: PropTypes.number, - active: PropTypes.bool, - expanded: PropTypes.bool, - style: PropTypes.object, - activeStyle: PropTypes.object, - disabled: PropTypes.bool, - inverted: PropTypes.bool, - animate: PropTypes.bool, - overlay: PropTypes.bool, - tabIndex: PropTypes.number, - counter: PropTypes.number, - obfuscateCount: PropTypes.bool, - href: PropTypes.string, - ariaHidden: PropTypes.bool, - }; +type Props = { + className?: string; + title: string; + icon: string; + onClick?: React.MouseEventHandler; + onMouseDown?: React.MouseEventHandler; + onKeyDown?: React.KeyboardEventHandler; + onKeyPress?: React.KeyboardEventHandler; + size: number; + active: boolean; + expanded?: boolean; + style?: React.CSSProperties; + activeStyle?: React.CSSProperties; + disabled: boolean; + inverted?: boolean; + animate: boolean; + overlay: boolean; + tabIndex: number; + counter?: number; + obfuscateCount?: boolean; + href?: string; + ariaHidden: boolean; +} +type States = { + activate: boolean, + deactivate: boolean, +} +export default class IconButton extends React.PureComponent { static defaultProps = { size: 18, @@ -45,7 +47,7 @@ export default class IconButton extends React.PureComponent { deactivate: false, }; - componentWillReceiveProps (nextProps) { + UNSAFE_componentWillReceiveProps (nextProps: Props) { if (!nextProps.animate) return; if (this.props.active && !nextProps.active) { @@ -55,27 +57,27 @@ export default class IconButton extends React.PureComponent { } } - handleClick = (e) => { + handleClick: React.MouseEventHandler = (e) => { e.preventDefault(); - if (!this.props.disabled) { + if (!this.props.disabled && this.props.onClick != null) { this.props.onClick(e); } }; - handleKeyPress = (e) => { + handleKeyPress: React.KeyboardEventHandler = (e) => { if (this.props.onKeyPress && !this.props.disabled) { this.props.onKeyPress(e); } }; - handleMouseDown = (e) => { + handleMouseDown: React.MouseEventHandler = (e) => { if (!this.props.disabled && this.props.onMouseDown) { this.props.onMouseDown(e); } }; - handleKeyDown = (e) => { + handleKeyDown: React.KeyboardEventHandler = (e) => { if (!this.props.disabled && this.props.onKeyDown) { this.props.onKeyDown(e); } @@ -132,7 +134,7 @@ export default class IconButton extends React.PureComponent { ); - if (href && !this.prop) { + if (href != null) { contents = ( {contents} From f50105779b818dd214ea34d0a06a798c80c202a4 Mon Sep 17 00:00:00 2001 From: fusagiko / takayamaki <24884114+takayamaki@users.noreply.github.com> Date: Tue, 2 May 2023 19:54:00 +0900 Subject: [PATCH 03/96] Add type annotation for Blurhash component (#24750) --- .../mastodon/components/blurhash.jsx | 65 ------------------- .../mastodon/components/blurhash.tsx | 45 +++++++++++++ 2 files changed, 45 insertions(+), 65 deletions(-) delete mode 100644 app/javascript/mastodon/components/blurhash.jsx create mode 100644 app/javascript/mastodon/components/blurhash.tsx diff --git a/app/javascript/mastodon/components/blurhash.jsx b/app/javascript/mastodon/components/blurhash.jsx deleted file mode 100644 index f5c58e04ef..0000000000 --- a/app/javascript/mastodon/components/blurhash.jsx +++ /dev/null @@ -1,65 +0,0 @@ -// @ts-check - -import { decode } from 'blurhash'; -import React, { useRef, useEffect } from 'react'; -import PropTypes from 'prop-types'; - -/** - * @typedef BlurhashPropsBase - * @property 
{string?} hash Hash to render - * @property {number} width - * Width of the blurred region in pixels. Defaults to 32 - * @property {number} [height] - * Height of the blurred region in pixels. Defaults to width - * @property {boolean} [dummy] - * Whether dummy mode is enabled. If enabled, nothing is rendered - * and canvas left untouched - */ - -/** @typedef {JSX.IntrinsicElements['canvas'] & BlurhashPropsBase} BlurhashProps */ - -/** - * Component that is used to render blurred of blurhash string - * @param {BlurhashProps} param1 Props of the component - * @returns {JSX.Element} Canvas which will render blurred region element to embed - */ -function Blurhash({ - hash, - width = 32, - height = width, - dummy = false, - ...canvasProps -}) { - const canvasRef = /** @type {import('react').MutableRefObject} */ (useRef()); - - useEffect(() => { - const { current: canvas } = canvasRef; - canvas.width = canvas.width; // resets canvas - - if (dummy || !hash) return; - - try { - const pixels = decode(hash, width, height); - const ctx = canvas.getContext('2d'); - const imageData = new ImageData(pixels, width, height); - - // @ts-expect-error - ctx.putImageData(imageData, 0, 0); - } catch (err) { - console.error('Blurhash decoding failure', { err, hash }); - } - }, [dummy, hash, width, height]); - - return ( - - ); -} - -Blurhash.propTypes = { - hash: PropTypes.string.isRequired, - width: PropTypes.number, - height: PropTypes.number, - dummy: PropTypes.bool, -}; - -export default React.memo(Blurhash); diff --git a/app/javascript/mastodon/components/blurhash.tsx b/app/javascript/mastodon/components/blurhash.tsx new file mode 100644 index 0000000000..6fec6e1ef7 --- /dev/null +++ b/app/javascript/mastodon/components/blurhash.tsx @@ -0,0 +1,45 @@ +import { decode } from 'blurhash'; +import React, { useRef, useEffect } from 'react'; + +type Props = { + hash: string; + width?: number; + height?: number; + dummy?: boolean; // Whether dummy mode is enabled. 
If enabled, nothing is rendered and canvas left untouched + children?: never; + [key: string]: any; +} +function Blurhash({ + hash, + width = 32, + height = width, + dummy = false, + ...canvasProps +}: Props) { + const canvasRef = useRef(null); + + useEffect(() => { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const canvas = canvasRef.current!; + // eslint-disable-next-line no-self-assign + canvas.width = canvas.width; // resets canvas + + if (dummy || !hash) return; + + try { + const pixels = decode(hash, width, height); + const ctx = canvas.getContext('2d'); + const imageData = new ImageData(pixels, width, height); + + ctx?.putImageData(imageData, 0, 0); + } catch (err) { + console.error('Blurhash decoding failure', { err, hash }); + } + }, [dummy, hash, width, height]); + + return ( + + ); +} + +export default React.memo(Blurhash); From 88d33f361fcd5de267caa23bd4ad40b5b14dbd45 Mon Sep 17 00:00:00 2001 From: Matt Jankowski Date: Tue, 2 May 2023 06:57:11 -0400 Subject: [PATCH 04/96] Fix Lint/DuplicateBranch cop (#24766) --- .rubocop_todo.yml | 9 ------ app/lib/permalink_redirector.rb | 46 ++++++++++++++++++++++----- app/models/account_statuses_filter.rb | 6 ++-- app/validators/email_mx_validator.rb | 4 +-- app/validators/vote_validator.rb | 18 ++++++++--- lib/mastodon/maintenance_cli.rb | 4 +-- 6 files changed, 56 insertions(+), 31 deletions(-) diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index 446e38b0a7..a3b80f141b 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -119,15 +119,6 @@ Lint/ConstantDefinitionInBlock: - 'spec/lib/connection_pool/shared_timed_stack_spec.rb' - 'spec/models/concerns/remotable_spec.rb' -# Configuration parameters: IgnoreLiteralBranches, IgnoreConstantBranches. -Lint/DuplicateBranch: - Exclude: - - 'app/lib/permalink_redirector.rb' - - 'app/models/account_statuses_filter.rb' - - 'app/validators/email_mx_validator.rb' - - 'app/validators/vote_validator.rb' - - 'lib/mastodon/maintenance_cli.rb' - # Configuration parameters: AllowComments, AllowEmptyLambdas. Lint/EmptyBlock: Exclude: diff --git a/app/lib/permalink_redirector.rb b/app/lib/permalink_redirector.rb index cf1a376251..063a2188b5 100644 --- a/app/lib/permalink_redirector.rb +++ b/app/lib/permalink_redirector.rb @@ -8,19 +8,49 @@ class PermalinkRedirector end def redirect_path - if path_segments[0].present? && path_segments[0].start_with?('@') && path_segments[1] =~ /\d/ - find_status_url_by_id(path_segments[1]) - elsif path_segments[0].present? && path_segments[0].start_with?('@') - find_account_url_by_name(path_segments[0]) - elsif path_segments[0] == 'statuses' && path_segments[1] =~ /\d/ - find_status_url_by_id(path_segments[1]) - elsif path_segments[0] == 'accounts' && path_segments[1] =~ /\d/ - find_account_url_by_id(path_segments[1]) + if at_username_status_request? || statuses_status_request? + find_status_url_by_id(second_segment) + elsif at_username_request? + find_account_url_by_name(first_segment) + elsif accounts_request? && record_integer_id_request? + find_account_url_by_id(second_segment) end end private + def at_username_status_request? + at_username_request? && record_integer_id_request? + end + + def statuses_status_request? + statuses_request? && record_integer_id_request? + end + + def at_username_request? + first_segment.present? && first_segment.start_with?('@') + end + + def statuses_request? + first_segment == 'statuses' + end + + def accounts_request? + first_segment == 'accounts' + end + + def record_integer_id_request? 
+ second_segment =~ /\d/ + end + + def first_segment + path_segments.first + end + + def second_segment + path_segments.second + end + def path_segments @path_segments ||= @path.gsub(/\A\//, '').split('/') end diff --git a/app/models/account_statuses_filter.rb b/app/models/account_statuses_filter.rb index 211f414787..c6dc1385f5 100644 --- a/app/models/account_statuses_filter.rb +++ b/app/models/account_statuses_filter.rb @@ -32,9 +32,9 @@ class AccountStatusesFilter private def initial_scope - if suspended? - Status.none - elsif anonymous? + return Status.none if suspended? + + if anonymous? account.statuses.where(visibility: %i(public unlisted)) elsif author? account.statuses.all # NOTE: #merge! does not work without the #all diff --git a/app/validators/email_mx_validator.rb b/app/validators/email_mx_validator.rb index 19c57bdf66..a30a0c820d 100644 --- a/app/validators/email_mx_validator.rb +++ b/app/validators/email_mx_validator.rb @@ -8,9 +8,7 @@ class EmailMxValidator < ActiveModel::Validator domain = get_domain(user.email) - if domain.blank? - user.errors.add(:email, :invalid) - elsif domain.include?('..') + if domain.blank? || domain.include?('..') user.errors.add(:email, :invalid) elsif !on_allowlist?(domain) resolved_ips, resolved_domains = resolve_mx(domain) diff --git a/app/validators/vote_validator.rb b/app/validators/vote_validator.rb index 9c55f9ab6d..9bd17fbe80 100644 --- a/app/validators/vote_validator.rb +++ b/app/validators/vote_validator.rb @@ -6,15 +6,23 @@ class VoteValidator < ActiveModel::Validator vote.errors.add(:base, I18n.t('polls.errors.invalid_choice')) if invalid_choice?(vote) - if vote.poll_multiple? && already_voted_for_same_choice_on_multiple_poll?(vote) - vote.errors.add(:base, I18n.t('polls.errors.already_voted')) - elsif !vote.poll_multiple? && already_voted_on_non_multiple_poll?(vote) - vote.errors.add(:base, I18n.t('polls.errors.already_voted')) - end + vote.errors.add(:base, I18n.t('polls.errors.already_voted')) if additional_voting_not_allowed?(vote) end private + def additional_voting_not_allowed?(vote) + poll_multiple_and_already_voted?(vote) || poll_non_multiple_and_already_voted?(vote) + end + + def poll_multiple_and_already_voted?(vote) + vote.poll_multiple? && already_voted_for_same_choice_on_multiple_poll?(vote) + end + + def poll_non_multiple_and_already_voted?(vote) + !vote.poll_multiple? && already_voted_on_non_multiple_poll?(vote) + end + def invalid_choice?(vote) vote.choice.negative? 
|| vote.choice >= vote.poll.options.size end diff --git a/lib/mastodon/maintenance_cli.rb b/lib/mastodon/maintenance_cli.rb index ff8f6ddda9..88bd191ea9 100644 --- a/lib/mastodon/maintenance_cli.rb +++ b/lib/mastodon/maintenance_cli.rb @@ -664,9 +664,7 @@ module Mastodon def remove_index_if_exists!(table, name) ActiveRecord::Base.connection.remove_index(table, name: name) - rescue ArgumentError - nil - rescue ActiveRecord::StatementInvalid + rescue ArgumentError, ActiveRecord::StatementInvalid nil end end From 1eb51bd7498c91bd58e4ec65255d7665ddb2636e Mon Sep 17 00:00:00 2001 From: Claire Date: Tue, 2 May 2023 13:58:29 +0200 Subject: [PATCH 05/96] Add request specs for caching behavior (#24592) --- spec/rails_helper.rb | 22 ++ spec/requests/cache_spec.rb | 685 ++++++++++++++++++++++++++++++++++++ 2 files changed, 707 insertions(+) create mode 100644 spec/requests/cache_spec.rb diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index 26fc3d9fdf..8f7d294fbd 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -35,6 +35,26 @@ Devise::Test::ControllerHelpers.module_eval do end end +module SignedRequestHelpers + def get(path, headers: nil, sign_with: nil, **args) + return super path, headers: headers, **args if sign_with.nil? + + headers ||= {} + headers['Date'] = Time.now.utc.httpdate + headers['Host'] = ENV.fetch('LOCAL_DOMAIN') + signed_headers = headers.merge('(request-target)' => "get #{path}").slice('(request-target)', 'Host', 'Date') + + key_id = ActivityPub::TagManager.instance.key_uri_for(sign_with) + keypair = sign_with.keypair + signed_string = signed_headers.map { |key, value| "#{key.downcase}: #{value}" }.join("\n") + signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string)) + + headers['Signature'] = "keyId=\"#{key_id}\",algorithm=\"rsa-sha256\",headers=\"#{signed_headers.keys.join(' ').downcase}\",signature=\"#{signature}\"" + + super path, headers: headers, **args + end +end + RSpec.configure do |config| config.fixture_path = "#{Rails.root}/spec/fixtures" config.use_transactional_fixtures = true @@ -46,10 +66,12 @@ RSpec.configure do |config| config.include Devise::Test::ControllerHelpers, type: :helper config.include Devise::Test::ControllerHelpers, type: :view config.include Devise::Test::IntegrationHelpers, type: :feature + config.include Devise::Test::IntegrationHelpers, type: :request config.include Paperclip::Shoulda::Matchers config.include ActiveSupport::Testing::TimeHelpers config.include Chewy::Rspec::Helpers config.include Redisable + config.include SignedRequestHelpers, type: :request config.before :each, type: :feature do https = ENV['LOCAL_HTTPS'] == 'true' diff --git a/spec/requests/cache_spec.rb b/spec/requests/cache_spec.rb new file mode 100644 index 0000000000..902f21db4b --- /dev/null +++ b/spec/requests/cache_spec.rb @@ -0,0 +1,685 @@ +# frozen_string_literal: true + +require 'rails_helper' + +module TestEndpoints + # Endpoints that do not include authorization-dependent results + # and should be cacheable no matter what. + ALWAYS_CACHED = %w( + /.well-known/host-meta + /.well-known/nodeinfo + /nodeinfo/2.0 + /manifest + /custom.css + /actor + /api/v1/instance/extended_description + /api/v1/instance/rules + /api/v1/instance/peers + /api/v1/instance + /api/v2/instance + ).freeze + + # Endpoints that should be cachable when accessed anonymously but have a Vary + # on Cookie to prevent logged-in users from getting values from logged-out cache. 
+  COOKIE_DEPENDENT_CACHABLE = %w(
+    /
+    /explore
+    /public
+    /about
+    /privacy-policy
+    /directory
+    /@alice
+    /@alice/110224538612341312
+  ).freeze
+
+  # Endpoints that should be cachable when accessed anonymously but have a Vary
+  # on Authorization to prevent logged-in users from getting values from logged-out cache.
+  AUTHORIZATION_DEPENDENT_CACHABLE = %w(
+    /api/v1/accounts/lookup?acct=alice
+    /api/v1/statuses/110224538612341312
+    /api/v1/statuses/110224538612341312/context
+    /api/v1/polls/12345
+    /api/v1/trends/statuses
+    /api/v1/directory
+  ).freeze
+
+  # Private status that should only be returned with a valid signature from
+  # a specific user.
+  # Should never be cached.
+  REQUIRE_SIGNATURE = %w(
+    /users/alice/statuses/110224538643211312
+  ).freeze
+
+  # Pages only available to logged-in users.
+  # Should never be cached.
+  REQUIRE_LOGIN = %w(
+    /settings/preferences/appearance
+    /settings/profile
+    /settings/featured_tags
+    /settings/export
+    /relationships
+    /filters
+    /statuses_cleanup
+    /auth/edit
+    /oauth/authorized_applications
+    /admin/dashboard
+  ).freeze
+
+  # API endpoints only available to logged-in users.
+  # Should never be cached.
+  REQUIRE_TOKEN = %w(
+    /api/v1/announcements
+    /api/v1/timelines/home
+    /api/v1/notifications
+    /api/v1/bookmarks
+    /api/v1/favourites
+    /api/v1/follow_requests
+    /api/v1/conversations
+    /api/v1/statuses/110224538643211312
+    /api/v1/statuses/110224538643211312/context
+    /api/v1/lists
+    /api/v2/filters
+  ).freeze
+
+  # Pages that are only shown to logged-out users, and should never get cached
+  # because of CSRF protection.
+  REQUIRE_LOGGED_OUT = %w(
+    /invite/abcdef
+    /auth/sign_in
+    /auth/sign_up
+    /auth/password/new
+    /auth/confirmation/new
+  ).freeze
+
+  # Non-exhaustive list of endpoints that feature language-dependent results
+  # and thus need to have a Vary on Accept-Language
+  LANGUAGE_DEPENDENT = %w(
+    /
+    /explore
+    /about
+    /api/v1/trends/statuses
+  ).freeze
+
+  module AuthorizedFetch
+    # Endpoints that require a signature with AUTHORIZED_FETCH and LIMITED_FEDERATION_MODE
+    # and thus should not be cached in those modes.
+    REQUIRE_SIGNATURE = %w(
+      /users/alice
+    ).freeze
+  end
+
+  module DisabledAnonymousAPI
+    # Endpoints that require a signature with DISALLOW_UNAUTHENTICATED_API_ACCESS
+    # and thus should not be cached in this mode.
+ REQUIRE_TOKEN = %w( + /api/v1/custom_emojis + ).freeze + end +end + +describe 'Caching behavior' do + shared_examples 'cachable response' do + it 'does not set cookies' do + expect(response.cookies).to be_empty + end + + it 'sets public cache control' do + # expect(response.cache_control[:max_age]&.to_i).to be_positive + expect(response.cache_control[:public]).to be_truthy + expect(response.cache_control[:private]).to be_falsy + expect(response.cache_control[:no_store]).to be_falsy + expect(response.cache_control[:no_cache]).to be_falsy + end + end + + shared_examples 'non-cacheable response' do + it 'sets private cache control' do + expect(response.cache_control[:private]).to be_truthy + expect(response.cache_control[:no_store]).to be_truthy + end + end + + shared_examples 'non-cacheable error' do + it 'does not return HTTP success' do + expect(response).to_not have_http_status(200) + end + + it 'does not have cache headers' do + expect(response.cache_control[:public]).to be_falsy + end + end + + shared_examples 'language-dependent' do + it 'has a Vary on Accept-Language' do + expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('accept-language') + end + end + + # Enable CSRF protection like it is in production, as it can cause cookies + # to be set and thus mess with cache. + around do |example| + old = ActionController::Base.allow_forgery_protection + ActionController::Base.allow_forgery_protection = true + + example.run + + ActionController::Base.allow_forgery_protection = old + end + + let(:alice) { Fabricate(:account, username: 'alice') } + let(:user) { Fabricate(:user, role: UserRole.find_by(name: 'Moderator')) } + + before do + # rubocop:disable Style/NumericLiterals + status = Fabricate(:status, account: alice, id: 110224538612341312) + Fabricate(:status, account: alice, id: 110224538643211312, visibility: :private) + Fabricate(:invite, code: 'abcdef') + Fabricate(:poll, status: status, account: alice, id: 12345) + # rubocop:enable Style/NumericLiterals + + user.account.follow!(alice) + end + + context 'when anonymously accessed' do + TestEndpoints::ALWAYS_CACHED.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'cachable response' + it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint) + end + end + + TestEndpoints::COOKIE_DEPENDENT_CACHABLE.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'cachable response' + + it 'has a Vary on Cookie' do + expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('cookie') + end + + it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint) + end + end + + TestEndpoints::AUTHORIZATION_DEPENDENT_CACHABLE.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'cachable response' + + it 'has a Vary on Authorization' do + expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('authorization') + end + + it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint) + end + end + + TestEndpoints::REQUIRE_LOGGED_OUT.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'non-cacheable response' + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::REQUIRE_LOGIN + TestEndpoints::REQUIRE_TOKEN).each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'non-cacheable error' + 
end
+    end
+
+    describe '/api/v1/instance/domain_blocks' do
+      around do |example|
+        old_setting = Setting.show_domain_blocks
+        Setting.show_domain_blocks = show_domain_blocks
+
+        example.run
+
+        Setting.show_domain_blocks = old_setting
+      end
+
+      before { get '/api/v1/instance/domain_blocks' }
+
+      context 'when set to be publicly-available' do
+        let(:show_domain_blocks) { 'all' }
+
+        it_behaves_like 'cachable response'
+      end
+
+      context 'when allowed for local users only' do
+        let(:show_domain_blocks) { 'users' }
+
+        it_behaves_like 'non-cacheable error'
+      end
+
+      context 'when disabled' do
+        let(:show_domain_blocks) { 'disabled' }
+
+        it_behaves_like 'non-cacheable error'
+      end
+    end
+  end
+
+  context 'when logged in' do
+    before do
+      sign_in user, scope: :user
+
+      # Unfortunately, devise's `sign_in` helper causes the `session` to be
+      # loaded in the next request regardless of whether it's actually accessed
+      # by the client code.
+      #
+      # So, we make an extra query to issue a session cookie instead.
+      #
+      # A less resource-intensive way to deal with that would be to generate the
+      # session cookie manually, but this seems pretty involved.
+      get '/'
+    end
+
+    TestEndpoints::ALWAYS_CACHED.each do |endpoint|
+      describe endpoint do
+        before { get endpoint }
+
+        it_behaves_like 'cachable response'
+        it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint)
+      end
+    end
+
+    TestEndpoints::COOKIE_DEPENDENT_CACHABLE.each do |endpoint|
+      describe endpoint do
+        before { get endpoint }
+
+        it_behaves_like 'non-cacheable response'
+
+        it 'has a Vary on Cookie' do
+          expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('cookie')
+        end
+      end
+    end
+
+    TestEndpoints::REQUIRE_LOGIN.each do |endpoint|
+      describe endpoint do
+        before { get endpoint }
+
+        it_behaves_like 'non-cacheable response'
+
+        it 'returns HTTP success' do
+          expect(response).to have_http_status(200)
+        end
+      end
+    end
+
+    TestEndpoints::REQUIRE_LOGGED_OUT.each do |endpoint|
+      describe endpoint do
+        before { get endpoint }
+
+        it_behaves_like 'non-cacheable error'
+      end
+    end
+  end
+
+  context 'with an auth token' do
+    let!(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read') }
+
+    TestEndpoints::ALWAYS_CACHED.each do |endpoint|
+      describe endpoint do
+        before do
+          get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" }
+        end
+
+        it_behaves_like 'cachable response'
+        it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint)
+      end
+    end
+
+    TestEndpoints::AUTHORIZATION_DEPENDENT_CACHABLE.each do |endpoint|
+      describe endpoint do
+        before do
+          get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" }
+        end
+
+        it_behaves_like 'non-cacheable response'
+
+        it 'has a Vary on Authorization' do
+          expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('authorization')
+        end
+      end
+    end
+
+    (TestEndpoints::REQUIRE_LOGGED_OUT + TestEndpoints::REQUIRE_TOKEN).each do |endpoint|
+      describe endpoint do
+        before do
+          get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" }
+        end
+
+        it_behaves_like 'non-cacheable response'
+
+        it 'returns HTTP success' do
+          expect(response).to have_http_status(200)
+        end
+      end
+    end
+
+    describe '/api/v1/instance/domain_blocks' do
+      around do |example|
+        old_setting = Setting.show_domain_blocks
+        Setting.show_domain_blocks = show_domain_blocks
+
+        example.run
+
+        Setting.show_domain_blocks = old_setting
+      end
+ + before do + get '/api/v1/instance/domain_blocks', headers: { 'Authorization' => "Bearer #{token.token}" } + end + + context 'when set to be publicly-available' do + let(:show_domain_blocks) { 'all' } + + it_behaves_like 'cachable response' + end + + context 'when allowed for local users only' do + let(:show_domain_blocks) { 'users' } + + it_behaves_like 'non-cacheable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + context 'when disabled' do + let(:show_domain_blocks) { 'disabled' } + + it_behaves_like 'non-cacheable error' + end + end + end + + context 'with a Signature header' do + let(:remote_actor) { Fabricate(:account, domain: 'example.org', uri: 'https://example.org/remote', protocol: :activitypub) } + let(:dummy_signature) { 'dummy-signature' } + + before do + remote_actor.follow!(alice) + end + + describe '/actor' do + before do + get '/actor', sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + TestEndpoints::REQUIRE_SIGNATURE.each do |endpoint| + describe endpoint do + before do + get endpoint, sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + end + end + + context 'when enabling AUTHORIZED_FETCH mode' do + around do |example| + ClimateControl.modify AUTHORIZED_FETCH: 'true' do + example.run + end + end + + context 'when not providing a Signature' do + describe '/actor' do + before do + get '/actor', headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::AuthorizedFetch::REQUIRE_SIGNATURE).each do |endpoint| + describe endpoint do + before do + get endpoint, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable error' + end + end + end + + context 'when providing a Signature' do + let(:remote_actor) { Fabricate(:account, domain: 'example.org', uri: 'https://example.org/remote', protocol: :activitypub) } + let(:dummy_signature) { 'dummy-signature' } + + before do + remote_actor.follow!(alice) + end + + describe '/actor' do + before do + get '/actor', sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::AuthorizedFetch::REQUIRE_SIGNATURE).each do |endpoint| + describe endpoint do + before do + get endpoint, sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + end + end + end + + context 'when enabling LIMITED_FEDERATION_MODE mode' do + around do |example| + ClimateControl.modify LIMITED_FEDERATION_MODE: 'true' do + old_whitelist_mode = Rails.configuration.x.whitelist_mode + Rails.configuration.x.whitelist_mode = true + + example.run + + Rails.configuration.x.whitelist_mode = old_whitelist_mode + end + end + + context 'when not providing a Signature' do + describe '/actor' do + before do + get 
'/actor', headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::AuthorizedFetch::REQUIRE_SIGNATURE).each do |endpoint| + describe endpoint do + before do + get endpoint, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable error' + end + end + end + + context 'when providing a Signature from an allowed domain' do + let(:remote_actor) { Fabricate(:account, domain: 'example.org', uri: 'https://example.org/remote', protocol: :activitypub) } + let(:dummy_signature) { 'dummy-signature' } + + before do + DomainAllow.create!(domain: remote_actor.domain) + remote_actor.follow!(alice) + end + + describe '/actor' do + before do + get '/actor', sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::AuthorizedFetch::REQUIRE_SIGNATURE).each do |endpoint| + describe endpoint do + before do + get endpoint, sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + end + end + + context 'when providing a Signature from a non-allowed domain' do + let(:remote_actor) { Fabricate(:account, domain: 'example.org', uri: 'https://example.org/remote', protocol: :activitypub) } + let(:dummy_signature) { 'dummy-signature' } + + describe '/actor' do + before do + get '/actor', sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'cachable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + + (TestEndpoints::REQUIRE_SIGNATURE + TestEndpoints::AuthorizedFetch::REQUIRE_SIGNATURE).each do |endpoint| + describe endpoint do + before do + get endpoint, sign_with: remote_actor, headers: { 'Accept' => 'application/activity+json' } + end + + it_behaves_like 'non-cacheable error' + end + end + end + end + + context 'when enabling DISALLOW_UNAUTHENTICATED_API_ACCESS' do + around do |example| + ClimateControl.modify DISALLOW_UNAUTHENTICATED_API_ACCESS: 'true' do + example.run + end + end + + context 'when anonymously accessed' do + TestEndpoints::ALWAYS_CACHED.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'cachable response' + it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint) + end + end + + TestEndpoints::REQUIRE_LOGGED_OUT.each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'non-cacheable response' + end + end + + (TestEndpoints::REQUIRE_TOKEN + TestEndpoints::AUTHORIZATION_DEPENDENT_CACHABLE + TestEndpoints::DisabledAnonymousAPI::REQUIRE_TOKEN).each do |endpoint| + describe endpoint do + before { get endpoint } + + it_behaves_like 'non-cacheable error' + end + end + end + + context 'with an auth token' do + let!(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read') } + + TestEndpoints::ALWAYS_CACHED.each do |endpoint| + describe endpoint do + before do + get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" } + end + + it_behaves_like 'cachable response' + 
it_behaves_like 'language-dependent' if TestEndpoints::LANGUAGE_DEPENDENT.include?(endpoint) + end + end + + TestEndpoints::AUTHORIZATION_DEPENDENT_CACHABLE.each do |endpoint| + describe endpoint do + before do + get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" } + end + + it_behaves_like 'non-cacheable response' + + it 'has a Vary on Authorization' do + expect(response.headers['Vary']&.split(',')&.map { |x| x.strip.downcase }).to include('authorization') + end + end + end + + (TestEndpoints::REQUIRE_LOGGED_OUT + TestEndpoints::REQUIRE_TOKEN + TestEndpoints::DisabledAnonymousAPI::REQUIRE_TOKEN).each do |endpoint| + describe endpoint do + before do + get endpoint, headers: { 'Authorization' => "Bearer #{token.token}" } + end + + it_behaves_like 'non-cacheable response' + + it 'returns HTTP success' do + expect(response).to have_http_status(200) + end + end + end + end + end +end From 598e63dad262ba454deda410f56c8f1b49ed96f8 Mon Sep 17 00:00:00 2001 From: Claire Date: Tue, 2 May 2023 13:58:48 +0200 Subject: [PATCH 06/96] Change media elements to use aspect-ratio rather than compute height themselves (#24686) --- .../mastodon/components/media_gallery.jsx | 10 ++-- .../picture_in_picture_placeholder.jsx | 42 +---------------- app/javascript/mastodon/components/status.jsx | 7 +-- .../mastodon/features/audio/index.jsx | 19 +++++--- .../features/status/components/card.jsx | 47 +++++-------------- .../mastodon/features/video/index.jsx | 46 ++---------------- .../styles/mastodon/components.scss | 5 ++ 7 files changed, 40 insertions(+), 136 deletions(-) diff --git a/app/javascript/mastodon/components/media_gallery.jsx b/app/javascript/mastodon/components/media_gallery.jsx index 5be0070a33..859d16a32f 100644 --- a/app/javascript/mastodon/components/media_gallery.jsx +++ b/app/javascript/mastodon/components/media_gallery.jsx @@ -313,7 +313,7 @@ class MediaGallery extends React.PureComponent { } render () { - const { media, lang, intl, sensitive, height, defaultWidth, standalone, autoplay } = this.props; + const { media, lang, intl, sensitive, defaultWidth, standalone, autoplay } = this.props; const { visible } = this.state; const width = this.state.width || defaultWidth; @@ -322,13 +322,9 @@ class MediaGallery extends React.PureComponent { const style = {}; if (this.isFullSizeEligible() && (standalone || !cropImages)) { - if (width) { - style.height = width / this.props.media.getIn([0, 'meta', 'small', 'aspect']); - } - } else if (width) { - style.height = width / (16/9); + style.aspectRatio = `${this.props.media.getIn([0, 'meta', 'small', 'aspect'])}`; } else { - style.height = height; + style.aspectRatio = '16 / 9'; } const size = media.take(4).size; diff --git a/app/javascript/mastodon/components/picture_in_picture_placeholder.jsx b/app/javascript/mastodon/components/picture_in_picture_placeholder.jsx index 6322b1c668..a51c974017 100644 --- a/app/javascript/mastodon/components/picture_in_picture_placeholder.jsx +++ b/app/javascript/mastodon/components/picture_in_picture_placeholder.jsx @@ -3,62 +3,22 @@ import PropTypes from 'prop-types'; import Icon from 'mastodon/components/icon'; import { removePictureInPicture } from 'mastodon/actions/picture_in_picture'; import { connect } from 'react-redux'; -import { debounce } from 'lodash'; import { FormattedMessage } from 'react-intl'; class PictureInPicturePlaceholder extends React.PureComponent { static propTypes = { - width: PropTypes.number, dispatch: PropTypes.func.isRequired, }; - state = { - width: this.props.width, - height: 
this.props.width && (this.props.width / (16/9)), - }; - handleClick = () => { const { dispatch } = this.props; dispatch(removePictureInPicture()); }; - setRef = c => { - this.node = c; - - if (this.node) { - this._setDimensions(); - } - }; - - _setDimensions () { - const width = this.node.offsetWidth; - const height = width / (16/9); - - this.setState({ width, height }); - } - - componentDidMount () { - window.addEventListener('resize', this.handleResize, { passive: true }); - } - - componentWillUnmount () { - window.removeEventListener('resize', this.handleResize); - } - - handleResize = debounce(() => { - if (this.node) { - this._setDimensions(); - } - }, 250, { - trailing: true, - }); - render () { - const { height } = this.state; - return ( -
+
diff --git a/app/javascript/mastodon/components/status.jsx b/app/javascript/mastodon/components/status.jsx index cd8423b2f4..b5242e7691 100644 --- a/app/javascript/mastodon/components/status.jsx +++ b/app/javascript/mastodon/components/status.jsx @@ -411,7 +411,7 @@ class Status extends ImmutablePureComponent { } if (pictureInPicture.get('inUse')) { - media = ; + media = ; } else if (status.get('media_attachments').size > 0) { if (this.props.muted) { media = ( @@ -460,12 +460,9 @@ class Status extends ImmutablePureComponent { src={attachment.get('url')} alt={attachment.get('description')} lang={status.get('language')} - width={this.props.cachedMediaWidth} - height={110} inline sensitive={status.get('sensitive')} onOpenVideo={this.handleOpenVideo} - cacheWidth={this.props.cacheMediaWidth} deployPictureInPicture={pictureInPicture.get('available') ? this.handleDeployPictureInPicture : undefined} visible={this.state.showMedia} onToggleVisibility={this.handleToggleMediaVisibility} @@ -498,8 +495,6 @@ class Status extends ImmutablePureComponent { onOpenMedia={this.handleOpenMedia} card={status.get('card')} compact - cacheWidth={this.props.cacheMediaWidth} - defaultWidth={this.props.cachedMediaWidth} sensitive={status.get('sensitive')} /> ); diff --git a/app/javascript/mastodon/features/audio/index.jsx b/app/javascript/mastodon/features/audio/index.jsx index 53f24c6a39..e8fe2c4d9a 100644 --- a/app/javascript/mastodon/features/audio/index.jsx +++ b/app/javascript/mastodon/features/audio/index.jsx @@ -384,7 +384,7 @@ class Audio extends React.PureComponent { } _getRadius () { - return parseInt(((this.state.height || this.props.height) - (PADDING * this._getScaleCoefficient()) * 2) / 2); + return parseInt((this.state.height || this.props.height) / 2 - PADDING * this._getScaleCoefficient()); } _getScaleCoefficient () { @@ -396,7 +396,7 @@ class Audio extends React.PureComponent { } _getCY() { - return Math.floor(this._getRadius() + (PADDING * this._getScaleCoefficient())); + return Math.floor((this.state.height || this.props.height) / 2); } _getAccentColor () { @@ -470,7 +470,7 @@ class Audio extends React.PureComponent { } return ( -
+
}
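// A minimal sketch of the pattern this commit applies across the media
// components above: instead of measuring offsetWidth in JavaScript and
// computing a pixel height, the container keeps its shape through the CSS
// aspect-ratio property. The component, class, and prop names below are
// illustrative only, not taken from the diff itself.
import React from 'react';

const MediaFrame = ({ aspect = '16 / 9', children }) => (
  // Width comes from normal document flow; height follows from the declared ratio.
  <div className='media-frame' style={{ aspectRatio: aspect, width: '100%' }}>
    {children}
  </div>
);

export default MediaFrame;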
diff --git a/app/javascript/mastodon/features/status/components/card.jsx b/app/javascript/mastodon/features/status/components/card.jsx index b67f671c50..88b38c65ad 100644 --- a/app/javascript/mastodon/features/status/components/card.jsx +++ b/app/javascript/mastodon/features/status/components/card.jsx @@ -8,7 +8,6 @@ import classnames from 'classnames'; import Icon from 'mastodon/components/icon'; import { useBlurhash } from 'mastodon/initial_state'; import Blurhash from 'mastodon/components/blurhash'; -import { debounce } from 'lodash'; const IDNA_PREFIX = 'xn--'; @@ -54,8 +53,6 @@ export default class Card extends React.PureComponent { card: ImmutablePropTypes.map, onOpenMedia: PropTypes.func.isRequired, compact: PropTypes.bool, - defaultWidth: PropTypes.number, - cacheWidth: PropTypes.func, sensitive: PropTypes.bool, }; @@ -64,7 +61,6 @@ export default class Card extends React.PureComponent { }; state = { - width: this.props.defaultWidth || 280, previewLoaded: false, embedded: false, revealed: !this.props.sensitive, @@ -87,24 +83,6 @@ export default class Card extends React.PureComponent { window.removeEventListener('resize', this.handleResize); } - _setDimensions () { - const width = this.node.offsetWidth; - - if (this.props.cacheWidth) { - this.props.cacheWidth(width); - } - - this.setState({ width }); - } - - handleResize = debounce(() => { - if (this.node) { - this._setDimensions(); - } - }, 250, { - trailing: true, - }); - handlePhotoClick = () => { const { card, onOpenMedia } = this.props; @@ -138,10 +116,6 @@ export default class Card extends React.PureComponent { setRef = c => { this.node = c; - - if (this.node) { - this._setDimensions(); - } }; handleImageLoad = () => { @@ -157,36 +131,31 @@ export default class Card extends React.PureComponent { renderVideo () { const { card } = this.props; const content = { __html: addAutoPlay(card.get('html')) }; - const { width } = this.state; - const ratio = card.get('width') / card.get('height'); - const height = width / ratio; return (
); } render () { const { card, compact } = this.props; - const { width, embedded, revealed } = this.state; + const { embedded, revealed } = this.state; if (card === null) { return null; } const provider = card.get('provider_name').length === 0 ? decodeIDNA(getHostname(card.get('url'))) : card.get('provider_name'); - const horizontal = (!compact && card.get('width') > card.get('height') && (card.get('width') + 100 >= width)) || card.get('type') !== 'link' || embedded; + const horizontal = (!compact && card.get('width') > card.get('height')) || card.get('type') !== 'link' || embedded; const interactive = card.get('type') !== 'link'; const className = classnames('status-card', { horizontal, compact, interactive }); const title = interactive ? {card.get('title')} : {card.get('title')}; const language = card.get('language') || ''; - const ratio = card.get('width') / card.get('height'); - const height = (compact && !embedded) ? (width / (16 / 9)) : (width / ratio); const description = (
@@ -196,6 +165,14 @@ export default class Card extends React.PureComponent {
 );
 
+    const thumbnailStyle = {
+      visibility: revealed ? null : 'hidden',
+    };
+
+    if (horizontal) {
+      thumbnailStyle.aspectRatio = (compact && !embedded) ? '16 / 9' : `${card.get('width')} / ${card.get('height')}`;
+    }
+
     let embed = '';
     let canvas = (
       
     );
-    let thumbnail = ;
+    let thumbnail = ;
 
     let spoilerButton = (