mastodon-glitch/app/models/account.rb


# frozen_string_literal: true
# == Schema Information
#
# Table name: accounts
#
# id :bigint(8) not null, primary key
# username :string default(""), not null
# domain :string
# private_key :text
# public_key :text default(""), not null
# created_at :datetime not null
# updated_at :datetime not null
# note :text default(""), not null
# display_name :string default(""), not null
# uri :string default(""), not null
# url :string
# avatar_file_name :string
# avatar_content_type :string
# avatar_file_size :integer
# avatar_updated_at :datetime
# header_file_name :string
# header_content_type :string
# header_file_size :integer
# header_updated_at :datetime
# avatar_remote_url :string
# locked :boolean default(FALSE), not null
# header_remote_url :string default(""), not null
# last_webfingered_at :datetime
# inbox_url :string default(""), not null
# outbox_url :string default(""), not null
# shared_inbox_url :string default(""), not null
# followers_url :string default(""), not null
# protocol :integer default("ostatus"), not null
# memorial :boolean default(FALSE), not null
# moved_to_account_id :bigint(8)
# featured_collection_url :string
# fields :jsonb
# actor_type :string
# discoverable :boolean
# also_known_as :string is an Array
# silenced_at :datetime
# suspended_at :datetime
# hide_collections :boolean
# avatar_storage_schema_version :integer
# header_storage_schema_version :integer
# suspension_origin :integer
# sensitized_at :datetime
# trendable :boolean
# reviewed_at :datetime
# requested_review_at :datetime
# indexable :boolean default(FALSE), not null
# attribution_domains :string default([]), is an Array
#
class Account < ApplicationRecord
  self.ignored_columns += %w(
    devices_url
    hub_url
    remote_url
    salmon_url
    secret
    subscription_expires_at
    trust_level
  )

  BACKGROUND_REFRESH_INTERVAL = 1.week.freeze
  DEFAULT_FIELDS_SIZE = 4
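  # ID of the internal instance actor account (see #instance_actor? and the without_internal scope)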
  INSTANCE_ACTOR_ID = -99
  USERNAME_RE = /[a-z0-9_]+([.-]+[a-z0-9_]+)*/i
  MENTION_RE = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]]+([.-]+[[:word:]]+)*)?)}
  URL_PREFIX_RE = %r{\Ahttp(s?)://[^/]+}
  USERNAME_ONLY_RE = /\A#{USERNAME_RE}\z/i
  USERNAME_LENGTH_LIMIT = 30
  DISPLAY_NAME_LENGTH_LIMIT = 30
  NOTE_LENGTH_LIMIT = 500
  AUTOMATED_ACTOR_TYPES = %w(Application Service).freeze

  include Attachmentable # Load prior to Avatar & Header concerns

  include Account::Associations
  include Account::Avatar
  include Account::Counters
  include Account::FinderConcern
  include Account::Header
  include Account::Interactions
  include Account::Merging
  include Account::Search
  include Account::StatusesSearch
  include Account::AttributionDomains
  include DomainMaterializable
  include DomainNormalizable
  include Paginable
  include Reviewable

  enum :protocol, { ostatus: 0, activitypub: 1 }
  enum :suspension_origin, { local: 0, remote: 1 }, prefix: true

  validates :username, presence: true
  validates_with UniqueUsernameValidator, if: -> { will_save_change_to_username? }

  # Remote user validations, also applies to internal actors
  validates :username, format: { with: USERNAME_ONLY_RE }, if: -> { (!local? || actor_type == 'Application') && will_save_change_to_username? }

  # Remote user validations
  validates :uri, presence: true, unless: :local?, on: :create

  # Local user validations
  validates :username, format: { with: /\A[a-z0-9_]+\z/i }, length: { maximum: USERNAME_LENGTH_LIMIT }, if: -> { local? && will_save_change_to_username? && actor_type != 'Application' }
  validates_with UnreservedUsernameValidator, if: -> { local? && will_save_change_to_username? && actor_type != 'Application' }
  validates :display_name, length: { maximum: DISPLAY_NAME_LENGTH_LIMIT }, if: -> { local? && will_save_change_to_display_name? }
  validates :note, note_length: { maximum: NOTE_LENGTH_LIMIT }, if: -> { local? && will_save_change_to_note? }
  validates :fields, length: { maximum: DEFAULT_FIELDS_SIZE }, if: -> { local? && will_save_change_to_fields? }

  with_options on: :create do
    validates :uri, absence: true, if: :local?
    validates :inbox_url, absence: true, if: :local?
    validates :shared_inbox_url, absence: true, if: :local?
    validates :followers_url, absence: true, if: :local?
  end

  normalizes :username, with: ->(username) { username.squish }

  scope :without_internal, -> { where(id: 1...) }
  scope :remote, -> { where.not(domain: nil) }
  scope :local, -> { where(domain: nil) }
  scope :partitioned, -> { order(Arel.sql('row_number() over (partition by domain)')) }
  scope :silenced, -> { where.not(silenced_at: nil) }
  scope :suspended, -> { where.not(suspended_at: nil) }
  scope :sensitized, -> { where.not(sensitized_at: nil) }
  scope :without_suspended, -> { where(suspended_at: nil) }
  scope :without_silenced, -> { where(silenced_at: nil) }
  scope :without_instance_actor, -> { where.not(id: INSTANCE_ACTOR_ID) }
  scope :recent, -> { reorder(id: :desc) }
  scope :bots, -> { where(actor_type: AUTOMATED_ACTOR_TYPES) }
  scope :non_automated, -> { where.not(actor_type: AUTOMATED_ACTOR_TYPES) }
  scope :groups, -> { where(actor_type: 'Group') }
  scope :alphabetic, -> { order(domain: :asc, username: :asc) }
  scope :matches_uri_prefix, ->(value) { where(arel_table[:uri].matches("#{sanitize_sql_like(value)}/%", false, true)).or(where(uri: value)) }
  scope :matches_username, ->(value) { where('lower((username)::text) LIKE lower(?)', "#{value}%") }
  scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches("#{value}%")) }
  scope :without_unapproved, -> { left_outer_joins(:user).merge(User.approved.confirmed).or(remote) }
  scope :auditable, -> { where(id: Admin::ActionLog.select(:account_id).distinct) }
  scope :searchable, -> { without_unapproved.without_suspended.where(moved_to_account_id: nil) }
  scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).joins(:account_stat) }
  scope :by_recent_status, -> { includes(:account_stat).merge(AccountStat.by_recent_status).references(:account_stat) }
  scope :by_recent_activity, -> { left_joins(:user, :account_stat).order(coalesced_activity_timestamps.desc).order(id: :desc) }
  scope :by_domain_and_subdomains, ->(domain) { where(domain: Instance.by_domain_and_subdomains(domain).select(:domain)) }
  scope :not_excluded_by_account, ->(account) { where.not(id: account.excluded_from_timeline_account_ids) }
  scope :not_domain_blocked_by_account, ->(account) { where(arel_table[:domain].eq(nil).or(arel_table[:domain].not_in(account.excluded_from_timeline_domains))) }
  scope :dormant, -> { joins(:account_stat).merge(AccountStat.without_recent_activity) }
  scope :with_username, ->(value) { where arel_table[:username].lower.eq(value.to_s.downcase) }
  scope :with_domain, ->(value) { where arel_table[:domain].lower.eq(value&.to_s&.downcase) }
  scope :without_memorial, -> { where(memorial: false) }
  scope :duplicate_uris, -> { select(:uri, Arel.star.count).group(:uri).having(Arel.star.count.gt(1)) }

  after_update_commit :trigger_update_webhooks

  delegate :email,
           :unconfirmed_email,
           :current_sign_in_at,
           :created_at,
           :sign_up_ip,
           :confirmed?,
           :approved?,
           :pending?,
           :disabled?,
           :unconfirmed?,
           :unconfirmed_or_pending?,
           :role,
           :locale,
           :shows_application?,
           :prefers_noindex?,
           :time_zone,
           to: :user,
           prefix: true,
           allow_nil: true

  delegate :chosen_languages, to: :user, prefix: false, allow_nil: true

  update_index('accounts', :self)

  def local?
    domain.nil?
  end

  def moved?
    moved_to_account_id.present?
  end

  def bot?
    AUTOMATED_ACTOR_TYPES.include?(actor_type)
  end

  def instance_actor?
    id == INSTANCE_ACTOR_ID
  end

  alias bot bot?

  def bot=(val)
    self.actor_type = ActiveModel::Type::Boolean.new.cast(val) ? 'Service' : 'Person'
  end

  def group?
    actor_type == 'Group'
  end

  alias group group?

  def acct
    local? ? username : "#{username}@#{domain}"
  end

  def pretty_acct
    local? ? username : "#{username}@#{Addressable::IDNA.to_unicode(domain)}"
  end

  def local_username_and_domain
    "#{username}@#{Rails.configuration.x.local_domain}"
  end

  def local_followers_count
    Follow.where(target_account_id: id).count
  end

  def to_webfinger_s
    "acct:#{local_username_and_domain}"
  end

  def possibly_stale?
    last_webfingered_at.nil? || last_webfingered_at <= 1.day.ago
  end
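
  # Queue a background webfinger refresh, spread over a random delay of up to
  # 6 hours, when the last webfinger lookup is older than BACKGROUND_REFRESH_INTERVAL.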
  def schedule_refresh_if_stale!
    return unless last_webfingered_at.present? && last_webfingered_at <= BACKGROUND_REFRESH_INTERVAL.ago

    AccountRefreshWorker.perform_in(rand(6.hours.to_i), id)
  end

  def refresh!
    ResolveAccountService.new.call(acct) unless local?
  end

  def silenced?
    silenced_at.present?
  end

  def silence!(date = Time.now.utc)
    update!(silenced_at: date)
  end

  def unsilence!
    update!(silenced_at: nil)
  end

  def suspended?
    suspended_at.present? && !instance_actor?
  end

  def suspended_locally?
    suspended? && suspension_origin_local?
  end
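
  # A suspension is treated as permanent when no deletion request is pending;
  # a suspension with an associated deletion request is temporary.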
  def suspended_permanently?
    suspended? && deletion_request.nil?
  end

  def suspended_temporarily?
    suspended? && deletion_request.present?
  end

  alias unavailable? suspended?
  alias permanently_unavailable? suspended_permanently?

  def suspend!(date: Time.now.utc, origin: :local, block_email: true)
    transaction do
      create_deletion_request!
      update!(suspended_at: date, suspension_origin: origin)
      create_canonical_email_block! if block_email
    end
  end

  def unsuspend!
    transaction do
      deletion_request&.destroy!
      update!(suspended_at: nil, suspension_origin: nil)
      destroy_canonical_email_block!
    end
  end

  def sensitized?
    sensitized_at.present?
  end

  def sensitize!(date = Time.now.utc)
    update!(sensitized_at: date)
  end

  def unsensitize!
    update!(sensitized_at: nil)
  end

  def memorialize!
    update!(memorial: true)
  end

  def trendable?
    boolean_with_default('trendable', Setting.trendable_by_default)
  end

  def sign?
    true
  end

  def previous_strikes_count
    strikes.where(overruled_at: nil).count
  end
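
  # Memoized RSA keypair; remote accounts only have a public key, so fall back
  # to building the key object from the public key alone.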
  def keypair
    @keypair ||= OpenSSL::PKey::RSA.new(private_key || public_key)
  end
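
  # Reconcile the account's hashtags with the given list of tag names,
  # adding missing tags and removing ones that are no longer listed.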
  def tags_as_strings=(tag_names)
    hashtags_map = Tag.find_or_create_by_names(tag_names).index_by(&:name)

    # Remove hashtags that are to be deleted
    tags.each do |tag|
      if hashtags_map.key?(tag.name)
        hashtags_map.delete(tag.name)
      else
        tags.delete(tag)
      end
    end

    # Add hashtags that were so far missing
    hashtags_map.each_value do |tag|
      tags << tag
    end
  end

  def also_known_as
    self[:also_known_as] || []
  end

  def fields
    (self[:fields] || []).filter_map do |f|
      Account::Field.new(self, f)
    rescue
      nil
    end
  end
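
  # Replace profile fields from submitted form attributes, carrying over
  # verified_at for any field whose value has not changed.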
  def fields_attributes=(attributes)
    fields = []
    old_fields = self[:fields] || []
    old_fields = [] if old_fields.is_a?(Hash)

    if attributes.is_a?(Hash)
      attributes.each_value do |attr|
        next if attr[:name].blank?

        previous = old_fields.find { |item| item['value'] == attr[:value] }

        attr[:verified_at] = previous['verified_at'] if previous && previous['verified_at'].present?

        fields << attr
      end
    end

    self[:fields] = fields
  end

  def build_fields
    return if fields.size >= DEFAULT_FIELDS_SIZE

    tmp = self[:fields] || []
    tmp = [] if tmp.is_a?(Hash)

    (DEFAULT_FIELDS_SIZE - tmp.size).times do
      tmp << { name: '', value: '' }
    end

    self.fields = tmp
  end
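
  # If validation fails because of the avatar or header attachment,
  # clear that attachment and retry the save once.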
  def save_with_optional_media!
    save!
  rescue ActiveRecord::RecordInvalid => e
    errors = e.record.errors.errors

    errors.each do |err|
      if err.attribute == :avatar
        self.avatar = nil
      elsif err.attribute == :header
        self.header = nil
      end
    end

    save!
  end

  def hides_followers?
    hide_collections?
  end

  def hides_following?
    hide_collections?
  end

  def object_type
    :person
  end

  def to_param
    username
  end

  def to_log_human_identifier
    acct
  end
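
  # Cached set of account IDs this account blocks, is blocked by, or mutes,
  # used to filter those accounts out of timelines.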
  def excluded_from_timeline_account_ids
    Rails.cache.fetch("exclude_account_ids_for:#{id}") { block_relationships.pluck(:target_account_id) + blocked_by_relationships.pluck(:account_id) + mute_relationships.pluck(:target_account_id) }
  end

  def excluded_from_timeline_domains
    Rails.cache.fetch("exclude_domains_for:#{id}") { domain_blocks.pluck(:domain) }
  end

  def preferred_inbox_url
    shared_inbox_url.presence || inbox_url
  end
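
  # Prefix (scheme and authority of the actor URI) used to group accounts for
  # follower synchronization; local accounts share the 'local' prefix.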
  def synchronization_uri_prefix
    return 'local' if local?

    @synchronization_uri_prefix ||= "#{uri[URL_PREFIX_RE]}/"
  end

  class << self
    def readonly_attributes
      super - %w(statuses_count following_count followers_count)
    end
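
    # Distinct delivery inboxes for the current scope, preferring each
    # account's shared inbox and excluding inboxes marked as unavailable.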
    def inboxes
      urls = reorder(nil).activitypub.group(:preferred_inbox_url).pluck(Arel.sql("coalesce(nullif(accounts.shared_inbox_url, ''), accounts.inbox_url) AS preferred_inbox_url"))

      DeliveryFailureTracker.without_unavailable(urls)
    end

    def coalesced_activity_timestamps
      Arel.sql(
        <<~SQL.squish
          COALESCE(users.current_sign_in_at, account_stats.last_status_at, to_timestamp(0))
        SQL
      )
    end
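
    # Parse @mentions out of the given text and return the matching accounts
    # from the entity cache, normalizing local-domain mentions to domain: nil.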
    def from_text(text)
      return [] if text.blank?

      text.scan(MENTION_RE).map { |match| match.first.split('@', 2) }.uniq.filter_map do |(username, domain)|
        domain = if TagManager.instance.local_domain?(domain)
                   nil
                 else
                   TagManager.instance.normalize_domain(domain)
                 end

        EntityCache.instance.mention(username, domain)
      end
    end
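
    # Define a writer/reader pair that acts as the boolean inverse of another
    # attribute, e.g. account.show_collections = true sets hide_collections to false.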
    def inverse_alias(key, original_key)
      define_method(:"#{key}=") do |value|
        public_send(:"#{original_key}=", !ActiveModel::Type::Boolean.new.cast(value))
      end

      define_method(key) do
        !public_send(original_key)
      end
    end
  end

  inverse_alias :show_collections, :hide_collections
  inverse_alias :unlocked, :locked

  def emojis
    @emojis ||= CustomEmoji.from_text(emojifiable_text, domain)
  end

  before_validation :prepare_contents, if: :local?
  before_create :generate_keys
  before_destroy :clean_feed_manager
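
  # Generate and persist an RSA keypair for local accounts that do not have one yet.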
  def ensure_keys!
    return unless local? && private_key.blank? && public_key.blank?

    generate_keys
    save!
  end

  private

  def prepare_contents
    display_name&.strip!
    note&.strip!
  end

  def generate_keys
    return unless local? && private_key.blank? && public_key.blank?

    keypair = OpenSSL::PKey::RSA.new(2048)
    self.private_key = keypair.to_pem
    self.public_key = keypair.public_key.to_pem
  end

  def normalize_domain
    return if local?

    super
  end

  def emojifiable_text
    [note, display_name, fields.map(&:name), fields.map(&:value)].join(' ')
  end

  def clean_feed_manager
    FeedManager.instance.clean_feeds!(:home, [id])
  end

  def create_canonical_email_block!
    return unless local? && user_email.present?

    begin
      CanonicalEmailBlock.create(reference_account: self, email: user_email)
    rescue ActiveRecord::RecordNotUnique
      # A canonical e-mail block may already exist for the same e-mail
    end
  end

  def destroy_canonical_email_block!
    return unless local?

    CanonicalEmailBlock.where(reference_account: self).delete_all
  end

  # NOTE: the `account.created` webhook is triggered by the `User` model, not `Account`.
  def trigger_update_webhooks
    TriggerWebhookWorker.perform_async('account.updated', 'Account', id) if local?
  end
end