Merge pull request #2797 from ClearlyClaire/glitch-soc/merge-upstream

Merge upstream changes up to 887e64efd4
commit 8af71d021b (pull/2798/head)
Claire 2024-07-25 19:06:12 +02:00, committed by GitHub
14 changed files with 79 additions and 39 deletions


@@ -23,7 +23,6 @@ class ApplicationController < ActionController::Base
   helper_method :current_theme
   helper_method :single_user_mode?
   helper_method :use_seamless_external_login?
-  helper_method :omniauth_only?
   helper_method :sso_account_settings
   helper_method :limited_federation_mode?
   helper_method :body_class_string
@@ -140,10 +139,6 @@ class ApplicationController < ActionController::Base
     Devise.pam_authentication || Devise.ldap_authentication
   end
 
-  def omniauth_only?
-    ENV['OMNIAUTH_ONLY'] == 'true'
-  end
-
   def sso_account_settings
     ENV.fetch('SSO_ACCOUNT_SETTINGS', nil)
   end


@@ -101,7 +101,7 @@ class LinkDetailsExtractor
     end
 
     def json
-      @json ||= root_array(Oj.load(@data)).find { |obj| SUPPORTED_TYPES.include?(obj['@type']) } || {}
+      @json ||= root_array(Oj.load(@data)).compact.find { |obj| SUPPORTED_TYPES.include?(obj['@type']) } || {}
     end
   end
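Context for the `.compact` change above: an ld+json script tag whose body is just `null` parses to `nil`, and calling `obj['@type']` on `nil` raises `NoMethodError`. A minimal sketch of the failure mode, assuming a simplified `root_array` helper that merely wraps a non-array root in an array (the real helper lives elsewhere in the class):

  require 'oj'

  # Assumed simplification of LinkDetailsExtractor's root_array helper.
  def root_array(value)
    value.is_a?(Array) ? value : [value]
  end

  data = 'null' # an ld+json script tag containing only `null`

  root_array(Oj.load(data)).find { |obj| obj['@type'] == 'NewsArticle' }
  # raises NoMethodError: undefined method `[]' for nil

  root_array(Oj.load(data)).compact.find { |obj| obj['@type'] == 'NewsArticle' } || {}
  # => {}  (nil entries dropped, extraction falls back to an empty hash)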


@@ -68,7 +68,7 @@ class Account < ApplicationRecord
   INSTANCE_ACTOR_ID = -99
   USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
-  MENTION_RE = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}i
+  MENTION_RE = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}
   URL_PREFIX_RE = %r{\Ahttp(s?)://[^/]+}
   USERNAME_ONLY_RE = /\A#{USERNAME_RE}\z/i
   USERNAME_LENGTH_LIMIT = 30
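A note on the `/i` removal above (my reading, not part of the commit): interpolating USERNAME_RE into %r{} preserves its own /i flag, so the username part stays case-insensitive, while the surrounding pattern no longer applies case folding to the [[:word:]] look-behind, which appears to be what broke matching near the letter ß. A quick console sketch using only the constants shown in this hunk, mirroring the Account specs added further down:

  USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
  MENTION_RE  = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}

  MENTION_RE.match('Hello toß @alice from me')[1]            # => "alice"
  MENTION_RE.match('Hello to @aLice@Example.com from me')[1] # => "aLice@Example.com"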


@@ -37,7 +37,7 @@ class Tag < ApplicationRecord
   HASHTAG_LAST_SEQUENCE = '([[:word:]_]*[[:alpha:]][[:word:]_]*)'
   HASHTAG_NAME_PAT = "#{HASHTAG_FIRST_SEQUENCE}|#{HASHTAG_LAST_SEQUENCE}"
-  HASHTAG_RE = %r{(?<![=/)\p{Alnum}])#(#{HASHTAG_NAME_PAT})}i
+  HASHTAG_RE = %r{(?<![=/)\p{Alnum}])#(#{HASHTAG_NAME_PAT})}
   HASHTAG_NAME_RE = /\A(#{HASHTAG_NAME_PAT})\z/i
   HASHTAG_INVALID_CHARS_RE = /[^[:alnum:]\u0E47-\u0E4E#{HASHTAG_SEPARATORS}]/


@@ -117,6 +117,7 @@ class User < ApplicationRecord
   scope :pending, -> { where(approved: false) }
   scope :approved, -> { where(approved: true) }
   scope :confirmed, -> { where.not(confirmed_at: nil) }
+  scope :unconfirmed, -> { where(confirmed_at: nil) }
   scope :enabled, -> { where(disabled: false) }
   scope :disabled, -> { where(disabled: true) }
   scope :active, -> { confirmed.signed_in_recently.account_not_suspended }


@@ -16,7 +16,7 @@ class Scheduler::UserCleanupScheduler
   private
 
   def clean_unconfirmed_accounts!
-    User.where('confirmed_at is NULL AND confirmation_sent_at <= ?', UNCONFIRMED_ACCOUNTS_MAX_AGE_DAYS.days.ago).reorder(nil).find_in_batches do |batch|
+    User.unconfirmed.where(confirmation_sent_at: ..UNCONFIRMED_ACCOUNTS_MAX_AGE_DAYS.days.ago).reorder(nil).find_in_batches do |batch|
       # We have to do it separately because of missing database constraints
       AccountModerationNote.where(target_account_id: batch.map(&:account_id)).delete_all
       Account.where(id: batch.map(&:account_id)).delete_all
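For reference, the new scope-plus-beginless-range form produces the same SQL shape as the old string condition. A rough Rails-console sketch, with the constant value assumed here purely for illustration:

  # Assumed value for illustration; the real constant is defined on the scheduler class.
  UNCONFIRMED_ACCOUNTS_MAX_AGE_DAYS = 2

  User.unconfirmed.where(confirmation_sent_at: ..UNCONFIRMED_ACCOUNTS_MAX_AGE_DAYS.days.ago).to_sql
  # roughly: SELECT "users".* FROM "users"
  #          WHERE "users"."confirmed_at" IS NULL
  #          AND "users"."confirmation_sent_at" <= '<timestamp two days ago>'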


@@ -9,7 +9,7 @@ module Twitter::TwitterText
   class Regex
     REGEXEN[:valid_general_url_path_chars] = /[^\p{White_Space}<>()?]/iou
-    REGEXEN[:valid_url_path_ending_chars] = /[^\p{White_Space}()?!*"'「」<>;:=,.$%\[\]~&|@]|(?:#{REGEXEN[:valid_url_balanced_parens]})/iou
+    REGEXEN[:valid_url_path_ending_chars] = /[^\p{White_Space}()?!*"'「」<>;:=,.$%\[\]~&|]|(?:#{REGEXEN[:valid_url_balanced_parens]})/iou
     REGEXEN[:valid_url_balanced_parens] = /
       \(
         (?:
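The change above drops @ from the set of characters that may not end a URL path, so links such as https://gta.fandom.com/wiki/TW@ keep their trailing @ (see the TextFormatter spec added further down). A toy illustration of the effect, not the real twitter-text pipeline:

  # Hypothetical simplification: an ending character class that excludes '@'
  # stops the match one character early.
  without_at = %r{wiki/[\w/]+[^\s@]}
  with_at    = %r{wiki/[\w/@]+}

  'https://gta.fandom.com/wiki/TW@'[without_at] # => "wiki/TW"
  'https://gta.fandom.com/wiki/TW@'[with_at]    # => "wiki/TW@"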


@@ -194,8 +194,8 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
     t.integer "avatar_storage_schema_version"
     t.integer "header_storage_schema_version"
     t.string "devices_url"
-    t.integer "suspension_origin"
     t.datetime "sensitized_at", precision: nil
+    t.integer "suspension_origin"
     t.boolean "trendable"
     t.datetime "reviewed_at", precision: nil
     t.datetime "requested_review_at", precision: nil
@@ -579,12 +579,12 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
   end
 
   create_table "ip_blocks", force: :cascade do |t|
+    t.datetime "created_at", precision: nil, null: false
+    t.datetime "updated_at", precision: nil, null: false
+    t.datetime "expires_at", precision: nil
     t.inet "ip", default: "0.0.0.0", null: false
     t.integer "severity", default: 0, null: false
-    t.datetime "expires_at", precision: nil
     t.text "comment", default: "", null: false
-    t.datetime "created_at", precision: nil, null: false
-    t.datetime "updated_at", precision: nil, null: false
     t.index ["ip"], name: "index_ip_blocks_on_ip", unique: true
   end
@@ -1424,9 +1424,9 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
   add_index "instances", ["domain"], name: "index_instances_on_domain", unique: true
 
   create_view "user_ips", sql_definition: <<-SQL
-      SELECT t0.user_id,
-      t0.ip,
-      max(t0.used_at) AS used_at
+      SELECT user_id,
+      ip,
+      max(used_at) AS used_at
      FROM ( SELECT users.id AS user_id,
               users.sign_up_ip AS ip,
               users.created_at AS used_at
@@ -1443,7 +1443,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
               login_activities.created_at
              FROM login_activities
             WHERE (login_activities.success = true)) t0
-    GROUP BY t0.user_id, t0.ip;
+    GROUP BY user_id, ip;
     SQL
 
   create_view "account_summaries", materialized: true, sql_definition: <<-SQL
      SELECT accounts.id AS account_id,
@@ -1464,9 +1464,9 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
   add_index "account_summaries", ["account_id"], name: "index_account_summaries_on_account_id", unique: true
 
   create_view "global_follow_recommendations", materialized: true, sql_definition: <<-SQL
-      SELECT t0.account_id,
-      sum(t0.rank) AS rank,
-      array_agg(t0.reason) AS reason
+      SELECT account_id,
+      sum(rank) AS rank,
+      array_agg(reason) AS reason
      FROM ( SELECT account_summaries.account_id,
              ((count(follows.id))::numeric / (1.0 + (count(follows.id))::numeric)) AS rank,
              'most_followed'::text AS reason
@@ -1490,8 +1490,8 @@ ActiveRecord::Schema[7.1].define(version: 2024_07_13_171909) do
                   WHERE (follow_recommendation_suppressions.account_id = statuses.account_id)))))
           GROUP BY account_summaries.account_id
          HAVING (sum((status_stats.reblogs_count + status_stats.favourites_count)) >= (5)::numeric)) t0
-  GROUP BY t0.account_id
-  ORDER BY (sum(t0.rank)) DESC;
+  GROUP BY account_id
+  ORDER BY (sum(rank)) DESC;
     SQL
 
   add_index "global_follow_recommendations", ["account_id"], name: "index_global_follow_recommendations_on_account_id", unique: true


@@ -129,6 +129,24 @@ RSpec.describe LinkDetailsExtractor do
       include_examples 'structured data'
     end
 
+    context 'with the first tag is null' do
+      let(:html) { <<~HTML }
+        <!doctype html>
+        <html>
+          <body>
+            <script type="application/ld+json">
+              null
+            </script>
+            <script type="application/ld+json">
+              #{ld_json}
+            </script>
+          </body>
+        </html>
+      HTML
+
+      include_examples 'structured data'
+    end
+
     context 'with preceding block of unsupported LD+JSON' do
       let(:html) { <<~HTML }
         <!doctype html>


@@ -224,6 +224,14 @@ RSpec.describe TextFormatter do
       end
     end
 
+    context 'when given a URL with trailing @ symbol' do
+      let(:text) { 'https://gta.fandom.com/wiki/TW@ Content' }
+
+      it 'matches the full URL' do
+        expect(subject).to include 'href="https://gta.fandom.com/wiki/TW@"'
+      end
+    end
+
     context 'when given a URL containing unsafe code (XSS attack, visible part)' do
       let(:text) { 'http://example.com/b<del>b</del>' }

@@ -711,6 +711,14 @@ RSpec.describe Account do
     it 'does not match URL query string' do
       expect(subject.match('https://example.com/?x=@alice')).to be_nil
     end
+
+    it 'matches usernames immediately following the letter ß' do
+      expect(subject.match('Hello toß @alice from me')[1]).to eq 'alice'
+    end
+
+    it 'matches usernames containing uppercase characters' do
+      expect(subject.match('Hello to @aLice@Example.com from me')[1]).to eq 'aLice@Example.com'
+    end
   end
 
   describe 'validations' do

@@ -95,6 +95,14 @@ RSpec.describe Tag do
     it 'does not match purely-numeric hashtags' do
       expect(subject.match('hello #0123456')).to be_nil
     end
+
+    it 'matches hashtags immediately following the letter ß' do
+      expect(subject.match('Hello toß #ruby').to_s).to eq '#ruby'
+    end
+
+    it 'matches hashtags containing uppercase characters' do
+      expect(subject.match('Hello #rubyOnRails').to_s).to eq '#rubyOnRails'
+    end
   end
 
   describe '#to_param' do

@@ -7,7 +7,7 @@ RSpec::Matchers.define :include_pagination_headers do |links|
     end.all?
   end
 
-  failure_message do |header|
-    "expected that #{header} would have the same values as #{links}."
+  failure_message do |response|
+    "expected that #{response.headers['Link']} would have the same values as #{links}."
   end
 end
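For context on the matcher fix above: in a custom RSpec matcher, the failure_message block yields the object passed to expect(...) (here the response), not a header value, hence the rename and the explicit response.headers['Link'] lookup. A minimal, hypothetical matcher showing the same pattern:

  require 'rspec/expectations'

  # Hypothetical matcher, only to show which object the block yields.
  RSpec::Matchers.define :include_link_header do
    match { |response| response.headers.key?('Link') }

    failure_message do |response|
      # `response` is the actual value that was passed to expect(...)
      "expected a Link header, got #{response.headers['Link'].inspect}"
    end
  end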


@@ -12,29 +12,31 @@ describe Scheduler::UserCleanupScheduler
   describe '#perform' do
     before do
-      # Need to update the already-existing users because their initialization overrides confirmation_sent_at
+      # Update already-existing users because initialization overrides `confirmation_sent_at`
       new_unconfirmed_user.update!(confirmed_at: nil, confirmation_sent_at: Time.now.utc)
       old_unconfirmed_user.update!(confirmed_at: nil, confirmation_sent_at: 10.days.ago)
       confirmed_user.update!(confirmed_at: 1.day.ago)
     end
 
-    it 'deletes the old unconfirmed user, their account, and the moderation note' do
+    it 'deletes the old unconfirmed user and metadata while preserving confirmed user and newer unconfirmed user' do
       expect { subject.perform }
-        .to change { User.exists?(old_unconfirmed_user.id) }.from(true).to(false)
-        .and change { Account.exists?(old_unconfirmed_user.account_id) }.from(true).to(false)
-      expect { moderation_note.reload }.to raise_error(ActiveRecord::RecordNotFound)
+        .to change { User.exists?(old_unconfirmed_user.id) }
+        .from(true).to(false)
+        .and change { Account.exists?(old_unconfirmed_user.account_id) }
+        .from(true).to(false)
+
+      expect { moderation_note.reload }
+        .to raise_error(ActiveRecord::RecordNotFound)
+
+      expect_preservation_of(new_unconfirmed_user)
+      expect_preservation_of(confirmed_user)
     end
 
-    it 'does not delete the new unconfirmed user or their account' do
-      subject.perform
-
-      expect(User.exists?(new_unconfirmed_user.id)).to be true
-      expect(Account.exists?(new_unconfirmed_user.account_id)).to be true
-    end
+    private
 
-    it 'does not delete the confirmed user or their account' do
-      subject.perform
-
-      expect(User.exists?(confirmed_user.id)).to be true
-      expect(Account.exists?(confirmed_user.account_id)).to be true
-    end
+    def expect_preservation_of(user)
+      expect(User.exists?(user.id))
+        .to be true
+      expect(Account.exists?(user.account_id))
+        .to be true
+    end
   end
 end