has_one :notification, as: :activity, dependent: :destroy
validates :account_id, uniqueness: { scope: :target_account_id }
- validates_with FollowLimitValidator, on: :create
+ validates_with FollowLimitValidator, on: :create, if: :rate_limit?
scope :recent, -> { reorder(id: :desc) }
has_one :notification, as: :activity, dependent: :destroy
validates :account_id, uniqueness: { scope: :target_account_id }
- validates_with FollowLimitValidator, on: :create
+ validates_with FollowLimitValidator, on: :create, if: :rate_limit?
def authorize!
account.follow!(target_account, reblogs: show_reblogs, notify: notify, uri: uri)
enum type: [:following, :blocking, :muting, :domain_blocking, :bookmarks]
validates :type, presence: true
+ validates_with ImportValidator, on: :create
has_attached_file :data
validates_attachment_content_type :data, content_type: FILE_TYPES
def import_follows!
parse_import_data!(['Account address'])
- import_relationships!('follow', 'unfollow', @account.following, follow_limit, reblogs: { header: 'Show boosts', default: true })
+ # The per-account follow limit is now enforced up front by ImportValidator
+ # (on create), so processing only needs the generic row-count cap here.
+ import_relationships!('follow', 'unfollow', @account.following, ROWS_PROCESSING_LIMIT, reblogs: { header: 'Show boosts', default: true })
end
def import_blocks!
head_items = items.uniq { |acct, _| acct.split('@')[1] }
tail_items = items - head_items
+
Import::RelationshipWorker.push_bulk(head_items + tail_items) do |acct, extra|
[@account.id, acct, action, extra]
end
Paperclip.io_adapters.for(@import.data).read
end
- def follow_limit
- FollowLimitValidator.limit_for_account(@account)
- end
-
def relations_map_for_account(account, account_ids)
{
blocking: {},
--- /dev/null
+# frozen_string_literal: true
+
+# Validates an Import record on create, before its CSV payload is accepted:
+# rejects files with more data rows than ImportService::ROWS_PROCESSING_LIMIT,
+# and for follow imports also rejects files that would exceed the account's
+# follow limit.
+class ImportValidator < ActiveModel::Validator
+  # Header cells that identify the first CSV row as a header rather than
+  # data, so it can be excluded from the row count below.
+  KNOWN_HEADERS = [
+    'Account address',
+    '#domain',
+    '#uri',
+  ].freeze
+
+  # @param import [Import] record under validation; errors are added to :data
+  def validate(import)
+    return if import.type.blank? || import.data.blank?
+
+    # We parse because newlines could be part of individual rows. This
+    # runs on create so we should be reading the local file here before
+    # it is uploaded to object storage or moved anywhere...
+    csv_data = CSV.parse(import.data.queued_for_write[:original].read)
+
+    row_count = csv_data.size
+    row_count -= 1 if KNOWN_HEADERS.include?(csv_data.first&.first)
+
+    import.errors.add(:data, I18n.t('imports.errors.over_rows_processing_limit', count: ImportService::ROWS_PROCESSING_LIMIT)) if row_count > ImportService::ROWS_PROCESSING_LIMIT
+
+    case import.type
+    when 'following'
+      validate_following_import(import, row_count)
+    end
+  end
+
+  private
+
+  # Follow imports are additionally capped by the account's follow limit.
+  def validate_following_import(import, row_count)
+    base_limit = FollowLimitValidator.limit_for_account(import.account)
+
+    # In merge mode the imported rows are added on top of existing follows,
+    # so only the remaining headroom is available; overwrite mode replaces
+    # the follow list entirely and may use the full limit.
+    limit = begin
+      if import.overwrite?
+        base_limit
+      else
+        base_limit - import.account.following_count
+      end
+    end
+
+    import.errors.add(:data, I18n.t('users.follow_limit_reached', limit: base_limit)) if row_count > limit
+  end
+end
status: Verification status
view_proof: View proof
imports:
+ errors:
+ over_rows_processing_limit: contains more than %{count} rows
modes:
merge: Merge
merge_long: Keep existing records and add new ones
let(:bob) { Fabricate(:account, username: 'bob') }
describe 'validations' do
- subject { Follow.new(account: alice, target_account: bob) }
+ subject { Follow.new(account: alice, target_account: bob, rate_limit: true) }
it 'has a valid fabricator' do
follow = Fabricate.build(:follow)
import = Import.create(account: account, type: type)
expect(import).to model_have_error_on_field(:data)
end
+
+ it 'is invalid with too many rows in data' do
+ import = Import.create(account: account, type: type, data: StringIO.new("foo@bar.com\n" * (ImportService::ROWS_PROCESSING_LIMIT + 10)))
+ expect(import).to model_have_error_on_field(:data)
+ end
+
+ it 'is invalid when there are more rows than the following limit' do
+ import = Import.create(account: account, type: type, data: StringIO.new("foo@bar.com\n" * (FollowLimitValidator.limit_for_account(account) + 10)))
+ expect(import).to model_have_error_on_field(:data)
+ end
end
end