module Admin
class DashboardController < BaseController
+ include Redisable
+
def index
@system_checks = Admin::SystemCheck.perform
@time_period = (29.days.ago.to_date...Time.now.utc.to_date)
def redis_info
@redis_info ||= begin
- if Redis.current.is_a?(Redis::Namespace)
- Redis.current.redis.info
+ if redis.is_a?(Redis::Namespace)
+ redis.redis.info
else
- Redis.current.info
+ redis.info
end
end
end
class MediaProxyController < ApplicationController
include RoutingHelper
include Authorization
+ include Redisable
skip_before_action :store_current_location
skip_before_action :require_functional!
end
def lock_options
- { redis: Redis.current, key: "media_download:#{params[:id]}", autorelease: 15.minutes.seconds }
+ { redis: redis, key: "media_download:#{params[:id]}", autorelease: 15.minutes.seconds }
end
def reject_media?
class Settings::ExportsController < Settings::BaseController
include Authorization
+ include Redisable
skip_before_action :require_functional!
end
def lock_options
- { redis: Redis.current, key: "backup:#{current_user.id}" }
+ { redis: redis, key: "backup:#{current_user.id}" }
end
end
extend ActiveSupport::Concern
included do
+ include Redisable
+
after_commit :push_to_streaming_api
end
end
def push_to_streaming_api
- Redis.current.publish("timeline:access_token:#{id}", Oj.dump(event: :kill)) if revoked? || destroyed?
+ redis.publish("timeline:access_token:#{id}", Oj.dump(event: :kill)) if revoked? || destroyed?
end
end
end
def lock_or_fail(key, expire_after = 15.minutes.seconds)
- RedisLock.acquire({ redis: Redis.current, key: key, autorelease: expire_after }) do |lock|
+ RedisLock.acquire({ redis: redis, key: key, autorelease: expire_after }) do |lock|
if lock.acquired?
yield
else
# frozen_string_literal: true
class DeliveryFailureTracker
+ include Redisable
+
FAILURE_DAYS_THRESHOLD = 7
def initialize(url_or_host)
end
def track_failure!
- Redis.current.sadd(exhausted_deliveries_key, today)
+ redis.sadd(exhausted_deliveries_key, today)
UnavailableDomain.create(domain: @host) if reached_failure_threshold?
end
def track_success!
- Redis.current.del(exhausted_deliveries_key)
+ redis.del(exhausted_deliveries_key)
UnavailableDomain.find_by(domain: @host)&.destroy
end
def clear_failures!
- Redis.current.del(exhausted_deliveries_key)
+ redis.del(exhausted_deliveries_key)
end
def days
- Redis.current.scard(exhausted_deliveries_key) || 0
+ redis.scard(exhausted_deliveries_key) || 0
end
def available?
end
def exhausted_deliveries_days
- @exhausted_deliveries_days ||= Redis.current.smembers(exhausted_deliveries_key).sort.map { |date| Date.new(date.slice(0, 4).to_i, date.slice(4, 2).to_i, date.slice(6, 2).to_i) }
+ @exhausted_deliveries_days ||= redis.smembers(exhausted_deliveries_key).sort.map { |date| Date.new(date.slice(0, 4).to_i, date.slice(4, 2).to_i, date.slice(6, 2).to_i) }
end
alias reset! track_success!
class << self
+ include Redisable
+
def without_unavailable(urls)
unavailable_domains_map = Rails.cache.fetch('unavailable_domains') { UnavailableDomain.pluck(:domain).index_with(true) }
end
def warning_domains
- domains = Redis.current.keys(exhausted_deliveries_key_by('*')).map do |key|
+ domains = redis.keys(exhausted_deliveries_key_by('*')).map do |key|
key.delete_prefix(exhausted_deliveries_key_by(''))
end
end
def warning_domains_map
- warning_domains.index_with { |domain| Redis.current.scard(exhausted_deliveries_key_by(domain)) }
+ warning_domains.index_with { |domain| redis.scard(exhausted_deliveries_key_by(domain)) }
end
private
--- /dev/null
+# frozen_string_literal: true
+
+class RedisConfiguration
+ class << self
+ def with
+ pool.with { |redis| yield redis }
+ end
+
+ def pool
+ @pool ||= ConnectionPool.new(size: pool_size) { new.connection }
+ end
+
+ def pool_size
+ if Sidekiq.server?
+ Sidekiq.options[:concurrency]
+ else
+ Integer(ENV['MAX_THREADS'] || 5)
+ end
+ end
+ end
+
+ def connection
+ if namespace?
+ Redis::Namespace.new(namespace, redis: raw_connection)
+ else
+ raw_connection
+ end
+ end
+
+ def namespace?
+ namespace.present?
+ end
+
+ def namespace
+ ENV.fetch('REDIS_NAMESPACE', nil)
+ end
+
+ def url
+ ENV['REDIS_URL']
+ end
+
+ private
+
+ def raw_connection
+ Redis.new(url: url, driver: :hiredis)
+ end
+end
#
class AccountConversation < ApplicationRecord
+ include Redisable
+
after_commit :push_to_streaming_api
belongs_to :account
end
def subscribed_to_timeline?
- Redis.current.exists?("subscribed:#{streaming_channel}")
+ redis.exists?("subscribed:#{streaming_channel}")
end
def streaming_channel
# frozen_string_literal: true
class AccountSuggestions::GlobalSource < AccountSuggestions::Source
+ include Redisable
+
def key
:global
end
end
def account_ids_for_locale(locale)
- Redis.current.zrevrange("follow_recommendations:#{locale}", 0, -1).map(&:to_i)
+ redis.zrevrange("follow_recommendations:#{locale}", 0, -1).map(&:to_i)
end
def to_ordered_list_key(account)
private
def redis
- Redis.current
+ Thread.current[:redis] ||= RedisConfiguration.new.connection
end
end
).freeze
include Expireable
+ include Redisable
belongs_to :account
def remove_cache
Rails.cache.delete("filters:#{account_id}")
- Redis.current.publish("timeline:#{account_id}", Oj.dump(event: :filters_changed))
+ redis.publish("timeline:#{account_id}", Oj.dump(event: :filters_changed))
end
def context_must_be_valid
self.inheritance_column = nil
include Paginable
+ include Redisable
scope :up_to, ->(id) { where(arel_table[:id].lteq(id)) }
end
def subscribed_to_timeline?
- Redis.current.exists?("subscribed:#{streaming_channel}")
+ redis.exists?("subscribed:#{streaming_channel}")
end
def streaming_channel
# frozen_string_literal: true
class FollowRecommendationFilter
+ include Redisable
+
KEYS = %i(
language
status
if params['status'] == 'suppressed'
Account.joins(:follow_recommendation_suppression).order(FollowRecommendationSuppression.arel_table[:id].desc).to_a
else
- account_ids = Redis.current.zrevrange("follow_recommendations:#{@language}", 0, -1).map(&:to_i)
+ account_ids = redis.zrevrange("follow_recommendations:#{@language}", 0, -1).map(&:to_i)
accounts = Account.where(id: account_ids).index_by(&:id)
account_ids.map { |id| accounts[id] }.compact
include Settings::Extend
include UserRoles
+ include Redisable
# The home and list feeds will be stored in Redis for this amount
# of time, and status fan-out to followers will include only people
end
def regenerate_feed!
- RegenerationWorker.perform_async(account_id) if Redis.current.set("account:#{account_id}:regeneration", true, nx: true, ex: 1.day.seconds)
+ RegenerationWorker.perform_async(account_id) if redis.set("account:#{account_id}:regeneration", true, nx: true, ex: 1.day.seconds)
end
def needs_feed_update?
class ActivityPub::ProcessAccountService < BaseService
include JsonLdHelper
include DomainControlHelper
+ include Redisable
# Should be called with confirmed valid JSON
# and WebFinger-resolved username and domain
end
def lock_options
- { redis: Redis.current, key: "process_account:#{@uri}", autorelease: 15.minutes.seconds }
+ { redis: redis, key: "process_account:#{@uri}", autorelease: 15.minutes.seconds }
end
def process_tags
class ActivityPub::ProcessStatusUpdateService < BaseService
include JsonLdHelper
+ include Redisable
def call(status, json)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
end
def lock_options
- { redis: Redis.current, key: "create:#{@uri}", autorelease: 15.minutes.seconds }
+ { redis: redis, key: "create:#{@uri}", autorelease: 15.minutes.seconds }
end
def record_previous_edit!
# frozen_string_literal: true
class FanOutOnWriteService < BaseService
+ include Redisable
+
# Push a status into home and mentions feeds
# @param [Status] status
# @param [Hash] options
def broadcast_to_hashtag_streams!
@status.tags.pluck(:name).each do |hashtag|
- Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", anonymous_payload)
- Redis.current.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", anonymous_payload) if @status.local?
+ redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", anonymous_payload)
+ redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", anonymous_payload) if @status.local?
end
end
def broadcast_to_public_streams!
return if @status.reply? && @status.in_reply_to_account_id != @account.id && !Setting.show_replies_in_public_timelines
- Redis.current.publish('timeline:public', anonymous_payload)
- Redis.current.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', anonymous_payload)
+ redis.publish('timeline:public', anonymous_payload)
+ redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', anonymous_payload)
if @status.with_media?
- Redis.current.publish('timeline:public:media', anonymous_payload)
- Redis.current.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', anonymous_payload)
+ redis.publish('timeline:public:media', anonymous_payload)
+ redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', anonymous_payload)
end
end
# frozen_string_literal: true
class FetchLinkCardService < BaseService
+ include Redisable
+
URL_PATTERN = %r{
(#{Twitter::TwitterText::Regex[:valid_url_preceding_chars]}) # $1 preceding chars
( # $2 URL
end
def lock_options
- { redis: Redis.current, key: "fetch:#{@original_url}", autorelease: 15.minutes.seconds }
+ { redis: redis, key: "fetch:#{@original_url}", autorelease: 15.minutes.seconds }
end
end
# frozen_string_literal: true
class PrecomputeFeedService < BaseService
+ include Redisable
+
def call(account)
FeedManager.instance.populate_home(account)
FeedManager.instance.populate_direct_feed(account)
ensure
- Redis.current.del("account:#{account.id}:regeneration")
+ redis.del("account:#{account.id}:regeneration")
end
end
end
def lock_options
- { redis: Redis.current, key: "distribute:#{@status.id}", autorelease: 5.minutes.seconds }
+ { redis: redis, key: "distribute:#{@status.id}", autorelease: 5.minutes.seconds }
end
end
include JsonLdHelper
include DomainControlHelper
include WebfingerHelper
+ include Redisable
# Find or create an account record for a remote user. When creating,
# look up the user's webfinger and fetch ActivityPub data
end
def lock_options
- { redis: Redis.current, key: "resolve:#{@username}@#{@domain}", autorelease: 15.minutes.seconds }
+ { redis: redis, key: "resolve:#{@username}@#{@domain}", autorelease: 15.minutes.seconds }
end
end
class VoteService < BaseService
include Authorization
include Payloadable
+ include Redisable
def call(account, poll, choices)
authorize_with account, poll, :vote?
end
def lock_options
- { redis: Redis.current, key: "vote:#{@poll.id}:#{@account.id}" }
+ { redis: redis, key: "vote:#{@poll.id}:#{@account.id}" }
end
end
class DistributionWorker
include Sidekiq::Worker
+ include Redisable
def perform(status_id, options = {})
- RedisLock.acquire(redis: Redis.current, key: "distribute:#{status_id}", autorelease: 5.minutes.seconds) do |lock|
+ RedisLock.acquire(redis: redis, key: "distribute:#{status_id}", autorelease: 5.minutes.seconds) do |lock|
if lock.acquired?
FanOutOnWriteService.new.call(Status.find(status_id), **options.symbolize_keys)
else
class MergeWorker
include Sidekiq::Worker
+ include Redisable
def perform(from_account_id, into_account_id)
FeedManager.instance.merge_into_home(Account.find(from_account_id), Account.find(into_account_id))
rescue ActiveRecord::RecordNotFound
true
ensure
- Redis.current.del("account:#{into_account_id}:regeneration")
+ redis.del("account:#{into_account_id}:regeneration")
end
end
class Scheduler::AccountsStatusesCleanupScheduler
include Sidekiq::Worker
+ include Redisable
# This limit is mostly to be nice to the fediverse at large and not
# generate too much traffic.
end
def last_processed_id
- Redis.current.get('account_statuses_cleanup_scheduler:last_account_id')
+ redis.get('account_statuses_cleanup_scheduler:last_account_id')
end
def save_last_processed_id(id)
if id.nil?
- Redis.current.del('account_statuses_cleanup_scheduler:last_account_id')
+ redis.del('account_statuses_cleanup_scheduler:last_account_id')
else
- Redis.current.set('account_statuses_cleanup_scheduler:last_account_id', id, ex: 1.hour.seconds)
+ redis.set('account_statuses_cleanup_scheduler:last_account_id', id, ex: 1.hour.seconds)
end
end
end
require_relative '../lib/terrapin/multi_pipe_extensions'
require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version'
+require_relative '../lib/mastodon/rack_middleware'
require_relative '../lib/devise/two_factor_ldap_authenticatable'
require_relative '../lib/devise/two_factor_pam_authenticatable'
require_relative '../lib/chewy/strategy/custom_sidekiq'
config.middleware.use Rack::Attack
config.middleware.use Rack::Deflater
+ config.middleware.use Mastodon::RackMiddleware
config.to_prepare do
Doorkeeper::AuthorizationsController.layout 'modal'
+++ /dev/null
-# frozen_string_literal: true
-
-redis_connection = Redis.new(
- url: ENV['REDIS_URL'],
- driver: :hiredis
-)
-
-namespace = ENV.fetch('REDIS_NAMESPACE') { nil }
-
-if namespace
- Redis.current = Redis::Namespace.new(namespace, redis: redis_connection)
-else
- Redis.current = redis_connection
-end
# frozen_string_literal: true
-require_relative '../../lib/sidekiq_error_handler'
+require_relative '../../lib/mastodon/sidekiq_middleware'
Sidekiq.configure_server do |config|
config.redis = REDIS_SIDEKIQ_PARAMS
config.server_middleware do |chain|
- chain.add SidekiqErrorHandler
+ chain.add Mastodon::SidekiqMiddleware
end
config.server_middleware do |chain|
module Mastodon
class FeedsCLI < Thor
include CLIHelper
+ include Redisable
def self.exit_on_failure?
true
desc 'clear', 'Remove all home and list feeds from Redis'
def clear
- keys = Redis.current.keys('feed:*')
+ keys = redis.keys('feed:*')
- Redis.current.pipelined do
- keys.each { |key| Redis.current.del(key) }
+ redis.pipelined do
+ keys.each { |key| redis.del(key) }
end
say('OK', :green)
--- /dev/null
+# frozen_string_literal: true
+
+class Mastodon::RackMiddleware
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ @app.call(env)
+ ensure
+ clean_up_sockets!
+ end
+
+ private
+
+ def clean_up_sockets!
+ clean_up_redis_socket!
+ clean_up_statsd_socket!
+ end
+
+ def clean_up_redis_socket!
+ Thread.current[:redis]&.close
+ Thread.current[:redis] = nil
+ end
+
+ def clean_up_statsd_socket!
+ Thread.current[:statsd_socket]&.close
+ Thread.current[:statsd_socket] = nil
+ end
+end
port = ENV.fetch(prefix + 'REDIS_PORT') { 6379 if defaults }
db = ENV.fetch(prefix + 'REDIS_DB') { 0 if defaults }
- ENV[prefix + 'REDIS_URL'] = if [password, host, port, db].all?(&:nil?)
- ENV['REDIS_URL']
- else
- Addressable::URI.parse("redis://#{host}:#{port}/#{db}").tap do |uri|
- uri.password = password if password.present?
- end.normalize.to_str
- end
+ ENV[prefix + 'REDIS_URL'] = begin
+ if [password, host, port, db].all?(&:nil?)
+ ENV['REDIS_URL']
+ else
+ Addressable::URI.parse("redis://#{host}:#{port}/#{db}").tap do |uri|
+ uri.password = password if password.present?
+ end.normalize.to_str
+ end
+ end
end
setup_redis_env_url
url: ENV['CACHE_REDIS_URL'],
expires_in: 10.minutes,
namespace: cache_namespace,
+ pool_size: Sidekiq.server? ? Sidekiq.options[:concurrency] : Integer(ENV['MAX_THREADS'] || 5),
+ pool_timeout: 5,
}.freeze
REDIS_SIDEKIQ_PARAMS = {
url: ENV['SIDEKIQ_REDIS_URL'],
namespace: sidekiq_namespace,
}.freeze
+
+if Rails.env.test?
+ ENV['REDIS_NAMESPACE'] = "mastodon_test#{ENV['TEST_ENV_NUMBER']}"
+end
# frozen_string_literal: true
-class SidekiqErrorHandler
+class Mastodon::SidekiqMiddleware
BACKTRACE_LIMIT = 3
def call(*)
rescue => e
limit_backtrace_and_raise(e)
ensure
- socket = Thread.current[:statsd_socket]
- socket&.close
- Thread.current[:statsd_socket] = nil
+ clean_up_sockets!
end
private
exception.set_backtrace(exception.backtrace.first(BACKTRACE_LIMIT))
raise exception
end
+
+ def clean_up_sockets!
+ clean_up_redis_socket!
+ clean_up_statsd_socket!
+ end
+
+ def clean_up_redis_socket!
+ Thread.current[:redis]&.close
+ Thread.current[:redis] = nil
+ end
+
+ def clean_up_statsd_socket!
+ Thread.current[:statsd_socket]&.close
+ Thread.current[:statsd_socket] = nil
+ end
end
get :show
expect_updated_sign_in_at(user)
- expect(Redis.current.get("account:#{user.account_id}:regeneration")).to eq 'true'
+ expect(redis.get("account:#{user.account_id}:regeneration")).to eq 'true'
expect(RegenerationWorker).to have_received(:perform_async)
end
it 'sets the regeneration marker to expire' do
allow(RegenerationWorker).to receive(:perform_async)
get :show
- expect(Redis.current.ttl("account:#{user.account_id}:regeneration")).to be >= 0
+ expect(redis.ttl("account:#{user.account_id}:regeneration")).to be >= 0
end
it 'regenerates feed when sign in is older than two weeks' do
get :show
expect_updated_sign_in_at(user)
- expect(Redis.current.zcard(FeedManager.instance.key(:home, user.account_id))).to eq 3
- expect(Redis.current.get("account:#{user.account_id}:regeneration")).to be_nil
+ expect(redis.zcard(FeedManager.instance.key(:home, user.account_id))).to eq 3
+ expect(redis.get("account:#{user.account_id}:regeneration")).to be_nil
end
end
context 'when a Move has been recently processed' do
around do |example|
- Redis.current.set("move_in_progress:#{old_account.id}", true, nx: true, ex: 7.days.seconds)
+ redis.set("move_in_progress:#{old_account.id}", true, nx: true, ex: 7.days.seconds)
example.run
- Redis.current.del("move_in_progress:#{old_account.id}")
+ redis.del("move_in_progress:#{old_account.id}")
end
it 'does not set moved account on old account' do
describe '#track_failure!' do
it 'marks URL as unavailable after 7 days of being called' do
- 6.times { |i| Redis.current.sadd('exhausted_deliveries:example.com', i) }
+ 6.times { |i| redis.sadd('exhausted_deliveries:example.com', i) }
subject.track_failure!
expect(subject.days).to eq 7
account = Fabricate(:account)
status = Fabricate(:status)
members = FeedManager::MAX_ITEMS.times.map { |count| [count, count] }
- Redis.current.zadd("feed:home:#{account.id}", members)
+ redis.zadd("feed:home:#{account.id}", members)
FeedManager.instance.push_to_home(account, status)
- expect(Redis.current.zcard("feed:home:#{account.id}")).to eq FeedManager::MAX_ITEMS
+ expect(redis.zcard("feed:home:#{account.id}")).to eq FeedManager::MAX_ITEMS
end
context 'reblogs' do
FeedManager.instance.merge_into_home(account, reblog.account)
- expect(Redis.current.zscore("feed:home:0", reblog.id)).to eq nil
+ expect(redis.zscore("feed:home:0", reblog.id)).to eq nil
end
end
FeedManager.instance.push_to_home(receiver, status)
# The reblogging status should show up under normal conditions.
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to include(status.id.to_s)
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to include(status.id.to_s)
FeedManager.instance.unpush_from_home(receiver, status)
# Restore original status
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to_not include(status.id.to_s)
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to include(reblogged.id.to_s)
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to_not include(status.id.to_s)
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to include(reblogged.id.to_s)
end
it 'removes a reblogged status if it was only reblogged once' do
FeedManager.instance.push_to_home(receiver, status)
# The reblogging status should show up under normal conditions.
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [status.id.to_s]
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [status.id.to_s]
FeedManager.instance.unpush_from_home(receiver, status)
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to be_empty
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to be_empty
end
it 'leaves a multiply-reblogged status if another reblog was in feed' do
end
# The reblogging status should show up under normal conditions.
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [reblogs.first.id.to_s]
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [reblogs.first.id.to_s]
reblogs[0...-1].each do |reblog|
FeedManager.instance.unpush_from_home(receiver, reblog)
end
- expect(Redis.current.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [reblogs.last.id.to_s]
+ expect(redis.zrange("feed:home:#{receiver.id}", 0, -1)).to eq [reblogs.last.id.to_s]
end
it 'sends push updates' do
FeedManager.instance.push_to_home(receiver, status)
- allow(Redis.current).to receive_messages(publish: nil)
+ allow(redis).to receive_messages(publish: nil)
FeedManager.instance.unpush_from_home(receiver, status)
deletion = Oj.dump(event: :delete, payload: status.id.to_s)
- expect(Redis.current).to have_received(:publish).with("timeline:#{receiver.id}", deletion)
+ expect(redis).to have_received(:publish).with("timeline:#{receiver.id}", deletion)
end
end
before do
[status_1, status_3, status_5, status_6, status_7].each do |status|
- Redis.current.zadd("feed:home:#{account.id}", status.id, status.id)
+ redis.zadd("feed:home:#{account.id}", status.id, status.id)
end
end
it 'correctly cleans the home timeline' do
FeedManager.instance.clear_from_home(account, target_account)
- expect(Redis.current.zrange("feed:home:#{account.id}", 0, -1)).to eq [status_1.id.to_s, status_7.id.to_s]
+ expect(redis.zrange("feed:home:#{account.id}", 0, -1)).to eq [status_1.id.to_s, status_7.id.to_s]
end
end
end
context 'when feed is generated' do
before do
- Redis.current.zadd(
+ redis.zadd(
FeedManager.instance.key(:home, account.id),
[[4, 4], [3, 3], [2, 2], [1, 1]]
)
context 'when feed is being generated' do
before do
- Redis.current.set("account:#{account.id}:regeneration", true)
+ redis.set("account:#{account.id}:regeneration", true)
end
it 'returns nothing' do
ActiveRecord::Migration.maintain_test_schema!
WebMock.disable_net_connect!(allow: Chewy.settings[:host])
-Redis.current = Redis::Namespace.new("mastodon_test#{ENV['TEST_ENV_NUMBER']}", redis: Redis.current)
Sidekiq::Testing.inline!
Sidekiq.logger = nil
config.include Devise::Test::ControllerHelpers, type: :view
config.include Paperclip::Shoulda::Matchers
config.include ActiveSupport::Testing::TimeHelpers
+ config.include Redisable
config.before :each, type: :feature do
https = ENV['LOCAL_HTTPS'] == 'true'
config.after :each do
Rails.cache.clear
- Redis.current.del(Redis.current.keys)
+ redis.del(redis.keys)
end
end
let(:home_timeline_key) { FeedManager.instance.key(:home, account.id) }
before do
- Redis.current.del(home_timeline_key)
+ redis.del(home_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_home(account, other_account_reblog)
expect { subject }.to change {
- Redis.current.zrange(home_timeline_key, 0, -1)
+ redis.zrange(home_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s, other_account_reblog.id.to_s]).to([other_account_status.id.to_s])
end
end
let(:list_timeline_key) { FeedManager.instance.key(:list, list.id) }
before do
- Redis.current.del(list_timeline_key)
+ redis.del(list_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_list(list, other_account_reblog)
expect { subject }.to change {
- Redis.current.zrange(list_timeline_key, 0, -1)
+ redis.zrange(list_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s, other_account_reblog.id.to_s]).to([other_account_status.id.to_s])
end
end
let(:status2) { PostStatusService.new.call(alice, text: 'Another status') }
before do
- allow(Redis.current).to receive_messages(publish: nil)
+ allow(redis).to receive_messages(publish: nil)
stub_request(:post, 'http://example.com/inbox').to_return(status: 200)
end
it 'notifies streaming API of followers' do
- expect(Redis.current).to have_received(:publish).with("timeline:#{jeff.id}", any_args).at_least(:once)
+ expect(redis).to have_received(:publish).with("timeline:#{jeff.id}", any_args).at_least(:once)
end
it 'notifies streaming API of public timeline' do
- expect(Redis.current).to have_received(:publish).with('timeline:public', any_args).at_least(:once)
+ expect(redis).to have_received(:publish).with('timeline:public', any_args).at_least(:once)
end
it 'sends delete activity to followers' do
ProcessMentionsService.new.call(status)
ProcessHashtagsService.new.call(status)
- allow(Redis.current).to receive(:publish)
+ allow(redis).to receive(:publish)
subject.call(status)
end
end
it 'is broadcast to the hashtag stream' do
- expect(Redis.current).to have_received(:publish).with('timeline:hashtag:hoge', anything)
- expect(Redis.current).to have_received(:publish).with('timeline:hashtag:hoge:local', anything)
+ expect(redis).to have_received(:publish).with('timeline:hashtag:hoge', anything)
+ expect(redis).to have_received(:publish).with('timeline:hashtag:hoge:local', anything)
end
it 'is broadcast to the public stream' do
- expect(Redis.current).to have_received(:publish).with('timeline:public', anything)
- expect(Redis.current).to have_received(:publish).with('timeline:public:local', anything)
+ expect(redis).to have_received(:publish).with('timeline:public', anything)
+ expect(redis).to have_received(:publish).with('timeline:public:local', anything)
end
end
end
it 'is not broadcast publicly' do
- expect(Redis.current).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
- expect(Redis.current).to_not have_received(:publish).with('timeline:public', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
end
it 'is not broadcast publicly' do
- expect(Redis.current).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
- expect(Redis.current).to_not have_received(:publish).with('timeline:public', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
end
it 'is not broadcast publicly' do
- expect(Redis.current).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
- expect(Redis.current).to_not have_received(:publish).with('timeline:public', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:hashtag:hoge', anything)
+ expect(redis).to_not have_received(:publish).with('timeline:public', anything)
end
end
end
let(:home_timeline_key) { FeedManager.instance.key(:home, account.id) }
before do
- Redis.current.del(home_timeline_key)
+ redis.del(home_timeline_key)
end
it "clears account's statuses" do
FeedManager.instance.push_to_home(account, other_account_status)
expect { subject }.to change {
- Redis.current.zrange(home_timeline_key, 0, -1)
+ redis.zrange(home_timeline_key, 0, -1)
}.from([status.id.to_s, other_account_status.id.to_s]).to([other_account_status.id.to_s])
end
end
subject.call(account)
- expect(Redis.current.zscore(FeedManager.instance.key(:home, account.id), status.id)).to be_within(0.1).of(status.id.to_f)
+ expect(redis.zscore(FeedManager.instance.key(:home, account.id), status.id)).to be_within(0.1).of(status.id.to_f)
end
it 'does not raise an error even if it could not find any status' do
subject.call(account)
- expect(Redis.current.zscore(FeedManager.instance.key(:home, account.id), reblog.id)).to eq nil
+ expect(redis.zscore(FeedManager.instance.key(:home, account.id), reblog.id)).to eq nil
end
end
end
let!(:inactive_user) { Fabricate(:user, current_sign_in_at: 22.days.ago) }
it 'clears feeds of inactives' do
- Redis.current.zadd(feed_key_for(inactive_user), 1, 1)
- Redis.current.zadd(feed_key_for(active_user), 1, 1)
- Redis.current.zadd(feed_key_for(inactive_user, 'reblogs'), 2, 2)
- Redis.current.sadd(feed_key_for(inactive_user, 'reblogs:2'), 3)
+ redis.zadd(feed_key_for(inactive_user), 1, 1)
+ redis.zadd(feed_key_for(active_user), 1, 1)
+ redis.zadd(feed_key_for(inactive_user, 'reblogs'), 2, 2)
+ redis.sadd(feed_key_for(inactive_user, 'reblogs:2'), 3)
subject.perform
- expect(Redis.current.zcard(feed_key_for(inactive_user))).to eq 0
- expect(Redis.current.zcard(feed_key_for(active_user))).to eq 1
- expect(Redis.current.exists?(feed_key_for(inactive_user, 'reblogs'))).to be false
- expect(Redis.current.exists?(feed_key_for(inactive_user, 'reblogs:2'))).to be false
+ expect(redis.zcard(feed_key_for(inactive_user))).to eq 0
+ expect(redis.zcard(feed_key_for(active_user))).to eq 1
+ expect(redis.exists?(feed_key_for(inactive_user, 'reblogs'))).to be false
+ expect(redis.exists?(feed_key_for(inactive_user, 'reblogs:2'))).to be false
end
def feed_key_for(user, subtype = nil)