Change indexing frequency from 5 minutes to 1 minute, add locks to schedulers (#26304)
commit a0fad5c8bb (parent f55f0ab0c3)

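The new lock: :until_executed / lock_ttl: options added below are, presumably, interpreted by the sidekiq-unique-jobs middleware that Mastodon ships with: while one run of a scheduler is queued or executing, duplicate enqueues are dropped, and the lock itself expires after a day as a safety valve. A minimal sketch of a worker declaring the same options follows; ExampleScheduler is a made-up class, not part of this commit.

# Sketch only, assuming the lock options are handled by sidekiq-unique-jobs;
# ExampleScheduler is hypothetical and not part of the commit.
require 'sidekiq'
require 'active_support/core_ext/numeric/time'

class ExampleScheduler
  include Sidekiq::Worker

  # retry: 0              -> a failed run is not retried; the next scheduled run covers it
  # lock: :until_executed -> while one job is queued or running, duplicate pushes are discarded
  # lock_ttl: 1.day.to_i  -> the lock is force-expired after 24 hours even if never released
  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

  def perform
    # long-running work; enqueueing another ExampleScheduler during this time is a no-op
  end
end
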
@@ -4,7 +4,7 @@ class Scheduler::FollowRecommendationsScheduler
   include Sidekiq::Worker
   include Redisable

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   # The maximum number of accounts that can be requested in one page from the
   # API is 80, and the suggestions API does not allow pagination. This number

@@ -4,7 +4,7 @@ class Scheduler::IndexingScheduler
   include Sidekiq::Worker
   include Redisable

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   IMPORT_BATCH_SIZE = 1000
   SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE

@@ -16,9 +16,7 @@ class Scheduler::IndexingScheduler
       with_redis do |redis|
         redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
           type.import!(ids)
-          redis.pipelined do |pipeline|
-            pipeline.srem("chewy:queue:#{type.name}", ids)
-          end
+          redis.srem("chewy:queue:#{type.name}", ids)
         end
       end
     end

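The second IndexingScheduler hunk also drops the redis.pipelined wrapper: SREM already accepts multiple members, so the whole batch of ids is removed in a single command and there is nothing left worth pipelining. A quick sketch with the redis-rb client; the key and ids are made up for illustration.

# Illustration only: a hypothetical chewy-style queue key with three queued ids.
require 'redis'

redis = Redis.new
redis.sadd('chewy:queue:ExampleIndex', %w(1 2 3))

# One SREM call removes every member of the batch in a single round trip,
# which is what the simplified scheduler code relies on.
redis.srem('chewy:queue:ExampleIndex', %w(1 2))
redis.smembers('chewy:queue:ExampleIndex') # => ["3"]
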
@@ -3,7 +3,7 @@
 class Scheduler::InstanceRefreshScheduler
   include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     Instance.refresh

@@ -6,7 +6,7 @@ class Scheduler::IpCleanupScheduler
   IP_RETENTION_PERIOD = ENV.fetch('IP_RETENTION_PERIOD', 1.year).to_i.seconds.freeze
   SESSION_RETENTION_PERIOD = ENV.fetch('SESSION_RETENTION_PERIOD', 1.year).to_i.seconds.freeze

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     clean_ip_columns!

@@ -3,7 +3,7 @@
 class Scheduler::PgheroScheduler
   include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     PgHero.capture_space_stats

@@ -3,7 +3,7 @@
 class Scheduler::ScheduledStatusesScheduler
   include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     publish_scheduled_statuses!

@@ -16,7 +16,7 @@ class Scheduler::SuspendedUserCleanupScheduler
   # has the capacity for it.
   MAX_DELETIONS_PER_JOB = 10

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     return if Sidekiq::Queue.new('pull').size > MAX_PULL_SIZE

@@ -3,7 +3,7 @@
 class Scheduler::UserCleanupScheduler
   include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     clean_unconfirmed_accounts!

@@ -3,7 +3,7 @@
 class Scheduler::VacuumScheduler
   include Sidekiq::Worker

-  sidekiq_options retry: 0, lock: :until_executed
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

   def perform
     vacuum_operations.each do |operation|

@@ -23,7 +23,7 @@
       class: Scheduler::Trends::ReviewNotificationsScheduler
       queue: scheduler
     indexing_scheduler:
-      every: '5m'
+      interval: 1 minute
       class: Scheduler::IndexingScheduler
       queue: scheduler
     vacuum_scheduler:

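The last hunk, presumably in config/sidekiq.yml, is where the frequency changes. As I understand the sidekiq-scheduler/rufus-scheduler semantics, "every" fires on a fixed period regardless of how long the previous run took, while "interval" waits the given time after the previous trigger finishes; combined with the new until-executed lock, one-minute indexing passes therefore cannot stack up. A rough sketch of that difference using rufus-scheduler directly, not code from this commit:

# Sketch of the assumed scheduling semantics, using rufus-scheduler on its own;
# the block bodies are placeholders.
require 'rufus-scheduler'

scheduler = Rufus::Scheduler.new

# every: triggers on a fixed 5-minute period, even if the previous block is still running
scheduler.every '5m' do
  puts 'indexing pass (old schedule)'
end

# interval: waits 1 minute after the previous block finishes before the next trigger
scheduler.interval '1m' do
  puts 'indexing pass (new schedule)'
end

scheduler.join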