7 changes: 5 additions & 2 deletions app/models/solid_queue/failed_execution.rb
@@ -58,8 +58,11 @@ def exception_backtrace
       end
 
       def determine_backtrace_size_limit
-        column = self.class.connection.schema_cache.columns_hash(self.class.table_name)["error"]
-        if column.limit.present?
+        column = self.class.connection_pool.with_connection do |connection|
+          connection.schema_cache.columns_hash(self.class.table_name)["error"]
+        end
+
+        if column && column.limit.present?
           column.limit - exception_class_name.bytesize - exception_message.bytesize - JSON_OVERHEAD
         end
       end
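The change above swaps a raw `self.class.connection` call for a scoped lease: `connection_pool.with_connection` checks a connection out only for the duration of the block and returns it to the pool afterwards, instead of pinning one to the current thread. The added `column &&` guard also covers the case where the schema cache has no `error` column. A minimal sketch of the same pattern, with an illustrative model and column name (not taken from this diff):

    class Example < ActiveRecord::Base
      # Look up a column's byte limit without keeping a connection leased.
      def self.error_column_limit
        connection_pool.with_connection do |connection|
          # The schema cache avoids a metadata query when columns are already loaded.
          column = connection.schema_cache.columns_hash(table_name)["error"]
          column&.limit  # nil when the column is absent or unlimited (e.g. PostgreSQL text)
        end
      end
    end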
18 changes: 13 additions & 5 deletions app/models/solid_queue/record.rb
@@ -6,11 +6,19 @@ class Record < ActiveRecord::Base
 
     connects_to(**SolidQueue.connects_to) if SolidQueue.connects_to
 
-    def self.non_blocking_lock
-      if SolidQueue.use_skip_locked
-        lock(Arel.sql("FOR UPDATE SKIP LOCKED"))
-      else
-        lock
+    class << self
+      def non_blocking_lock
+        if SolidQueue.use_skip_locked
+          lock(Arel.sql("FOR UPDATE SKIP LOCKED"))
+        else
+          lock
+        end
+      end
+
+      def supports_insert_conflict_target?
+        connection_pool.with_connection do |connection|
+          connection.supports_insert_conflict_target?
+        end
       end
     end
   end
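Hoisting `supports_insert_conflict_target?` onto the base `Record` class gives every model below one place to ask the adapter about conflict-target support, again without holding a connection. An illustrative call (the return values reflect stock Rails adapters, stated as background rather than anything shown in this diff):

    # true on PostgreSQL and SQLite3; false on MySQL, which only offers
    # ON DUPLICATE KEY UPDATE and cannot name a specific unique index.
    SolidQueue::Record.supports_insert_conflict_target?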
2 changes: 1 addition & 1 deletion app/models/solid_queue/recurring_execution.rb
@@ -8,7 +8,7 @@ class AlreadyRecorded < StandardError; end
 
     class << self
       def create_or_insert!(**attributes)
-        if connection.supports_insert_conflict_target?
+        if supports_insert_conflict_target?
           # PostgreSQL fails and aborts the current transaction when it hits a duplicate key conflict
           # during two concurrent INSERTs for the same value of a unique index. We need to explicitly
           # indicate unique_by to ignore duplicate rows by this value when inserting
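The rest of `create_or_insert!` is collapsed in this diff; the comment above pins down the intent of the two branches. A sketch of a plausible body under that comment (the `unique_by` columns and the rescue are assumptions, not visible here):

    # Inside SolidQueue::RecurringExecution's class << self block:
    def create_or_insert!(**attributes)
      if supports_insert_conflict_target?
        # Name the unique index so PostgreSQL skips the duplicate row
        # instead of aborting the surrounding transaction.
        raise AlreadyRecorded unless insert(attributes, unique_by: [ :task_key, :run_at ]).any?
      else
        # Other adapters report the duplicate without poisoning the
        # transaction, so plain create! plus a rescue is enough.
        create!(**attributes)
      end
    rescue ActiveRecord::RecordNotUnique
      raise AlreadyRecorded
    end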
2 changes: 1 addition & 1 deletion app/models/solid_queue/recurring_task.rb
@@ -36,7 +36,7 @@ def from_configuration(key, **options)
       end
 
       def create_or_update_all(tasks)
-        if connection.supports_insert_conflict_target?
+        if supports_insert_conflict_target?
           # PostgreSQL fails and aborts the current transaction when it hits a duplicate key conflict
           # during two concurrent INSERTs for the same value of a unique index. We need to explicitly
           # indicate unique_by to ignore duplicate rows by this value when inserting
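Same guard, different operation: `create_or_update_all` is a bulk path. Assuming the collapsed body performs an `upsert_all` (the `attributes_for_upsert` mapping below is a hypothetical helper for illustration), the branch decides whether a conflict target can be named:

    # Inside SolidQueue::RecurringTask's class << self block (sketch):
    def create_or_update_all(tasks)
      if supports_insert_conflict_target?
        # PostgreSQL: conflicts on the unique index over `key` are resolved
        # in one statement, safe under concurrent schedulers.
        upsert_all tasks.map(&:attributes_for_upsert), unique_by: :key
      else
        upsert_all tasks.map(&:attributes_for_upsert)
      end
    end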
2 changes: 1 addition & 1 deletion app/models/solid_queue/semaphore.rb
@@ -20,7 +20,7 @@ def signal_all(jobs)
 
       # Requires a unique index on key
       def create_unique_by(attributes)
-        if connection.supports_insert_conflict_target?
+        if supports_insert_conflict_target?
           insert({ **attributes }, unique_by: :key).any?
         else
           create!(**attributes)
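`create_unique_by` makes the trade-off visible in a few lines: with a conflict target, a duplicate key shows up as an empty `insert` result; without one, `create!` raises. The method's tail is collapsed here, but presumably (an assumption, not shown in the diff) it rescues the duplicate-key error so callers get a boolean either way:

    # Sketch of the complete method under that assumption:
    def create_unique_by(attributes)
      if supports_insert_conflict_target?
        insert({ **attributes }, unique_by: :key).any?  # false if the key already exists
      else
        create!(**attributes)  # raises on a duplicate key
      end
    rescue ActiveRecord::RecordNotUnique
      false
    end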