Commit e3df972

fix: test schema processing again

1 parent 9642f52 commit e3df972

2 files changed: +98 -32 lines changed

lib/hrma/build/document_generator.rb

Lines changed: 77 additions & 26 deletions
@@ -159,58 +159,109 @@ def generate_parallel(xsd_files)
           # Receive a file to process and yield it to any worker that asks
           file = Ractor.receive
           puts "Queue: Received file #{file} for processing"
+
+          # Signal which file is being sent to workers
           Ractor.yield(file)
         end
       end

-      # Create worker Ractors
+      # Create worker Ractors with improved error handling
       workers = ractor_count.times.map do |i|
         Ractor.new(pool, @log_dir, Dir.pwd, tools_constants, i) do |pool, log_dir, pwd, tool_paths, id|
           # Worker Ractor
           loop do
             begin
-              # Take a file from the pool
-              file = pool.take
-              puts "Worker #{id}: Processing file #{file}"
-
-              # Process the file
-              result = Hrma::Build::RactorDocumentProcessor.process_single_file(file, log_dir, pwd, tool_paths)
+              # Take a file from the pool with timeout to prevent deadlocks
+              file = nil
+              begin
+                # Print worker ID to help with debugging
+                puts "Worker #{id}: Waiting for work..."
+                file = pool.take
+                puts "Worker #{id}: Processing file #{file}"
+              rescue => e
+                puts "Worker #{id}: Error taking work: #{e.message}"
+                # Sleep briefly to avoid tight loop if there's an error
+                sleep 0.1
+                next
+              end
+
+              # Process the file with timeout protection
+              result = nil
+              begin
+                # Use timeout to prevent hanging on external processes
+                require 'timeout'
+                result = Timeout.timeout(300) do # 5 minute timeout
+                  Hrma::Build::RactorDocumentProcessor.process_single_file(file, log_dir, pwd, tool_paths)
+                end
+              rescue Timeout::Error
+                puts "Worker #{id}: Timeout processing file #{file}"
+                result = [file, false, "Timeout after 300 seconds"]
+              rescue => e
+                puts "Worker #{id}: Error processing file #{file}: #{e.message}"
+                result = [file, false, "Exception: #{e.message}"]
+              end

               # Yield the result
               Ractor.yield([file, *result])
             rescue Ractor::ClosedError
               # Pool is closed, exit the loop
               puts "Worker #{id}: Pool closed, exiting"
               break
+            rescue => e
+              # Catch any other errors to prevent worker from dying
+              puts "Worker #{id}: Unexpected error: #{e.class} - #{e.message}"
+              # Don't break the loop, try to continue with next file
             end
           end
         end
       end

-      # Send all files to the pool
+      # Send all files to the pool with better error handling
       xsd_files.each do |file|
-        puts "Main: Sending file #{file} to queue"
-        pool.send(file)
+        begin
+          puts "Main: Sending file #{file} to queue"
+          pool.send(file)
+        rescue => e
+          puts "Main: Error sending file #{file} to queue: #{e.message}"
+          mutex.synchronize do
+            failed_files << "#{file}: Failed to queue - #{e.message}"
+          end
+        end
       end

-      # Process results as they come in
-      xsd_files.size.times do
-        # Wait for any worker to produce a result
-        worker, result = Ractor.select(*workers)
-
-        # Process the result
-        file, success, error_message = result
-
-        mutex.synchronize do
-          progressbar.increment
-
-          if success
-            puts "Main: Successfully processed #{file}"
-          else
-            failed_files << "#{file}: #{error_message}"
-            puts "\nError processing #{file}: #{error_message}"
+      # Process results as they come in with timeout protection
+      processed_count = 0
+      begin
+        # Use timeout for the entire processing to prevent hanging
+        require 'timeout'
+        Timeout.timeout(xsd_files.size * 60) do # Allow average 1 minute per file
+          while processed_count < xsd_files.size
+            begin
+              # Wait for any worker to produce a result with a timeout
+              worker, result = Ractor.select(*workers)
+
+              # Process the result
+              file, success, error_message = result
+
+              mutex.synchronize do
+                processed_count += 1
+                progressbar.increment
+
+                if success
+                  puts "Main: Successfully processed #{file} (#{processed_count}/#{xsd_files.size})"
+                else
+                  failed_files << "#{file}: #{error_message}"
+                  puts "\nError processing #{file}: #{error_message}"
+                end
+              end
+            rescue => e
+              puts "Main: Error receiving result: #{e.message}"
+              # Continue trying to receive results
+            end
           end
         end
+      rescue Timeout::Error
+        puts "\nTimeout waiting for all files to process. Some files may not have been processed."
       end

       # Close the pool to signal workers to exit
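
For orientation, here is a minimal, self-contained sketch of the queue/worker Ractor pattern this hunk hardens (Ruby 3.x, where Ractors are still experimental). The queue Ractor, the pool.take / Ractor.yield hand-off, the [file, success, error_message] result tuple, and the Ractor::ClosedError shutdown path mirror the diff; the file names, the worker count, and the sleep placeholder for RactorDocumentProcessor.process_single_file are purely illustrative, and the per-file Timeout.timeout wrapper is left out to keep the sketch short.

    # Sketch only: the sleep stands in for the real per-file processing.
    files = %w[a.xsd b.xsd c.xsd] # illustrative file names

    # Queue Ractor: receives files from the main Ractor and yields them to workers.
    queue = Ractor.new do
      loop do
        file = Ractor.receive
        Ractor.yield(file)
      end
    end

    # Worker Ractors: take files from the queue and yield result tuples.
    workers = 2.times.map do |id|
      Ractor.new(queue, id) do |q, wid|
        loop do
          file = q.take                   # blocks until the queue yields a file
          sleep 0.1                       # placeholder for the real processing
          Ractor.yield([file, true, nil]) # [file, success, error_message]
        rescue Ractor::ClosedError
          break                           # queue closed: no more work, exit
        end
      end
    end

    # Main Ractor: feed the queue, then collect exactly one result per file.
    files.each { |f| queue.send(f) }
    files.size.times do
      _worker, (file, success, error) = Ractor.select(*workers)
      puts success ? "done: #{file}" : "failed: #{file} (#{error})"
    end

    # Close the queue so blocked workers see Ractor::ClosedError and exit.
    queue.close_outgoing
    workers.each(&:take)

Closing the queue's outgoing port is what turns a blocked pool.take into Ractor::ClosedError, which is why the workers treat that exception as a normal shutdown signal rather than an error.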

lib/hrma/build/ractor_document_processor.rb

Lines changed: 21 additions & 6 deletions
@@ -184,13 +184,18 @@ def self.create_output_directory(output_dir)
     def self.generate_diagrams(file, pwd, output_dir, xsdvi_path, log = nil)
       diagrams_cmd = "java -jar #{xsdvi_path} #{pwd}/#{file} -rootNodeName all -oneNodeOnly -outputPath #{output_dir}/diagrams"

+      require 'open3'
+
       if log
         log.puts "Running: #{diagrams_cmd}"
-        result = system(diagrams_cmd, out: log, err: log)
+        stdout_and_stderr_str, status = Open3.capture2e(diagrams_cmd)
+        log.puts stdout_and_stderr_str
+        result = status.success?
         log.puts "Error generating diagrams for #{file}" unless result
         result
       else
-        system(diagrams_cmd)
+        stdout_and_stderr_str, status = Open3.capture2e(diagrams_cmd)
+        status.success?
       end
     end

@@ -204,13 +209,18 @@ def self.generate_diagrams(file, pwd, output_dir, xsdvi_path, log = nil)
     def self.generate_merged_xsd(file, temp_file, xsdmerge_path, log = nil)
       xsdmerge_cmd = "xsltproc --nonet --stringparam rootxsd #{file} --output #{temp_file} #{xsdmerge_path} #{file}"

+      require 'open3'
+
       if log
         log.puts "Running: #{xsdmerge_cmd}"
-        result = system(xsdmerge_cmd, out: log, err: log)
+        stdout_and_stderr_str, status = Open3.capture2e(xsdmerge_cmd)
+        log.puts stdout_and_stderr_str
+        result = status.success?
         log.puts "Error generating merged XSD for #{file}" unless result
         result
       else
-        system(xsdmerge_cmd)
+        stdout_and_stderr_str, status = Open3.capture2e(xsdmerge_cmd)
+        status.success?
       end
     end

@@ -225,13 +235,18 @@ def self.generate_merged_xsd(file, temp_file, xsdmerge_path, log = nil)
     def self.generate_final_doc(temp_file, output_file, file_basename, xs3p_path, log = nil)
       xs3p_cmd = "xsltproc --nonet --param title \"'Schema Documentation for #{file_basename}'\" --output #{output_file} #{xs3p_path} #{temp_file}"

+      require 'open3'
+
       if log
         log.puts "Running: #{xs3p_cmd}"
-        result = system(xs3p_cmd, out: log, err: log)
+        stdout_and_stderr_str, status = Open3.capture2e(xs3p_cmd)
+        log.puts stdout_and_stderr_str
+        result = status.success?
         log.puts "Error generating documentation" unless result
         result
       else
-        system(xs3p_cmd)
+        stdout_and_stderr_str, status = Open3.capture2e(xs3p_cmd)
+        status.success?
       end
     end
   end
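
All three hunks apply the same substitution: system(cmd, out: log, err: log), which streams the child's output into the log IO and returns only true/false/nil, is replaced by Open3.capture2e(cmd), which returns the combined stdout/stderr as a string together with a Process::Status, so the output can be written to the log explicitly and success checked with status.success?. A minimal sketch of the two call shapes, using a placeholder command rather than the real java/xsltproc invocations:

    require 'open3'

    cmd = "echo hello" # placeholder command, for illustration only

    # Before: redirect the child's stdout/stderr into an IO; only a boolean-ish value comes back.
    ok = system(cmd, out: $stdout, err: $stdout)
    puts "system returned #{ok.inspect}"

    # After: capture combined stdout+stderr as a string and inspect the exit status.
    output, status = Open3.capture2e(cmd)
    puts output
    puts "success: #{status.success?} (exit status #{status.exitstatus})"

One practical difference is that the caller now decides where the output goes (log.puts in the diff) instead of handing the log IO's file descriptor to the child process.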
