Merge pull request #756 from openHPI/concurrent_scoring
Concurrent scoring
@@ -1,40 +1,39 @@
+# frozen_string_literal: true
+
 require 'concurrent/future'
 
 module SubmissionScoring
   def collect_test_results(submission)
     # Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: submission.id } do
-    submission.collect_files.select(&:teacher_defined_assessment?).map do |file|
-      future = Concurrent::Future.execute do
+    futures = submission.collect_files.select(&:teacher_defined_assessment?).map do |file|
+      Concurrent::Future.execute do
         # Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
         assessor = Assessor.new(execution_environment: submission.execution_environment)
         output = execute_test_file(file, submission)
         assessment = assessor.assess(output)
-        passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score] > 0))
+        passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
         testrun_output = passed ? nil : 'message: ' + output[:message].to_s + "\n stdout: " + output[:stdout].to_s + "\n stderr: " + output[:stderr].to_s
         unless testrun_output.blank?
           submission.exercise.execution_environment.error_templates.each do |template|
             pattern = Regexp.new(template.signature).freeze
-            if pattern.match(testrun_output)
-              StructuredError.create_from_template(template, testrun_output, submission)
-            end
+            StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
           end
         end
         Testrun.new(
           submission: submission,
-          cause: 'assess',
-          file: file,
+          cause: 'assess', # Required to differ run and assess for RfC show
+          file: file, # Test file that was executed
           passed: passed,
           output: testrun_output,
           container_execution_time: output[:container_execution_time],
           waiting_for_container_time: output[:waiting_for_container_time]
         ).save
         output.merge!(assessment)
         output.merge!(filename: file.name_with_extension, message: feedback_message(file, output), weight: file.weight)
         # end
       end
-      future.value
     end
-    # end
+    futures.map(&:value)
   end
 
   private :collect_test_results
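The substance of this hunk is where the futures are resolved: the old code called future.value inside the map block, so each test had to finish before the next future was even created, while the new code collects all futures first and only then maps over their values, letting the test files run concurrently. Below is a minimal sketch of that difference, assuming only the concurrent-ruby gem; run_test is a hypothetical stand-in for execute_test_file plus assessment.

# Minimal sketch, not CodeOcean code: run_test is a placeholder for the
# container execution and assessment done per test file in the diff above.
require 'concurrent/future'

def run_test(file)
  sleep 1 # placeholder for the slow, I/O-bound test execution
  "result for #{file}"
end

files = %w[test_a.rb test_b.rb test_c.rb]

# Old shape: resolving each future inside the map blocks before the next
# future is created, so the tests effectively run one after another.
sequential_results = files.map do |file|
  future = Concurrent::Future.execute { run_test(file) }
  future.value
end

# New shape: start all futures first, then block on their values, so the
# tests run concurrently on the executor's thread pool.
futures = files.map do |file|
  Concurrent::Future.execute { run_test(file) }
end
concurrent_results = futures.map(&:value)

With three one-second tests, the first variant takes roughly three seconds and the second roughly one, bounded by the size of the executor's thread pool.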
@@ -61,22 +60,18 @@ module SubmissionScoring
     score = 0.0
     unless outputs.nil? || outputs.empty?
       outputs.each do |output|
-        unless output.nil?
-          score += output[:score] * output[:weight]
-        end
+        score += output[:score] * output[:weight] unless output.nil?
 
-        if output.present? && output[:status] == :timeout
-          output[:stderr] += "\n\n#{t('exercises.editor.timeout', permitted_execution_time: submission.exercise.execution_environment.permitted_execution_time.to_s)}"
-        end
+        output[:stderr] += "\n\n#{t('exercises.editor.timeout', permitted_execution_time: submission.exercise.execution_environment.permitted_execution_time.to_s)}" if output.present? && output[:status] == :timeout
       end
     end
     submission.update(score: score)
     if submission.normalized_score == 1.0
       Thread.new do
-        RequestForComment.where(exercise_id: submission.exercise_id, user_id: submission.user_id, user_type: submission.user_type).each { |rfc|
+        RequestForComment.where(exercise_id: submission.exercise_id, user_id: submission.user_id, user_type: submission.user_type).each do |rfc|
           rfc.full_score_reached = true
           rfc.save
-        }
+        end
       ensure
         ActiveRecord::Base.connection_pool.release_connection
       end
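Beyond the style rewrites in this hunk (modifier unless/if, do...end instead of braces), the notable pattern is the ensure clause placed directly inside the Thread.new block, which returns the background thread's pooled ActiveRecord connection even if the RequestForComment update raises. Below is a minimal plain-Ruby sketch of that pattern, assuming Ruby 2.6+ (which allows ensure inside do...end blocks); the puts and the hash records are stand-ins for the ActiveRecord calls in the diff.

# Minimal sketch, not CodeOcean code: mark_full_score is a hypothetical helper;
# in the real code the cleanup is ActiveRecord::Base.connection_pool.release_connection.
def mark_full_score(records)
  Thread.new do
    records.each do |record|
      record[:full_score_reached] = true
    end
  ensure
    # Runs even if the update above raises, so the spawned thread never
    # keeps holding a pooled resource after it finishes.
    puts 'cleanup: release pooled resources'
  end
end

rfcs = [{id: 1}, {id: 2}]
mark_full_score(rfcs).join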