Merge pull request #1076 from openHPI/runner_management
Enable support for runner management
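Runner management is toggled through the application's code_ocean configuration. A minimal sketch of how the new Runner model resolves its strategy from that configuration (the :enabled and :strategy keys are taken from Runner.management_active? and Runner.strategy_class in this diff; the concrete strategy name below is only an illustrative placeholder):

    runner_management = CodeOcean::Config.new(:code_ocean).read[:runner_management]
    if runner_management && runner_management[:enabled]
      # e.g. runner_management[:strategy] == 'poseidon' (placeholder name, not part of this diff)
      "runner/strategy/#{runner_management[:strategy]}".camelize.constantize
    else
      Runner::Strategy::Null # built-in fallback strategy used when runner management is disabled
    end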
.github/workflows/ci.yml (2 changes, vendored)
@@ -16,8 +16,6 @@ jobs:
        --health-interval 10s
        --health-timeout 5s
        --health-retries 5
      co_execenv_python:
        image: openhpi/co_execenv_python:3.4
      co_execenv_java:
        image: openhpi/co_execenv_java:8

@@ -43,7 +43,6 @@ rspec:
    - rake db:schema:load
    - rake db:migrate
    - docker login -u "${DOCKERHUB_USER}" -p "${DOCKERHUB_PASS}"
    - docker pull openhpi/co_execenv_python:3.4
    - docker pull openhpi/co_execenv_java:8
  script:
    - rspec --format progress
Gemfile (3 changes)

@@ -16,9 +16,11 @@ gem 'highline'
gem 'i18n-js'
gem 'ims-lti', '< 2.0.0'
gem 'jbuilder'
gem 'json_schemer'
gem 'js-routes'
gem 'kramdown'
gem 'mimemagic'
gem 'net-http-persistent'
gem 'nokogiri'
gem 'pagedown-bootstrap-rails'
gem 'pg'

@@ -45,7 +47,6 @@ gem 'webpacker'
gem 'whenever', require: false

# Error Tracing
gem 'concurrent-ruby'
gem 'mnemosyne-ruby'
gem 'newrelic_rpm'
gem 'sentry-rails'
Gemfile.lock (17 changes)

@@ -140,6 +140,7 @@ GEM
    chronic (0.10.2)
    coderay (1.1.3)
    concurrent-ruby (1.1.9)
    connection_pool (2.2.5)
    crack (0.4.5)
      rexml
    crass (1.0.6)
@@ -157,6 +158,8 @@ GEM
      multi_json
    domain_name (0.5.20190701)
      unf (>= 0.0.5, < 1.0.0)
    ecma-re-validator (0.3.0)
      regexp_parser (~> 2.0)
    erubi (1.10.0)
    eventmachine (1.2.7)
    excon (0.87.0)
@@ -194,6 +197,7 @@ GEM
    haml (5.2.2)
      temple (>= 0.8.0)
      tilt
    hana (1.3.7)
    hashdiff (1.0.1)
    headless (2.3.1)
    highline (2.0.3)
@@ -222,6 +226,11 @@ GEM
    js-routes (2.1.1)
      railties (>= 4)
    json (2.3.1)
    json_schemer (0.2.18)
      ecma-re-validator (~> 0.3)
      hana (~> 1.3)
      regexp_parser (~> 2.0)
      uri_template (~> 0.7)
    jwt (2.3.0)
    kaminari (1.2.1)
      activesupport (>= 4.1.0)
@@ -271,6 +280,8 @@ GEM
    multi_xml (0.6.0)
    multipart-post (2.1.1)
    nested_form (0.3.2)
    net-http-persistent (4.0.0)
      connection_pool (~> 2.2)
    netrc (0.11.0)
    newrelic_rpm (8.1.0)
    nio4r (2.5.8)
@@ -507,6 +518,7 @@ GEM
      unf_ext
    unf_ext (0.0.8)
    unicode-display_width (2.1.0)
    uri_template (0.7.0)
    web-console (4.1.0)
      actionview (>= 6.0.0)
      activemodel (>= 6.0.0)
@@ -547,7 +559,6 @@ DEPENDENCIES
  capybara
  carrierwave
  charlock_holmes
  concurrent-ruby
  database_cleaner
  docker-api
  eventmachine
@@ -561,10 +572,12 @@ DEPENDENCIES
  ims-lti (< 2.0.0)
  jbuilder
  js-routes
  json_schemer
  kramdown
  listen
  mimemagic
  mnemosyne-ruby
  net-http-persistent
  newrelic_rpm
  nokogiri
  nyan-cat-formatter
@@ -611,4 +624,4 @@ DEPENDENCIES
  whenever

BUNDLED WITH
   2.2.23
   2.2.29
@@ -67,13 +67,31 @@ CodeOceanEditorEvaluation = {
  },

  printScoringResults: function (response) {
    response = (Array.isArray(response)) ? response : [response]
    const test_results = response.filter(function(x) {
      if (x === undefined || x === null) {
        return false;
      }
      switch (x.file_role) {
        case 'teacher_defined_test':
          return true;
        case 'teacher_defined_linter':
          return true;
        default:
          return false;
      }
    });

    $('#results ul').first().html('');
    $('.test-count .number').html(response.filter(function(x) { return x && x.file_role === 'teacher_defined_test'; }).length);
    $('.test-count .number').html(test_results.length);
    this.clearOutput();

    _.each(response, function (result, index) {
      this.printOutput(result, false, index);
      this.printScoringResult(result, index);
    _.each(test_results, function (result, index) {
      // based on https://stackoverflow.com/questions/8511281/check-if-a-value-is-an-object-in-javascript
      if (result === Object(result)) {
        this.printOutput(result, false, index);
        this.printScoringResult(result, index);
      }
    }.bind(this));

    if (_.some(response, function (result) {

@@ -157,7 +175,6 @@ CodeOceanEditorEvaluation = {
      if (!msg.data || msg.data === "\r") {
        return;
      }
      msg.data = msg.data.replace(/(\r)/gm, "\n");
      var stream = {};
      stream[msg.stream] = msg.data;
      this.printOutput(stream, true, 0);
@@ -2,11 +2,11 @@ CodeOceanEditorWebsocket = {
  websocket: null,

  createSocketUrl: function(url) {
    var sockURL = new URL(url, window.location);
    const sockURL = new URL(url, window.location);
    // not needed any longer, we put it directly into the url: sockURL.pathname = url;

    // sanitize socket protocol string, strip trailing slash and other malicious chars if they are there
    sockURL.protocol = '<%= DockerClient.config['ws_client_protocol']&.match(/(\w+):*\/*/)&.to_a&.at(1) %>:';
    // replace `http` with `ws` for the WebSocket connection. This also works with `https` and `wss`.
    sockURL.protocol = sockURL.protocol.replace("http", "ws");

    // strip anchor if it is in the url
    sockURL.hash = '';
@@ -26,8 +26,7 @@ CodeOceanEditorSubmissions = {
          cause: $(initiator).data('cause') || $(initiator).prop('id'),
          exercise_id: $('#editor').data('exercise-id'),
          files_attributes: (filter || _.identity)(this.collectFiles())
        },
        annotations_arr: []
      }
    },
    dataType: 'json',
    method: 'POST',
@@ -205,82 +205,3 @@ Turtle.prototype.css = function (key, value) {
|
||||
this.canvas.css(key, value);
|
||||
}
|
||||
};
|
||||
|
||||
function run(launchmsg) {
|
||||
var i, turtlescreen, msg, result, cmd;
|
||||
$('#assess').empty();
|
||||
|
||||
turtlescreen = new Turtle();
|
||||
|
||||
output = $('#output');
|
||||
output.empty();
|
||||
if (typeof pipeurl === 'undefined') {
|
||||
if (wp_port === '443') {
|
||||
pipeurl = 'wss://'+wp_hostname+'/pipe';
|
||||
} else {
|
||||
pipeurl = 'ws://'+wp_hostname+':'+wp_port+'/pipe';
|
||||
}
|
||||
}
|
||||
saveFile();
|
||||
output.pipe = new WebSocket(pipeurl);
|
||||
output.pipe.onopen = function () {
|
||||
output.pipe.send(JSON.stringify(launchmsg));
|
||||
};
|
||||
output.pipe.onmessage = function (response) {
|
||||
msg = JSON.parse(response.data);
|
||||
if (msg.cmd === 'input') {
|
||||
output.inputelem = $('<input>',{'size':40});
|
||||
submit = $('<input>',{'type':'submit'});
|
||||
submit.click(function (){
|
||||
text = output.inputelem.val();
|
||||
output.input.replaceWith($('<code>', {text:text+'\n'}));
|
||||
output.pipe.send(JSON.stringify({'cmd':'inputresult',
|
||||
'data':text}));
|
||||
});
|
||||
output.inputelem.keydown(function(event){
|
||||
if(event.keyCode === 13){
|
||||
submit.click();
|
||||
}
|
||||
});
|
||||
output.append($('<code>', {text:msg.data}));
|
||||
output.input = $('<span>').append(output.inputelem).append(submit);
|
||||
output.append(output.input);
|
||||
output.inputelem.focus();
|
||||
} else if (msg.cmd === 'stop') {
|
||||
if (launchmsg.cmd === 'runscript') {
|
||||
if (msg.timedout) {
|
||||
output.append('<hr>Dein Programm hat zu lange gerechnet und wurde beendet.');
|
||||
} else {
|
||||
output.append('<hr>Dein Progamm wurde beendet');
|
||||
}
|
||||
}
|
||||
output.pipe.close();
|
||||
} else if (msg.cmd === 'passed') {
|
||||
$('#assess').html("Herzlich Glückwunsch! Dein Programm funktioniert korrekt.");
|
||||
} else if (msg.cmd === 'failed') {
|
||||
$('#assess').html(msg.data);
|
||||
} else if (msg.cmd === 'turtle') {
|
||||
if (msg.action in turtlescreen) {
|
||||
result = turtlescreen[msg.action].apply(turtlescreen, msg.args);
|
||||
output.pipe.send(JSON.stringify({cmd:'result', 'result':result}));
|
||||
} else {
|
||||
output.pipe.send(JSON.stringify({cmd:'exception', exception:'AttributeError',
|
||||
message:msg.action}));
|
||||
}
|
||||
} else if (msg.cmd === 'turtlebatch') {
|
||||
for (i=0; i < msg.batch.length; i += 1) {
|
||||
cmd = msg.batch[i];
|
||||
turtlescreen[cmd[0]].apply(turtlescreen, cmd[1]);
|
||||
}
|
||||
} else {
|
||||
if(msg.stream === 'internal') {
|
||||
output.append('<hr><em>Interner Fehler (bitte melden):</em>\n');
|
||||
}
|
||||
else if (msg.stream === 'stderr') {
|
||||
showConsole();
|
||||
$('#consoleradio').prop('checked', 'checked');
|
||||
}
|
||||
output.append($('<code>',{text:msg.data, 'class':msg.stream}));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@@ -15,13 +15,5 @@ module Admin
        format.json { render(json: dashboard_data) }
      end
    end

    def dump_docker
      authorize(self)
      respond_to do |format|
        format.html { render(json: DockerContainerPool.dump_info) }
        format.json { render(json: DockerContainerPool.dump_info) }
      end
    end
  end
end
@@ -5,10 +5,13 @@ module CommonBehavior
    @object = options[:object]
    respond_to do |format|
      if @object.save
        yield if block_given?
        notice = t('shared.object_created', model: @object.class.model_name.human)
        if block_given?
          result = yield
          notice = result if result.present?
        end
        path = options[:path].try(:call) || @object
        respond_with_valid_object(format, notice: t('shared.object_created', model: @object.class.model_name.human),
path: path, status: :created)
        respond_with_valid_object(format, notice: notice, path: path, status: :created)
      else
        respond_with_invalid_object(format, template: :new)
      end

@@ -42,9 +45,13 @@ path: path, status: :created)
    @object = options[:object]
    respond_to do |format|
      if @object.update(options[:params])
        notice = t('shared.object_updated', model: @object.class.model_name.human)
        if block_given?
          result = yield
          notice = result if result.present?
        end
        path = options[:path] || @object
        respond_with_valid_object(format, notice: t('shared.object_updated', model: @object.class.model_name.human),
path: path, status: :ok)
        respond_with_valid_object(format, notice: notice, path: path, status: :ok)
      else
        respond_with_invalid_object(format, template: :edit)
      end
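With this change, create_and_respond and update_and_respond treat a block's return value as an override for the default flash notice; returning nil keeps the default. The execution environments controller below relies on this to surface a failed synchronization with the runner management, roughly like so (sketch assembled from the hunks in this diff):

    create_and_respond(object: @execution_environment) do
      # returns an error message when syncing to the runner management fails, nil otherwise
      sync_to_runner_management
    end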
@@ -1,112 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'concurrent/future'
|
||||
|
||||
module SubmissionScoring
|
||||
def collect_test_results(submission)
|
||||
# Mnemosyne.trace 'custom.codeocean.collect_test_results', meta: { submission: submission.id } do
|
||||
futures = submission.collect_files.select(&:teacher_defined_assessment?).map do |file|
|
||||
Concurrent::Future.execute do
|
||||
# Mnemosyne.trace 'custom.codeocean.collect_test_results_block', meta: { file: file.id, submission: submission.id } do
|
||||
assessor = Assessor.new(execution_environment: submission.execution_environment)
|
||||
output = execute_test_file(file, submission)
|
||||
assessment = assessor.assess(output)
|
||||
passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
|
||||
testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
|
||||
if testrun_output.present?
|
||||
submission.exercise.execution_environment.error_templates.each do |template|
|
||||
pattern = Regexp.new(template.signature).freeze
|
||||
StructuredError.create_from_template(template, testrun_output, submission) if pattern.match(testrun_output)
|
||||
end
|
||||
end
|
||||
testrun = Testrun.create(
|
||||
submission: submission,
|
||||
cause: 'assess', # Required to differ run and assess for RfC show
|
||||
file: file, # Test file that was executed
|
||||
passed: passed,
|
||||
output: testrun_output,
|
||||
container_execution_time: output[:container_execution_time],
|
||||
waiting_for_container_time: output[:waiting_for_container_time]
|
||||
)
|
||||
|
||||
filename = file.name_with_extension
|
||||
|
||||
if file.teacher_defined_linter?
|
||||
LinterCheckRun.create_from(testrun, assessment)
|
||||
switch_locale do
|
||||
assessment = assessor.translate_linter(assessment, I18n.locale)
|
||||
|
||||
# replace file name with hint if linter is not used for grading. Refactor!
|
||||
filename = t('exercises.implement.not_graded') if file.weight.zero?
|
||||
end
|
||||
end
|
||||
|
||||
output.merge!(assessment)
|
||||
output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
|
||||
# end
|
||||
end
|
||||
end
|
||||
futures.map(&:value!)
|
||||
end
|
||||
|
||||
private :collect_test_results
|
||||
|
||||
def execute_test_file(file, submission)
|
||||
DockerClient.new(execution_environment: file.context.execution_environment).execute_test_command(submission,
|
||||
file.name_with_extension)
|
||||
end
|
||||
|
||||
private :execute_test_file
|
||||
|
||||
def feedback_message(file, output)
|
||||
switch_locale do
|
||||
if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
|
||||
I18n.t('exercises.implement.default_test_feedback')
|
||||
elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
|
||||
I18n.t('exercises.implement.default_linter_feedback')
|
||||
else
|
||||
render_markdown(file.feedback_message)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def score_submission(submission)
|
||||
outputs = collect_test_results(submission)
|
||||
score = 0.0
|
||||
if outputs.present?
|
||||
outputs.each do |output|
|
||||
score += output[:score] * output[:weight] unless output.nil?
|
||||
|
||||
if output.present? && output[:status] == :timeout
|
||||
output[:stderr] += "\n\n#{t('exercises.editor.timeout',
|
||||
permitted_execution_time: submission.exercise.execution_environment.permitted_execution_time.to_s)}"
|
||||
end
|
||||
end
|
||||
end
|
||||
submission.update(score: score)
|
||||
if submission.normalized_score.to_d == 1.0.to_d
|
||||
Thread.new do
|
||||
RequestForComment.where(exercise_id: submission.exercise_id, user_id: submission.user_id,
|
||||
user_type: submission.user_type).each do |rfc|
|
||||
rfc.full_score_reached = true
|
||||
rfc.save
|
||||
end
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
end
|
||||
if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
|
||||
outputs.each do |output|
|
||||
output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
|
||||
end
|
||||
end
|
||||
|
||||
# Return all test results except for those of a linter if not allowed
|
||||
show_linter = Python20CourseWeek.show_linter? submission.exercise
|
||||
outputs&.reject do |output|
|
||||
next if show_linter || output.blank?
|
||||
|
||||
output[:file_role] == 'teacher_defined_linter'
|
||||
end
|
||||
end
|
||||
end
|
@@ -15,7 +15,9 @@ class ExecutionEnvironmentsController < ApplicationController
  def create
    @execution_environment = ExecutionEnvironment.new(execution_environment_params)
    authorize!
    create_and_respond(object: @execution_environment)
    create_and_respond(object: @execution_environment) do
      sync_to_runner_management
    end
  end

  def destroy

@@ -28,8 +30,9 @@ class ExecutionEnvironmentsController < ApplicationController
  end

  def execute_command
    @docker_client = DockerClient.new(execution_environment: @execution_environment)
    render(json: @docker_client.execute_arbitrary_command(params[:command]))
    runner = Runner.for(current_user, @execution_environment)
    output = runner.execute_command(params[:command], raise_exception: false)
    render json: output
  end

  def working_time_query

@@ -105,8 +108,15 @@ class ExecutionEnvironmentsController < ApplicationController

  def execution_environment_params
    if params[:execution_environment].present?
      params[:execution_environment].permit(:docker_image, :exposed_ports, :editor_mode, :file_extension, :file_type_id, :help, :indent_size, :memory_limit, :name, :network_enabled, :permitted_execution_time, :pool_size, :run_command, :test_command, :testing_framework).merge(
        user_id: current_user.id, user_type: current_user.class.name
      exposed_ports = if params[:execution_environment][:exposed_ports_list]
                        # Transform the `exposed_ports_list` to `exposed_ports` array
                        params[:execution_environment].delete(:exposed_ports_list).scan(/\d+/)
                      else
                        []
                      end

      params[:execution_environment].permit(:docker_image, :editor_mode, :file_extension, :file_type_id, :help, :indent_size, :memory_limit, :cpu_limit, :name, :network_enabled, :permitted_execution_time, :pool_size, :run_command, :test_command, :testing_framework).merge(
        user_id: current_user.id, user_type: current_user.class.name, exposed_ports: exposed_ports
      )
    end
  end

@@ -123,12 +133,12 @@ class ExecutionEnvironmentsController < ApplicationController
  end

  def set_docker_images
    DockerClient.check_availability!
    @docker_images = DockerClient.image_tags.sort
  rescue DockerClient::Error => e
    @docker_images = []
    @docker_images ||= ExecutionEnvironment.pluck(:docker_image)
    @docker_images += Runner.strategy_class.available_images
  rescue Runner::Error::InternalServerError => e
    flash[:warning] = e.message
    Sentry.capture_exception(e)
  ensure
    @docker_images = @docker_images.sort.uniq
  end
  private :set_docker_images

@@ -155,6 +165,30 @@ class ExecutionEnvironmentsController < ApplicationController
  end

  def update
    update_and_respond(object: @execution_environment, params: execution_environment_params)
    update_and_respond(object: @execution_environment, params: execution_environment_params) do
      sync_to_runner_management
    end
  end

  def sync_all_to_runner_management
    authorize ExecutionEnvironment

    return unless Runner.management_active?

    success = ExecutionEnvironment.all.map do |execution_environment|
      Runner.strategy_class.sync_environment(execution_environment)
    end
    if success.all?
      redirect_to ExecutionEnvironment, notice: t('execution_environments.index.synchronize_all.success')
    else
      redirect_to ExecutionEnvironment, alert: t('execution_environments.index.synchronize_all.failure')
    end
  end

  def sync_to_runner_management
    unless Runner.management_active? && Runner.strategy_class.sync_environment(@execution_environment)
      t('execution_environments.form.errors.not_synced_to_runner_management')
    end
  end
  private :sync_to_runner_management
end
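For reference, the exposed_ports_list transformation in execution_environment_params above relies on String#scan, so a comma-separated form field becomes an array of port strings before being merged back as exposed_ports (values below are illustrative):

    '80, 443'.scan(/\d+/)  # => ["80", "443"]
    # ExecutionEnvironment#exposed_ports_list (defined later in this diff) renders the array back as "80, 443"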
@@ -4,7 +4,6 @@ class ExercisesController < ApplicationController
  include CommonBehavior
  include Lti
  include SubmissionParameters
  include SubmissionScoring
  include TimeHelper

  before_action :handle_file_uploads, only: %i[create update]

@@ -533,7 +532,7 @@ working_time_accumulated: working_time_accumulated})

  def submit
    @submission = Submission.create(submission_params)
    score_submission(@submission)
    @submission.calculate_score
    if @submission.user.external_user? && lti_outcome_service?(@submission.exercise_id, @submission.user.id)
      transmit_lti_score
    else
@@ -2,7 +2,6 @@

class RemoteEvaluationController < ApplicationController
  include RemoteEvaluationParameters
  include SubmissionScoring
  include Lti

  skip_after_action :verify_authorized

@@ -63,7 +62,7 @@ status: 202}
    validation_token = remote_evaluation_params[:validation_token]
    if (remote_evaluation_mapping = RemoteEvaluationMapping.find_by(validation_token: validation_token))
      @submission = Submission.create(build_submission_params(cause, remote_evaluation_mapping))
      score_submission(@submission)
      @submission.calculate_score
    else
      # TODO: better output
      # TODO: check token expired?
@@ -1,8 +1,6 @@
# frozen_string_literal: true

class RequestForCommentsController < ApplicationController
  include SubmissionScoring

  before_action :require_user!
  before_action :set_request_for_comment, only: %i[show mark_as_solved set_thank_you_note]
  before_action :set_study_group_grouping,

@@ -119,12 +117,10 @@ class RequestForCommentsController < ApplicationController

    respond_to do |format|
      if @request_for_comment.save
        # create thread here and execute tests. A run is triggered from the frontend and does not need to be handled here.
        Thread.new do
          score_submission(@request_for_comment.submission)
        ensure
          ActiveRecord::Base.connection_pool.release_connection
        end
        # execute the tests here and wait until they finished.
        # As the same runner is used for the score and test run, no parallelization is possible
        # A run is triggered from the frontend and does not need to be handled here.
        @request_for_comment.submission.calculate_score
        format.json { render :show, status: :created, location: @request_for_comment }
      else
        format.html { render :new }
@@ -5,76 +5,22 @@ class SubmissionsController < ApplicationController
|
||||
include CommonBehavior
|
||||
include Lti
|
||||
include SubmissionParameters
|
||||
include SubmissionScoring
|
||||
include Tubesock::Hijack
|
||||
|
||||
before_action :set_submission,
|
||||
only: %i[download download_file render_file run score extract_errors show statistics test]
|
||||
before_action :set_docker_client, only: %i[run test]
|
||||
before_action :set_files, only: %i[download download_file render_file show run]
|
||||
before_action :set_file, only: %i[download_file render_file run]
|
||||
before_action :set_submission, only: %i[download download_file render_file run score show statistics test]
|
||||
before_action :set_files, only: %i[download show]
|
||||
before_action :set_files_and_specific_file, only: %i[download_file render_file run test]
|
||||
before_action :set_mime_type, only: %i[download_file render_file]
|
||||
skip_before_action :verify_authenticity_token, only: %i[download_file render_file]
|
||||
|
||||
def max_run_output_buffer_size
|
||||
if @submission.cause == 'requestComments'
|
||||
5000
|
||||
else
|
||||
500
|
||||
end
|
||||
end
|
||||
|
||||
def authorize!
|
||||
authorize(@submission || @submissions)
|
||||
end
|
||||
private :authorize!
|
||||
|
||||
def create
|
||||
@submission = Submission.new(submission_params)
|
||||
authorize!
|
||||
copy_comments
|
||||
create_and_respond(object: @submission)
|
||||
end
|
||||
|
||||
def command_substitutions(filename)
|
||||
{
|
||||
class_name: File.basename(filename, File.extname(filename)).upcase_first,
|
||||
filename: filename,
|
||||
module_name: File.basename(filename, File.extname(filename)).underscore,
|
||||
}
|
||||
end
|
||||
private :command_substitutions
|
||||
|
||||
def copy_comments
|
||||
# copy each annotation and set the target_file.id
|
||||
params[:annotations_arr]&.each do |annotation|
|
||||
# comment = Comment.new(annotation[1].permit(:user_id, :file_id, :user_type, :row, :column, :text, :created_at, :updated_at))
|
||||
comment = Comment.new(user_id: annotation[1][:user_id], file_id: annotation[1][:file_id],
|
||||
user_type: current_user.class.name, row: annotation[1][:row], column: annotation[1][:column], text: annotation[1][:text])
|
||||
source_file = CodeOcean::File.find(annotation[1][:file_id])
|
||||
|
||||
# retrieve target file
|
||||
target_file = @submission.files.detect do |file|
|
||||
# file_id has to be that of a the former iteration OR of the initial file (if this is the first run)
|
||||
file.file_id == source_file.file_id || file.file_id == source_file.id # seems to be needed here: (check this): || file.file_id == source_file.id ; yes this is needed, for comments on templates as well as comments on files added by users.
|
||||
end
|
||||
|
||||
# save to assign an id
|
||||
target_file.save!
|
||||
|
||||
comment.file_id = target_file.id
|
||||
comment.save!
|
||||
end
|
||||
end
|
||||
|
||||
def download
|
||||
if @embed_options[:disable_download]
|
||||
raise Pundit::NotAuthorizedError
|
||||
end
|
||||
|
||||
# files = @submission.files.map{ }
|
||||
# zipline( files, 'submission.zip')
|
||||
# send_data(@file.content, filename: @file.name_with_extension)
|
||||
raise Pundit::NotAuthorizedError if @embed_options[:disable_download]
|
||||
|
||||
id_file = create_remote_evaluation_mapping
|
||||
|
||||
@@ -112,9 +58,7 @@ class SubmissionsController < ApplicationController
|
||||
end
|
||||
|
||||
def download_file
|
||||
if @embed_options[:disable_download]
|
||||
raise Pundit::NotAuthorizedError
|
||||
end
|
||||
raise Pundit::NotAuthorizedError if @embed_options[:disable_download]
|
||||
|
||||
if @file.native_file?
|
||||
send_file(@file.native_file.path)
|
||||
@@ -138,321 +82,152 @@ class SubmissionsController < ApplicationController
|
||||
end
|
||||
|
||||
def run
|
||||
# TODO: reimplement SSEs with websocket commands
|
||||
# with_server_sent_events do |server_sent_event|
|
||||
# output = @docker_client.execute_run_command(@submission, sanitize_filename)
|
||||
|
||||
# server_sent_event.write({stdout: output[:stdout]}, event: 'output') if output[:stdout]
|
||||
# server_sent_event.write({stderr: output[:stderr]}, event: 'output') if output[:stderr]
|
||||
# end
|
||||
# These method-local socket variables are required in order to use one socket
|
||||
# in the callbacks of the other socket. As the callbacks for the client socket
|
||||
# are registered first, the runner socket may still be nil.
|
||||
client_socket, runner_socket = nil
|
||||
|
||||
hijack do |tubesock|
|
||||
if @embed_options[:disable_run]
|
||||
kill_socket(tubesock)
|
||||
return
|
||||
client_socket = tubesock
|
||||
return kill_client_socket(client_socket) if @embed_options[:disable_run]
|
||||
|
||||
client_socket.onclose do |_event|
|
||||
runner_socket&.close(:terminated_by_client)
|
||||
end
|
||||
|
||||
# probably add:
|
||||
# ensure
|
||||
# #guarantee that the thread is releasing the DB connection after it is done
|
||||
# ApplicationRecord.connectionpool.releaseconnection
|
||||
# end
|
||||
unless EventMachine.reactor_running? && EventMachine.reactor_thread.alive?
|
||||
Thread.new do
|
||||
EventMachine.run
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
end
|
||||
client_socket.onmessage do |raw_event|
|
||||
# Obviously, this is just flushing the current connection: Filtering.
|
||||
next if raw_event == "\n"
|
||||
|
||||
# socket is the socket into the container, tubesock is the socket to the client
|
||||
# Otherwise, we expect to receive a JSON: Parsing.
|
||||
event = JSON.parse(raw_event).deep_symbolize_keys
|
||||
|
||||
# give the docker_client the tubesock object, so that it can send messages (timeout)
|
||||
@docker_client.tubesock = tubesock
|
||||
|
||||
container_request_time = Time.zone.now
|
||||
result = @docker_client.execute_run_command(@submission, sanitize_filename)
|
||||
tubesock.send_data JSON.dump({'cmd' => 'status', 'status' => result[:status]})
|
||||
@waiting_for_container_time = Time.zone.now - container_request_time
|
||||
|
||||
if result[:status] == :container_running
|
||||
socket = result[:socket]
|
||||
command = result[:command]
|
||||
|
||||
socket.on :message do |event|
|
||||
Rails.logger.info("#{Time.zone.now.getutc}: Docker sending: #{event.data}")
|
||||
handle_message(event.data, tubesock, result[:container])
|
||||
end
|
||||
|
||||
socket.on :close do |_event|
|
||||
kill_socket(tubesock)
|
||||
end
|
||||
|
||||
tubesock.onmessage do |data|
|
||||
Rails.logger.info("#{Time.zone.now.getutc}: Client sending: #{data}")
|
||||
# Check whether the client send a JSON command and kill container
|
||||
# if the command is 'client_kill', send it to docker otherwise.
|
||||
begin
|
||||
parsed = JSON.parse(data) unless data == "\n"
|
||||
if parsed.instance_of?(Hash) && parsed['cmd'] == 'client_kill'
|
||||
Rails.logger.debug('Client exited container.')
|
||||
@docker_client.kill_container(result[:container])
|
||||
case event[:cmd].to_sym
|
||||
when :client_kill
|
||||
close_client_connection(client_socket)
|
||||
Rails.logger.debug('Client exited container.')
|
||||
when :result, :canvasevent, :exception
|
||||
# The client cannot send something before the runner connection is established.
|
||||
if runner_socket.present?
|
||||
runner_socket.send_data raw_event
|
||||
else
|
||||
socket.send data
|
||||
Rails.logger.debug { "Sent the received client data to docker:#{data}" }
|
||||
Rails.logger.info("Could not forward data from client because runner connection was not established yet: #{event[:data].inspect}")
|
||||
end
|
||||
rescue JSON::ParserError
|
||||
socket.send data
|
||||
Rails.logger.debug { "Rescued parsing error, sent the received client data to docker:#{data}" }
|
||||
Sentry.set_extras(data: data)
|
||||
else
|
||||
Rails.logger.info("Unknown command from client: #{event[:cmd]}")
|
||||
Sentry.set_extras(event: event)
|
||||
Sentry.capture_message("Unknown command from client: #{event[:cmd]}")
|
||||
end
|
||||
rescue JSON::ParserError => e
|
||||
Rails.logger.info("Data received from client is not valid json: #{raw_event.inspect}")
|
||||
Sentry.set_extras(data: raw_event)
|
||||
Sentry.capture_exception(e)
|
||||
rescue TypeError => e
|
||||
Rails.logger.info("JSON data received from client cannot be parsed as hash: #{raw_event.inspect}")
|
||||
Sentry.set_extras(data: raw_event)
|
||||
Sentry.capture_exception(e)
|
||||
end
|
||||
end
|
||||
|
||||
@output = +''
|
||||
durations = @submission.run(@file) do |socket|
|
||||
runner_socket = socket
|
||||
client_socket.send_data JSON.dump({cmd: :status, status: :container_running})
|
||||
|
||||
runner_socket.on :stdout do |data|
|
||||
json_data = prepare data, :stdout
|
||||
@output << json_data[0, max_output_buffer_size - @output.size]
|
||||
client_socket.send_data(json_data)
|
||||
end
|
||||
|
||||
runner_socket.on :stderr do |data|
|
||||
json_data = prepare data, :stderr
|
||||
@output << json_data[0, max_output_buffer_size - @output.size]
|
||||
client_socket.send_data(json_data)
|
||||
end
|
||||
|
||||
runner_socket.on :exit do |exit_code|
|
||||
exit_statement =
|
||||
if @output.empty? && exit_code.zero?
|
||||
t('exercises.implement.no_output_exit_successful', timestamp: l(Time.zone.now, format: :short), exit_code: exit_code)
|
||||
elsif @output.empty?
|
||||
t('exercises.implement.no_output_exit_failure', timestamp: l(Time.zone.now, format: :short), exit_code: exit_code)
|
||||
elsif exit_code.zero?
|
||||
"\n#{t('exercises.implement.exit_successful', timestamp: l(Time.zone.now, format: :short), exit_code: exit_code)}"
|
||||
else
|
||||
"\n#{t('exercises.implement.exit_failure', timestamp: l(Time.zone.now, format: :short), exit_code: exit_code)}"
|
||||
end
|
||||
end
|
||||
client_socket.send_data JSON.dump({cmd: :write, stream: :stdout, data: "#{exit_statement}\n"})
|
||||
|
||||
# Send command after all listeners are attached.
|
||||
# Newline required to flush
|
||||
@execution_request_time = Time.zone.now
|
||||
socket.send "#{command}\n"
|
||||
Rails.logger.info("Sent command: #{command}")
|
||||
else
|
||||
kill_socket(tubesock)
|
||||
close_client_connection(client_socket)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def kill_socket(tubesock)
|
||||
@container_execution_time = Time.zone.now - @execution_request_time if @execution_request_time.present?
|
||||
# search for errors and save them as StructuredError (for scoring runs see submission_scoring.rb)
|
||||
errors = extract_errors
|
||||
send_hints(tubesock, errors)
|
||||
|
||||
# save the output of this "run" as a "testrun" (scoring runs are saved in submission_scoring.rb)
|
||||
@container_execution_time = durations[:execution_duration]
|
||||
@waiting_for_container_time = durations[:waiting_duration]
|
||||
rescue Runner::Error::ExecutionTimeout => e
|
||||
client_socket.send_data JSON.dump({cmd: :status, status: :timeout})
|
||||
close_client_connection(client_socket)
|
||||
Rails.logger.debug { "Running a submission timed out: #{e.message}" }
|
||||
@output = "timeout: #{@output}"
|
||||
extract_durations(e)
|
||||
rescue Runner::Error => e
|
||||
client_socket.send_data JSON.dump({cmd: :status, status: :container_depleted})
|
||||
close_client_connection(client_socket)
|
||||
Rails.logger.debug { "Runner error while running a submission: #{e.message}" }
|
||||
extract_durations(e)
|
||||
ensure
|
||||
save_run_output
|
||||
|
||||
# For Python containers, the @run_output is '{"cmd":"exit"}' as a string.
|
||||
# If this is the case, we should consider it as blank
|
||||
if @run_output.blank? || @run_output&.strip == '{"cmd":"exit"}' || @run_output&.strip == 'timeout:'
|
||||
@raw_output ||= ''
|
||||
@run_output ||= ''
|
||||
parse_message t('exercises.implement.no_output', timestamp: l(Time.zone.now, format: :short)), 'stdout', tubesock
|
||||
end
|
||||
|
||||
# Hijacked connection needs to be notified correctly
|
||||
tubesock.send_data JSON.dump({'cmd' => 'exit'})
|
||||
tubesock.close
|
||||
end
|
||||
|
||||
def handle_message(message, tubesock, container)
|
||||
@raw_output ||= ''
|
||||
@run_output ||= ''
|
||||
# Handle special commands first
|
||||
case message
|
||||
when /^#exit|{"cmd": "exit"}/
|
||||
# Just call exit_container on the docker_client.
|
||||
# Do not call kill_socket for the websocket to the client here.
|
||||
# @docker_client.exit_container closes the socket to the container,
|
||||
# kill_socket is called in the "on close handler" of the websocket to the container
|
||||
@docker_client.exit_container(container)
|
||||
when /^#timeout/
|
||||
@run_output = "timeout: #{@run_output}" # add information that this run timed out to the buffer
|
||||
else
|
||||
# Filter out information about run_command, test_command, user or working directory
|
||||
run_command = @submission.execution_environment.run_command % command_substitutions(sanitize_filename)
|
||||
test_command = @submission.execution_environment.test_command % command_substitutions(sanitize_filename)
|
||||
if test_command.blank?
|
||||
# If no test command is set, use the run_command for the RegEx below. Otherwise, no output will be displayed!
|
||||
test_command = run_command
|
||||
end
|
||||
unless %r{root@|:/workspace|#{run_command}|#{test_command}|bash: cmd:canvasevent: command not found}.match?(message)
|
||||
parse_message(message, 'stdout', tubesock, container)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def parse_message(message, output_stream, socket, container = nil, recursive: true)
|
||||
parsed = ''
|
||||
begin
|
||||
parsed = JSON.parse(message)
|
||||
if parsed.instance_of?(Hash) && parsed.key?('cmd')
|
||||
socket.send_data message
|
||||
Rails.logger.info("parse_message sent: #{message}")
|
||||
@docker_client.exit_container(container) if container && parsed['cmd'] == 'exit'
|
||||
else
|
||||
parsed = {'cmd' => 'write', 'stream' => output_stream, 'data' => message}
|
||||
socket.send_data JSON.dump(parsed)
|
||||
Rails.logger.info("parse_message sent: #{JSON.dump(parsed)}")
|
||||
end
|
||||
rescue JSON::ParserError
|
||||
# Check wether the message contains multiple lines, if true try to parse each line
|
||||
if recursive && message.include?("\n")
|
||||
message.split("\n").each do |part|
|
||||
parse_message(part, output_stream, socket, container, recursive: false)
|
||||
end
|
||||
elsif message.include?('<img') || message.start_with?('{"cmd') || message.include?('"turtlebatch"')
|
||||
# Rails.logger.info('img foung')
|
||||
@buffering = true
|
||||
@buffer = ''
|
||||
@buffer += message
|
||||
# Rails.logger.info('Starting to buffer')
|
||||
elsif @buffering && message.include?('/>')
|
||||
@buffer += message
|
||||
parsed = {'cmd' => 'write', 'stream' => output_stream, 'data' => @buffer}
|
||||
socket.send_data JSON.dump(parsed)
|
||||
# socket.send_data @buffer
|
||||
@buffering = false
|
||||
# Rails.logger.info('Sent complete buffer')
|
||||
elsif @buffering && message.end_with?("}\r")
|
||||
@buffer += message
|
||||
socket.send_data @buffer
|
||||
@buffering = false
|
||||
# Rails.logger.info('Sent complete buffer')
|
||||
elsif @buffering
|
||||
@buffer += message
|
||||
# Rails.logger.info('Appending to buffer')
|
||||
else
|
||||
# Rails.logger.info('else')
|
||||
parsed = {'cmd' => 'write', 'stream' => output_stream, 'data' => message}
|
||||
socket.send_data JSON.dump(parsed)
|
||||
Rails.logger.info("parse_message sent: #{JSON.dump(parsed)}")
|
||||
end
|
||||
ensure
|
||||
@raw_output += parsed['data'].to_s if parsed.instance_of?(Hash) && parsed.key?('data')
|
||||
# save the data that was send to the run_output if there is enough space left. this will be persisted as a testrun with cause "run"
|
||||
@run_output += JSON.dump(parsed).to_s if @run_output.size <= max_run_output_buffer_size
|
||||
end
|
||||
end
|
||||
|
||||
def save_run_output
|
||||
if @run_output.present?
|
||||
@run_output = @run_output[(0..max_run_output_buffer_size - 1)] # trim the string to max_message_buffer_size chars
|
||||
Testrun.create(
|
||||
file: @file,
|
||||
cause: 'run',
|
||||
submission: @submission,
|
||||
output: @run_output,
|
||||
container_execution_time: @container_execution_time,
|
||||
waiting_for_container_time: @waiting_for_container_time
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def extract_errors
|
||||
results = []
|
||||
if @raw_output.present?
|
||||
@submission.exercise.execution_environment.error_templates.each do |template|
|
||||
pattern = Regexp.new(template.signature).freeze
|
||||
if pattern.match(@raw_output)
|
||||
results << StructuredError.create_from_template(template, @raw_output, @submission)
|
||||
end
|
||||
end
|
||||
end
|
||||
results
|
||||
end
|
||||
|
||||
def score
|
||||
hijack do |tubesock|
|
||||
if @embed_options[:disable_score]
|
||||
kill_socket(tubesock)
|
||||
return
|
||||
end
|
||||
return if @embed_options[:disable_score]
|
||||
|
||||
unless EventMachine.reactor_running? && EventMachine.reactor_thread.alive?
|
||||
Thread.new do
|
||||
EventMachine.run
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
end
|
||||
# tubesock is the socket to the client
|
||||
|
||||
# the score_submission call will end up calling docker exec, which is blocking.
|
||||
# to ensure responsiveness, we therefore open a thread here.
|
||||
Thread.new do
|
||||
tubesock.send_data JSON.dump(score_submission(@submission))
|
||||
|
||||
# To enable hints when scoring a submission, uncomment the next line:
|
||||
# send_hints(tubesock, StructuredError.where(submission: @submission))
|
||||
|
||||
tubesock.send_data JSON.dump({'cmd' => 'exit'})
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
tubesock.send_data(JSON.dump(@submission.calculate_score))
|
||||
# To enable hints when scoring a submission, uncomment the next line:
|
||||
# send_hints(tubesock, StructuredError.where(submission: @submission))
|
||||
rescue Runner::Error => e
|
||||
tubesock.send_data JSON.dump({cmd: :status, status: :container_depleted})
|
||||
Rails.logger.debug { "Runner error while scoring submission #{@submission.id}: #{e.message}" }
|
||||
ensure
|
||||
kill_client_socket(tubesock)
|
||||
end
|
||||
end
|
||||
|
||||
def send_hints(tubesock, errors)
|
||||
return if @embed_options[:disable_hints]
|
||||
|
||||
errors = errors.to_a.uniq(&:hint)
|
||||
errors.each do |error|
|
||||
tubesock.send_data JSON.dump({cmd: 'hint', hint: error.hint, description: error.error_template.description})
|
||||
end
|
||||
end
|
||||
|
||||
def set_docker_client
|
||||
@docker_client = DockerClient.new(execution_environment: @submission.execution_environment)
|
||||
end
|
||||
private :set_docker_client
|
||||
|
||||
def set_file
|
||||
@file = @files.detect {|file| file.name_with_extension == sanitize_filename }
|
||||
head :not_found unless @file
|
||||
end
|
||||
private :set_file
|
||||
|
||||
def set_files
|
||||
@files = @submission.collect_files.select(&:visible)
|
||||
end
|
||||
private :set_files
|
||||
|
||||
def set_mime_type
|
||||
@mime_type = Mime::Type.lookup_by_extension(@file.file_type.file_extension.gsub(/^\./, ''))
|
||||
response.headers['Content-Type'] = @mime_type.to_s
|
||||
end
|
||||
private :set_mime_type
|
||||
|
||||
def set_submission
|
||||
@submission = Submission.find(params[:id])
|
||||
authorize!
|
||||
end
|
||||
private :set_submission
|
||||
|
||||
def show; end
|
||||
|
||||
def statistics; end
|
||||
|
||||
def test
|
||||
hijack do |tubesock|
|
||||
unless EventMachine.reactor_running? && EventMachine.reactor_thread.alive?
|
||||
Thread.new do
|
||||
EventMachine.run
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
end
|
||||
return kill_client_socket(tubesock) if @embed_options[:disable_run]
|
||||
|
||||
output = @docker_client.execute_test_command(@submission, sanitize_filename)
|
||||
|
||||
# tubesock is the socket to the client
|
||||
tubesock.send_data JSON.dump(output)
|
||||
tubesock.send_data JSON.dump('cmd' => 'exit')
|
||||
tubesock.send_data(JSON.dump(@submission.test(@file)))
|
||||
rescue Runner::Error => e
|
||||
tubesock.send_data JSON.dump({cmd: :status, status: :container_depleted})
|
||||
Rails.logger.debug { "Runner error while testing submission #{@submission.id}: #{e.message}" }
|
||||
ensure
|
||||
kill_client_socket(tubesock)
|
||||
end
|
||||
end
|
||||
|
||||
def with_server_sent_events
|
||||
response.headers['Content-Type'] = 'text/event-stream'
|
||||
server_sent_event = SSE.new(response.stream)
|
||||
server_sent_event.write(nil, event: 'start')
|
||||
yield(server_sent_event) if block_given?
|
||||
server_sent_event.write({code: 200}, event: 'close')
|
||||
rescue StandardError => e
|
||||
Sentry.capture_exception(e)
|
||||
logger.error(e.message)
|
||||
logger.error(e.backtrace.join("\n"))
|
||||
server_sent_event.write({code: 500}, event: 'close')
|
||||
ensure
|
||||
server_sent_event.close
|
||||
private
|
||||
|
||||
def authorize!
|
||||
authorize(@submission || @submissions)
|
||||
end
|
||||
|
||||
def close_client_connection(client_socket)
|
||||
# search for errors and save them as StructuredError (for scoring runs see submission.rb)
|
||||
errors = extract_errors
|
||||
send_hints(client_socket, errors)
|
||||
kill_client_socket(client_socket)
|
||||
end
|
||||
|
||||
def kill_client_socket(client_socket)
|
||||
client_socket.send_data JSON.dump({cmd: :exit})
|
||||
client_socket.close
|
||||
end
|
||||
private :with_server_sent_events
|
||||
|
||||
def create_remote_evaluation_mapping
|
||||
user = @submission.user
|
||||
@@ -480,7 +255,89 @@ class SubmissionsController < ApplicationController
|
||||
path
|
||||
end
|
||||
|
||||
def extract_durations(error)
|
||||
@container_execution_time = error.execution_duration
|
||||
@waiting_for_container_time = error.waiting_duration
|
||||
end
|
||||
|
||||
def extract_errors
|
||||
results = []
|
||||
if @output.present?
|
||||
@submission.exercise.execution_environment.error_templates.each do |template|
|
||||
pattern = Regexp.new(template.signature).freeze
|
||||
results << StructuredError.create_from_template(template, @output, @submission) if pattern.match(@output)
|
||||
end
|
||||
end
|
||||
results
|
||||
end
|
||||
|
||||
def max_output_buffer_size
|
||||
if @submission.cause == 'requestComments'
|
||||
5000
|
||||
else
|
||||
500
|
||||
end
|
||||
end
|
||||
|
||||
def prepare(data, stream)
|
||||
if valid_command? data
|
||||
data
|
||||
else
|
||||
JSON.dump({cmd: :write, stream: stream, data: data})
|
||||
end
|
||||
end
|
||||
|
||||
def sanitize_filename
|
||||
params[:filename].gsub(/\.json$/, '')
|
||||
end
|
||||
|
||||
# save the output of this "run" as a "testrun" (scoring runs are saved in submission.rb)
|
||||
def save_run_output
|
||||
Testrun.create(
|
||||
file: @file,
|
||||
cause: 'run',
|
||||
submission: @submission,
|
||||
output: @output,
|
||||
container_execution_time: @container_execution_time,
|
||||
waiting_for_container_time: @waiting_for_container_time
|
||||
)
|
||||
end
|
||||
|
||||
def send_hints(tubesock, errors)
|
||||
return if @embed_options[:disable_hints]
|
||||
|
||||
errors = errors.to_a.uniq(&:hint)
|
||||
errors.each do |error|
|
||||
tubesock.send_data JSON.dump({cmd: 'hint', hint: error.hint, description: error.error_template.description})
|
||||
end
|
||||
end
|
||||
|
||||
def set_files_and_specific_file
|
||||
# @files contains all visible files for the user
|
||||
# @file contains the specific file requested for run / test / render / ...
|
||||
set_files
|
||||
@file = @files.detect {|file| file.name_with_extension == sanitize_filename }
|
||||
head :not_found unless @file
|
||||
end
|
||||
|
||||
def set_files
|
||||
@files = @submission.collect_files.select(&:visible)
|
||||
end
|
||||
|
||||
def set_mime_type
|
||||
@mime_type = Mime::Type.lookup_by_extension(@file.file_type.file_extension.gsub(/^\./, ''))
|
||||
response.headers['Content-Type'] = @mime_type.to_s
|
||||
end
|
||||
|
||||
def set_submission
|
||||
@submission = Submission.find(params[:id])
|
||||
authorize!
|
||||
end
|
||||
|
||||
def valid_command?(data)
|
||||
parsed = JSON.parse(data)
|
||||
parsed.instance_of?(Hash) && parsed.key?('cmd')
|
||||
rescue JSON::ParserError
|
||||
false
|
||||
end
|
||||
end
|
||||
|
app/errors/application_error.rb (new file, 4 lines)

@@ -0,0 +1,4 @@
# frozen_string_literal: true

class ApplicationError < StandardError
end

app/errors/runner/connection/buffer/error.rb (new file, 7 lines)

@@ -0,0 +1,7 @@
# frozen_string_literal: true

class Runner::Connection::Buffer
  class Error < ApplicationError
    class NotEmpty < Error; end
  end
end

app/errors/runner/error.rb (new file, 29 lines)

@@ -0,0 +1,29 @@
# frozen_string_literal: true

class Runner
  class Error < ApplicationError
    attr_accessor :waiting_duration, :execution_duration

    class BadRequest < Error; end

    class EnvironmentNotFound < Error; end

    class ExecutionTimeout < Error; end

    class InternalServerError < Error; end

    class NotAvailable < Error; end

    class Unauthorized < Error; end

    class RunnerNotFound < Error; end

    class FaradayError < Error; end

    class UnexpectedResponse < Error; end

    class WorkspaceError < Error; end

    class Unknown < Error; end
  end
end
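Callers rescue these classes from most specific to most general, as the controllers and models elsewhere in this diff do; a condensed sketch:

    begin
      runner.execute_command(command)
    rescue Runner::Error::ExecutionTimeout
      # the command exceeded the permitted execution time
    rescue Runner::Error => e
      # any other runner failure; e.waiting_duration / e.execution_duration may carry timing data
    end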
@@ -7,8 +7,15 @@ module Admin
    end

    def docker_data
      pool_size = begin
        Runner.strategy_class.pool_size
      rescue Runner::Error => e
        Rails.logger.debug { "Runner error while fetching current pool size: #{e.message}" }
        []
      end

      ExecutionEnvironment.order(:id).select(:id, :pool_size).map do |execution_environment|
        execution_environment.attributes.merge(quantity: DockerContainerPool.quantities[execution_environment.id])
        execution_environment.attributes.merge(quantity: pool_size[execution_environment.id])
      end
    end
  end
@@ -4,16 +4,6 @@ require File.expand_path('../../uploaders/file_uploader', __dir__)
require File.expand_path('../../../lib/active_model/validations/boolean_presence_validator', __dir__)

module CodeOcean
  class FileNameValidator < ActiveModel::Validator
    def validate(record)
      existing_files = File.where(name: record.name, path: record.path, file_type_id: record.file_type_id,
        context_id: record.context_id, context_type: record.context_type).to_a
      if !existing_files.empty? && (!record.context.is_a?(Exercise) || record.context.new_record?)
        record.errors[:base] << 'Duplicate'
      end
    end
  end

  class File < ApplicationRecord
    include DefaultValues
@@ -7,6 +7,9 @@ class ExecutionEnvironment < ApplicationRecord
  include DefaultValues

  VALIDATION_COMMAND = 'whoami'
  DEFAULT_CPU_LIMIT = 20
  DEFAULT_MEMORY_LIMIT = 256
  MINIMUM_MEMORY_LIMIT = 4

  after_initialize :set_default_values

@@ -20,12 +23,15 @@ class ExecutionEnvironment < ApplicationRecord
  validate :working_docker_image?, if: :validate_docker_image?
  validates :docker_image, presence: true
  validates :memory_limit,
    numericality: {greater_than_or_equal_to: DockerClient::MINIMUM_MEMORY_LIMIT, only_integer: true}, presence: true
    numericality: {greater_than_or_equal_to: MINIMUM_MEMORY_LIMIT, only_integer: true}, presence: true
  validates :network_enabled, boolean_presence: true
  validates :name, presence: true
  validates :permitted_execution_time, numericality: {only_integer: true}, presence: true
  validates :pool_size, numericality: {only_integer: true}, presence: true
  validates :run_command, presence: true
  validates :cpu_limit, presence: true, numericality: {greater_than: 0, only_integer: true}
  before_validation :clean_exposed_ports
  validates :exposed_ports, array: {numericality: {greater_than_or_equal_to: 0, less_than: 65_536, only_integer: true}}

  def set_default_values
    set_default_values_if_present(permitted_execution_time: 60, pool_size: 0)

@@ -36,6 +42,27 @@ class ExecutionEnvironment < ApplicationRecord
    name
  end

  def to_json(*_args)
    {
      id: id,
      image: docker_image,
      prewarmingPoolSize: pool_size,
      cpuLimit: cpu_limit,
      memoryLimit: memory_limit,
      networkAccess: network_enabled,
      exposedPorts: exposed_ports,
    }.to_json
  end

  def exposed_ports_list
    exposed_ports.join(', ')
  end

  def clean_exposed_ports
    self.exposed_ports = exposed_ports.uniq.sort
  end
  private :clean_exposed_ports

  def valid_test_setup?
    if test_command? ^ testing_framework?
      errors.add(:test_command,

@@ -51,10 +78,12 @@ class ExecutionEnvironment < ApplicationRecord
  private :validate_docker_image?

  def working_docker_image?
    DockerClient.pull(docker_image) if DockerClient.find_image_by_tag(docker_image).present?
    output = DockerClient.new(execution_environment: self).execute_arbitrary_command(VALIDATION_COMMAND)
    runner = Runner.for(author, self)
    output = runner.execute_command(VALIDATION_COMMAND)
    errors.add(:docker_image, "error: #{output[:stderr]}") if output[:stderr].present?
  rescue DockerClient::Error => e
  rescue Runner::Error::NotAvailable => e
    Rails.logger.info("The Docker image could not be verified: #{e}")
  rescue Runner::Error => e
    errors.add(:docker_image, "error: #{e}")
  end
  private :working_docker_image?
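A sketch of the payload ExecutionEnvironment#to_json produces for the runner management (keys exactly as defined above, values purely illustrative):

    ExecutionEnvironment.find(1).to_json
    # => '{"id":1,"image":"openhpi/co_execenv_python:3.4","prewarmingPoolSize":0,
    #      "cpuLimit":20,"memoryLimit":256,"networkAccess":false,"exposedPorts":[]}'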
155
app/models/runner.rb
Normal file
155
app/models/runner.rb
Normal file
@ -0,0 +1,155 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class Runner < ApplicationRecord
|
||||
belongs_to :execution_environment
|
||||
belongs_to :user, polymorphic: true
|
||||
|
||||
before_validation :request_id
|
||||
|
||||
validates :execution_environment, :user, :runner_id, presence: true
|
||||
|
||||
attr_accessor :strategy
|
||||
|
||||
def self.strategy_class
|
||||
@strategy_class ||= if Runner.management_active?
|
||||
strategy_name = CodeOcean::Config.new(:code_ocean).read[:runner_management][:strategy]
|
||||
"runner/strategy/#{strategy_name}".camelize.constantize
|
||||
else
|
||||
Runner::Strategy::Null
|
||||
end
|
||||
end
|
||||
|
||||
def self.management_active?
|
||||
@management_active ||= begin
|
||||
runner_management = CodeOcean::Config.new(:code_ocean).read[:runner_management]
|
||||
if runner_management
|
||||
runner_management[:enabled]
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def self.for(user, execution_environment)
|
||||
runner = find_by(user: user, execution_environment: execution_environment)
|
||||
if runner.nil?
|
||||
runner = Runner.create(user: user, execution_environment: execution_environment)
|
||||
# The `strategy` is added through the before_validation hook `:request_id`.
|
||||
raise Runner::Error::Unknown.new("Runner could not be saved: #{runner.errors.inspect}") unless runner.persisted?
|
||||
else
|
||||
# This information is required but not persisted in the runner model.
|
||||
runner.strategy = strategy_class.new(runner.runner_id, runner.execution_environment)
|
||||
end
|
||||
|
||||
runner
|
||||
end
|
||||
|
||||
def copy_files(files)
|
||||
@strategy.copy_files(files)
|
||||
rescue Runner::Error::RunnerNotFound
|
||||
request_new_id
|
||||
save
|
||||
@strategy.copy_files(files)
|
||||
end
|
||||
|
||||
def attach_to_execution(command, &block)
|
||||
starting_time = Time.zone.now
|
||||
begin
|
||||
# As the EventMachine reactor is probably shared with other threads, we cannot use EventMachine.run with
|
||||
# stop_event_loop to wait for the WebSocket connection to terminate. Instead we use a self built event
|
||||
# loop for that: Runner::EventLoop. The attach_to_execution method of the strategy is responsible for
|
||||
# initializing its Runner::Connection with the given event loop. The Runner::Connection class ensures that
|
||||
# this event loop is stopped after the socket was closed.
|
||||
event_loop = Runner::EventLoop.new
|
||||
socket = @strategy.attach_to_execution(command, event_loop, &block)
|
||||
event_loop.wait
|
||||
raise socket.error if socket.error.present?
|
||||
rescue Runner::Error => e
|
||||
e.execution_duration = Time.zone.now - starting_time
|
||||
raise
|
||||
end
|
||||
Time.zone.now - starting_time # execution duration
|
||||
end
|
||||
|
||||
def execute_command(command, raise_exception: true)
|
||||
output = {}
|
||||
stdout = +''
|
||||
stderr = +''
|
||||
try = 0
|
||||
begin
|
||||
if try.nonzero?
|
||||
request_new_id
|
||||
save
|
||||
end
|
||||
|
||||
exit_code = 1 # default to error
|
||||
execution_time = attach_to_execution(command) do |socket|
|
||||
socket.on :stderr do |data|
|
||||
stderr << data
|
||||
end
|
||||
socket.on :stdout do |data|
|
||||
stdout << data
|
||||
end
|
||||
socket.on :exit do |received_exit_code|
|
||||
exit_code = received_exit_code
|
||||
end
|
||||
end
|
||||
output.merge!(container_execution_time: execution_time, status: exit_code.zero? ? :ok : :failed)
|
||||
rescue Runner::Error::ExecutionTimeout => e
|
||||
Rails.logger.debug { "Running command `#{command}` timed out: #{e.message}" }
|
||||
output.merge!(status: :timeout, container_execution_time: e.execution_duration)
|
||||
rescue Runner::Error::RunnerNotFound => e
|
||||
Rails.logger.debug { "Running command `#{command}` failed for the first time: #{e.message}" }
|
||||
try += 1
|
||||
|
||||
if try == 1
|
||||
# Reset the variable. This is required to prevent raising an outdated exception after a successful second try
|
||||
e = nil
|
||||
retry
|
||||
end
|
||||
|
||||
Rails.logger.debug { "Running command `#{command}` failed for the second time: #{e.message}" }
|
||||
output.merge!(status: :failed, container_execution_time: e.execution_duration)
|
||||
rescue Runner::Error => e
|
||||
Rails.logger.debug { "Running command `#{command}` failed: #{e.message}" }
|
||||
output.merge!(status: :failed, container_execution_time: e.execution_duration)
|
||||
ensure
|
||||
# We forward the exception if requested
|
||||
raise e if raise_exception && defined?(e) && e.present?
|
||||
|
||||
output.merge!(stdout: stdout, stderr: stderr)
|
||||
end
|
||||
end
|
||||
|
||||
def destroy_at_management
|
||||
@strategy.destroy_at_management
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def request_id
|
||||
request_new_id if runner_id.blank?
|
||||
end
|
||||
|
||||
def request_new_id
|
||||
strategy_class = self.class.strategy_class
|
||||
begin
|
||||
self.runner_id = strategy_class.request_from_management(execution_environment)
|
||||
@strategy = strategy_class.new(runner_id, execution_environment)
|
||||
rescue Runner::Error::EnvironmentNotFound
|
||||
# Whenever the environment could not be found by the runner management, we
|
||||
# try to synchronize it and then forward a more specific error to our caller.
|
||||
if strategy_class.sync_environment(execution_environment)
|
||||
raise Runner::Error::EnvironmentNotFound.new(
|
||||
"The execution environment with id #{execution_environment.id} was not found yet by the runner management. "\
|
||||
'It has been successfully synced now so that the next request should be successful.'
|
||||
)
|
||||
else
|
||||
raise Runner::Error::EnvironmentNotFound.new(
|
||||
"The execution environment with id #{execution_environment.id} was not found by the runner management."\
|
||||
'In addition, it could not be synced so that this probably indicates a permanent error.'
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
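For orientation, a minimal usage sketch (not part of the diff) of how the Runner model above is driven by a caller; `current_user`, `environment`, the file collection, and the shell command are assumptions, while Runner.for, copy_files, and execute_command are taken from the code above.
runner = Runner.for(current_user, environment)           # reuses an existing runner or creates one
runner.copy_files(files)                                  # `files` collection is an assumption
output = runner.execute_command('python3 exercise.py', raise_exception: false)
output[:status]                                           # => :ok, :failed or :timeout
output.values_at(:stdout, :stderr, :container_execution_time)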
|
@ -135,4 +135,157 @@ class Submission < ApplicationRecord
|
||||
((rfc_element.comments_count < MAX_COMMENTS_ON_RECOMMENDED_RFC) && !rfc_element.question.empty?)
|
||||
end
|
||||
end
|
||||
|
||||
def calculate_score
|
||||
file_scores = nil
|
||||
# If prepared_runner raises an error, no Testrun will be created.
|
||||
prepared_runner do |runner, waiting_duration|
|
||||
file_scores = collect_files.select(&:teacher_defined_assessment?).map do |file|
|
||||
output = run_test_file file, runner, waiting_duration
|
||||
score_file(output, file)
|
||||
end
|
||||
end
|
||||
combine_file_scores(file_scores)
|
||||
end
|
||||
|
||||
def run(file, &block)
|
||||
run_command = command_for execution_environment.run_command, file.name_with_extension
|
||||
durations = {}
|
||||
prepared_runner do |runner, waiting_duration|
|
||||
durations[:execution_duration] = runner.attach_to_execution(run_command, &block)
|
||||
durations[:waiting_duration] = waiting_duration
|
||||
rescue Runner::Error => e
|
||||
e.waiting_duration = waiting_duration
|
||||
raise
|
||||
end
|
||||
durations
|
||||
end
|
||||
|
||||
def test(file)
|
||||
prepared_runner do |runner, waiting_duration|
|
||||
output = run_test_file file, runner, waiting_duration
|
||||
score_file output, file
|
||||
rescue Runner::Error => e
|
||||
e.waiting_duration = waiting_duration
|
||||
raise
|
||||
end
|
||||
end
|
||||
|
||||
def run_test_file(file, runner, waiting_duration)
|
||||
test_command = command_for execution_environment.test_command, file.name_with_extension
|
||||
result = {file_role: file.role, waiting_for_container_time: waiting_duration}
|
||||
output = runner.execute_command(test_command)
|
||||
result.merge(output)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def prepared_runner
|
||||
request_time = Time.zone.now
|
||||
begin
|
||||
runner = Runner.for(user, exercise.execution_environment)
|
||||
runner.copy_files(collect_files)
|
||||
rescue Runner::Error => e
|
||||
e.waiting_duration = Time.zone.now - request_time
|
||||
raise
|
||||
end
|
||||
waiting_duration = Time.zone.now - request_time
|
||||
yield(runner, waiting_duration)
|
||||
end
|
||||
|
||||
def command_for(template, file)
|
||||
filepath = collect_files.find {|f| f.name_with_extension == file }.filepath
|
||||
template % command_substitutions(filepath)
|
||||
end
|
||||
|
||||
def command_substitutions(filename)
|
||||
{
|
||||
class_name: File.basename(filename, File.extname(filename)).upcase_first,
|
||||
filename: filename,
|
||||
module_name: File.basename(filename, File.extname(filename)).underscore,
|
||||
}
|
||||
end
|
||||
|
||||
def score_file(output, file)
|
||||
assessor = Assessor.new(execution_environment: execution_environment)
|
||||
assessment = assessor.assess(output)
|
||||
passed = ((assessment[:passed] == assessment[:count]) and (assessment[:score]).positive?)
|
||||
testrun_output = passed ? nil : "status: #{output[:status]}\n stdout: #{output[:stdout]}\n stderr: #{output[:stderr]}"
|
||||
if testrun_output.present?
|
||||
execution_environment.error_templates.each do |template|
|
||||
pattern = Regexp.new(template.signature).freeze
|
||||
StructuredError.create_from_template(template, testrun_output, self) if pattern.match(testrun_output)
|
||||
end
|
||||
end
|
||||
testrun = Testrun.create(
|
||||
submission: self,
|
||||
cause: 'assess', # Required to differentiate between run and assess for RfC show
|
||||
file: file, # Test file that was executed
|
||||
passed: passed,
|
||||
output: testrun_output,
|
||||
container_execution_time: output[:container_execution_time],
|
||||
waiting_for_container_time: output[:waiting_for_container_time]
|
||||
)
|
||||
|
||||
filename = file.name_with_extension
|
||||
|
||||
if file.teacher_defined_linter?
|
||||
LinterCheckRun.create_from(testrun, assessment)
|
||||
assessment = assessor.translate_linter(assessment, I18n.locale)
|
||||
|
||||
# replace file name with hint if linter is not used for grading. Refactor!
|
||||
filename = I18n.t('exercises.implement.not_graded') if file.weight.zero?
|
||||
end
|
||||
|
||||
output.merge!(assessment)
|
||||
output.merge!(filename: filename, message: feedback_message(file, output), weight: file.weight)
|
||||
end
|
||||
|
||||
def feedback_message(file, output)
|
||||
if output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_test'
|
||||
I18n.t('exercises.implement.default_test_feedback')
|
||||
elsif output[:score] == Assessor::MAXIMUM_SCORE && output[:file_role] == 'teacher_defined_linter'
|
||||
I18n.t('exercises.implement.default_linter_feedback')
|
||||
else
|
||||
# The render_markdown method from application_helper.rb is not available in model classes.
|
||||
ActionController::Base.helpers.sanitize(
|
||||
Kramdown::Document.new(file.feedback_message).to_html,
|
||||
tags: %w[strong],
|
||||
attributes: []
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def combine_file_scores(outputs)
|
||||
score = 0.0
|
||||
if outputs.present?
|
||||
outputs.each do |output|
|
||||
score += output[:score] * output[:weight] unless output.nil?
|
||||
end
|
||||
end
|
||||
update(score: score)
|
||||
if normalized_score.to_d == 1.0.to_d
|
||||
Thread.new do
|
||||
RequestForComment.where(exercise_id: exercise_id, user_id: user_id, user_type: user_type).find_each do |rfc|
|
||||
rfc.full_score_reached = true
|
||||
rfc.save
|
||||
end
|
||||
ensure
|
||||
ActiveRecord::Base.connection_pool.release_connection
|
||||
end
|
||||
end
|
||||
if @embed_options.present? && @embed_options[:hide_test_results] && outputs.present?
|
||||
outputs.each do |output|
|
||||
output.except!(:error_messages, :count, :failed, :filename, :message, :passed, :stderr, :stdout)
|
||||
end
|
||||
end
|
||||
|
||||
# Return all test results except for those of a linter if not allowed
|
||||
show_linter = Python20CourseWeek.show_linter? exercise
|
||||
outputs&.reject do |output|
|
||||
next if show_linter || output.blank?
|
||||
|
||||
output[:file_role] == 'teacher_defined_linter'
|
||||
end
|
||||
end
|
||||
end
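A small worked example (illustrative only, not part of the diff) of the arithmetic in combine_file_scores above: each assessed file contributes score * weight to the submission score.
outputs = [
  {score: 1.0, weight: 2.0},  # all teacher-defined tests passed, weight 2
  {score: 0.5, weight: 1.0},  # half of the tests passed, weight 1
]
# combine_file_scores sums 1.0 * 2.0 + 0.5 * 1.0 and stores 2.5 via update(score: score).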
|
||||
|
@ -2,8 +2,5 @@
|
||||
|
||||
module Admin
|
||||
class DashboardPolicy < AdminOnlyPolicy
|
||||
def dump_docker?
|
||||
admin?
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -8,4 +8,8 @@ class ExecutionEnvironmentPolicy < AdminOnlyPolicy
|
||||
[:index?].each do |action|
|
||||
define_method(action) { admin? || teacher? }
|
||||
end
|
||||
|
||||
def sync_all_to_runner_management?
|
||||
admin?
|
||||
end
|
||||
end
|
||||
|
27
app/validators/array_validator.rb
Normal file
27
app/validators/array_validator.rb
Normal file
@ -0,0 +1,27 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class ArrayValidator < ActiveModel::EachValidator
|
||||
# Taken from https://gist.github.com/justalever/73a1b36df8468ec101f54381996fb9d1
|
||||
|
||||
def validate_each(record, attribute, values)
|
||||
Array(values).each do |value|
|
||||
options.each do |key, args|
|
||||
validator_options = {attributes: attribute}
|
||||
validator_options.merge!(args) if args.is_a?(Hash)
|
||||
|
||||
next if value.nil? && validator_options[:allow_nil]
|
||||
next if value.blank? && validator_options[:allow_blank]
|
||||
|
||||
validator_class_name = "#{key.to_s.camelize}Validator"
|
||||
validator_class = begin
|
||||
validator_class_name.constantize
|
||||
rescue NameError
|
||||
"ActiveModel::Validations::#{validator_class_name}".constantize
|
||||
end
|
||||
|
||||
validator = validator_class.new(validator_options)
|
||||
validator.validate_each(record, attribute, value)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
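A hedged sketch of how such an array validator is typically attached to a model; the attribute and the nested options are assumptions and not taken from this diff.
class ExecutionEnvironment < ApplicationRecord
  # Validate every element of the exposed_ports array individually (options are assumptions).
  validates :exposed_ports, array: {numericality: {only_integer: true, greater_than: 0, allow_nil: true}}
end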
|
13
app/validators/code_ocean/file_name_validator.rb
Normal file
13
app/validators/code_ocean/file_name_validator.rb
Normal file
@ -0,0 +1,13 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module CodeOcean
|
||||
class FileNameValidator < ActiveModel::Validator
|
||||
def validate(record)
|
||||
existing_files = File.where(name: record.name, path: record.path, file_type_id: record.file_type_id,
|
||||
context_id: record.context_id, context_type: record.context_type).to_a
|
||||
if !existing_files.empty? && (!record.context.is_a?(Exercise) || record.context.new_record?)
|
||||
record.errors[:base] << 'Duplicate'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
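A hedged sketch of wiring this validator into a model through the standard validates_with API; the exact model and call site are not shown in this diff.
module CodeOcean
  class File < ApplicationRecord
    validates_with FileNameValidator  # rejects duplicate file names within the same context
  end
end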
|
@ -13,14 +13,15 @@ div.mb-4
|
||||
= "CodeOcean Release:"
|
||||
pre = Sentry.configuration.release
|
||||
|
||||
- if DockerContainerPool.config[:active]
|
||||
- if Runner.management_active?
|
||||
div.mb-4
|
||||
= "DockerContainerPool Release:"
|
||||
pre = DockerContainerPool.dump_info['release']
|
||||
= Runner.strategy_class.name.demodulize
|
||||
=< "Release:"
|
||||
pre = Runner.strategy_class.release
|
||||
|
||||
h2 Docker
|
||||
|
||||
- if DockerContainerPool.config[:active]
|
||||
- if Runner.management_active?
|
||||
h3 = t('.current')
|
||||
.table-responsive
|
||||
table.table
|
||||
|
@ -14,12 +14,16 @@
|
||||
= f.text_field(:docker_image, class: 'alternative-input form-control', disabled: true)
|
||||
.help-block.form-text == t('.hints.docker_image')
|
||||
.form-group
|
||||
= f.label(:exposed_ports)
|
||||
= f.text_field(:exposed_ports, class: 'form-control', placeholder: '3000, 4000')
|
||||
.help-block.form-text == t('.hints.exposed_ports')
|
||||
= f.label(:exposed_ports_list)
|
||||
= f.text_field(:exposed_ports_list, class: 'form-control', placeholder: '3000, 4000', pattern: '^(\s*(\d{1,5},\s*)*(\d{1,5}\s*))?$')
|
||||
.help-block.form-text = t('.hints.exposed_ports_list')
|
||||
.form-group
|
||||
= f.label(:memory_limit)
|
||||
= f.number_field(:memory_limit, class: 'form-control', min: DockerClient::MINIMUM_MEMORY_LIMIT, value: f.object.memory_limit || DockerClient::DEFAULT_MEMORY_LIMIT)
|
||||
= f.number_field(:memory_limit, class: 'form-control', min: ExecutionEnvironment::MINIMUM_MEMORY_LIMIT, value: f.object.memory_limit || ExecutionEnvironment::DEFAULT_MEMORY_LIMIT)
|
||||
.form-group
|
||||
= f.label(:cpu_limit)
|
||||
= f.number_field(:cpu_limit, class: 'form-control', min: 1, step: 1, value: f.object.cpu_limit || ExecutionEnvironment::DEFAULT_CPU_LIMIT)
|
||||
.help-block.form-text = t('.hints.cpu_limit')
|
||||
.form-check.mb-3
|
||||
label.form-check-label
|
||||
= f.check_box(:network_enabled, class: 'form-check-input')
|
||||
|
@ -1,4 +1,9 @@
|
||||
h1 = ExecutionEnvironment.model_name.human(count: 2)
|
||||
h1.d-inline-block = ExecutionEnvironment.model_name.human(count: 2)
|
||||
|
||||
- if Runner.management_active?
|
||||
= button_to( { action: :sync_all_to_runner_management }, { form_class: 'float-right mb-2', class: 'btn btn-success' })
|
||||
i.fa.fa-upload
|
||||
= t('execution_environments.index.synchronize_all.button')
|
||||
|
||||
.table-responsive
|
||||
table.table
|
||||
@ -8,6 +13,7 @@ h1 = ExecutionEnvironment.model_name.human(count: 2)
|
||||
th = t('activerecord.attributes.execution_environment.user')
|
||||
th = t('activerecord.attributes.execution_environment.pool_size')
|
||||
th = t('activerecord.attributes.execution_environment.memory_limit')
|
||||
th = t('activerecord.attributes.execution_environment.cpu_limit')
|
||||
th = t('activerecord.attributes.execution_environment.network_enabled')
|
||||
th = t('activerecord.attributes.execution_environment.permitted_execution_time')
|
||||
th colspan=5 = t('shared.actions')
|
||||
@ -18,6 +24,7 @@ h1 = ExecutionEnvironment.model_name.human(count: 2)
|
||||
td = link_to_if(policy(execution_environment.author).show?, execution_environment.author, execution_environment.author)
|
||||
td = execution_environment.pool_size
|
||||
td = execution_environment.memory_limit
|
||||
td = execution_environment.cpu_limit
|
||||
td = symbol_for(execution_environment.network_enabled)
|
||||
td = execution_environment.permitted_execution_time
|
||||
td = link_to(t('shared.show'), execution_environment) if policy(execution_environment).show?
|
||||
|
@ -5,7 +5,7 @@ h1
|
||||
= row(label: 'execution_environment.name', value: @execution_environment.name)
|
||||
= row(label: 'execution_environment.user', value: link_to_if(policy(@execution_environment.author).show?, @execution_environment.author, @execution_environment.author))
|
||||
= row(label: 'execution_environment.file_type', value: @execution_environment.file_type.present? ? link_to(@execution_environment.file_type, @execution_environment.file_type) : nil)
|
||||
- [:docker_image, :exposed_ports, :memory_limit, :network_enabled, :permitted_execution_time, :pool_size].each do |attribute|
|
||||
- [:docker_image, :exposed_ports_list, :memory_limit, :cpu_limit, :network_enabled, :permitted_execution_time, :pool_size].each do |attribute|
|
||||
= row(label: "execution_environment.#{attribute}", value: @execution_environment.send(attribute))
|
||||
- [:run_command, :test_command].each do |attribute|
|
||||
= row(label: "execution_environment.#{attribute}") do
|
||||
|
@ -49,6 +49,9 @@ module CodeOcean
|
||||
config.after_initialize do
|
||||
# Initialize the counters according to the db
|
||||
Prometheus::Controller.initialize_metrics
|
||||
|
||||
# Initialize the runner environment
|
||||
Runner.strategy_class.initialize_environment
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -9,3 +9,10 @@ test:
|
||||
enabled: false
|
||||
prometheus_exporter:
|
||||
enabled: false
|
||||
runner_management:
|
||||
enabled: true
|
||||
strategy: poseidon
|
||||
url: https://runners.example.org
|
||||
ca_file: /example/certificates/ca.crt
|
||||
token: SECRET
|
||||
unused_runner_expiration_time: 180
|
||||
|
@ -1,33 +1,73 @@
|
||||
default: &default
|
||||
flowr:
|
||||
# When enabled, flowr can assist learners with related search results from
|
||||
# StackOverflow.com regarding exceptions that occurred during code execution.
|
||||
# The search is initiated through the learners' browser and displayed in the output pane.
|
||||
enabled: false
|
||||
# The number of search results to be displayed
|
||||
answers_per_query: 3
|
||||
|
||||
code_pilot:
|
||||
# When enabled, CodePilot can be used by learners to request individual help by a tutor
|
||||
# through a video conferencing system. Optionally, it also provides access to recordings
|
||||
# of previous sessions. Support for CodePilot is currently in beta.
|
||||
enabled: false
|
||||
# The root URL of CodePilot
|
||||
url: //localhost:3000
|
||||
|
||||
codeharbor:
|
||||
# When enabled, CodeHarbor is integrated in the teachers' view and allows importing
|
||||
# and exporting exercises from CodeOcean using the ProFormA XML format to CodeHarbor.
|
||||
enabled: false
|
||||
# The root URL of CodeHarbor
|
||||
url: https://codeharbor.openhpi.de
|
||||
|
||||
codeocean_events:
|
||||
# When enabled, learner-specific events within the editor are stored and can be used
|
||||
# as part of learning analytics. This setting enables the JavaScript event handlers.
|
||||
enabled: false
|
||||
|
||||
prometheus_exporter:
|
||||
# When enabled, a dedicated endpoint using the Prometheus format is offered and might
|
||||
# be used by a Prometheus-compatible monitoring system. Exported metrics include absolute
|
||||
# counters of all relations with specific support for Request-for-Comments.
|
||||
enabled: false
|
||||
|
||||
runner_management:
|
||||
# When enabled, CodeOcean delegates the handling and management of (containerized) runners
|
||||
# to a dedicated runner management. Otherwise, code executions are performed locally using
|
||||
# Docker and without pre-warming support (one container per execution).
|
||||
enabled: false
|
||||
# The strategy to use. Possible values are: poseidon, docker_container_pool
|
||||
strategy: poseidon
|
||||
# The root URL of the runner management to use
|
||||
# If a hostname is specified and the target host is reachable via IPv6, the WebSocket
|
||||
# connection might not use the IPv6-to-IPv4 fallback but rather fail unexpectedly.
|
||||
url: https://runners.example.org
|
||||
# The root certificate authority to trust for TLS connections to the runner management (Poseidon only)
|
||||
ca_file: /example/certificates/ca.crt
|
||||
# The authorization token for connections to the runner management (Poseidon only)
|
||||
# If TLS support is not enabled, this token is transmitted in clear text!
|
||||
token: SECRET
|
||||
# The maximum time in seconds a runner may idle at the runner management before it is removed.
|
||||
# Each new interaction with the runner resets this time. Thus, this value should
|
||||
# be strictly greater than any permitted execution time of an execution environment.
|
||||
unused_runner_expiration_time: 180
|
||||
|
||||
|
||||
development:
|
||||
<<: *default
|
||||
flowr:
|
||||
enabled: true
|
||||
answers_per_query: 3
|
||||
code_pilot:
|
||||
enabled: false
|
||||
url: //localhost:3000
|
||||
codeharbor:
|
||||
enabled: true
|
||||
url: https://codeharbor.openhpi.de
|
||||
prometheus_exporter:
|
||||
enabled: false
|
||||
|
||||
|
||||
production:
|
||||
<<: *default
|
||||
prometheus_exporter:
|
||||
enabled: true
|
||||
|
||||
|
||||
test:
|
||||
<<: *default
|
||||
prometheus_exporter:
|
||||
enabled: false
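A rough sketch (assumptions only, not taken from this diff) of how the strategy key above could be resolved to a strategy class at runtime; the CodeOcean::Config call and both class names are assumptions.
config = CodeOcean::Config.new(:code_ocean).read[:runner_management] || {}
strategy_class = case config[:strategy]&.to_sym
                 when :poseidon then Runner::Strategy::Poseidon                          # assumption
                 when :docker_container_pool then Runner::Strategy::DockerContainerPool  # assumption
                 end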
|
||||
|
@ -3,17 +3,14 @@ default: &default
|
||||
connection_timeout: 3
|
||||
pool:
|
||||
active: false
|
||||
location: http://localhost:7100
|
||||
ports: !ruby/range 4500..4600
|
||||
|
||||
development:
|
||||
<<: *default
|
||||
host: tcp://127.0.0.1:2376
|
||||
ws_host: ws://127.0.0.1:2376 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'ws:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
pool:
|
||||
location: http://localhost:7100
|
||||
active: true
|
||||
refill:
|
||||
async: false
|
||||
@ -27,7 +24,6 @@ production:
|
||||
host: unix:///var/run/docker.sock
|
||||
pool:
|
||||
active: true
|
||||
location: http://localhost:3000
|
||||
refill:
|
||||
async: false
|
||||
batch_size: 8
|
||||
@ -35,14 +31,12 @@ production:
|
||||
timeout: 60
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
ws_host: ws://localhost:4243 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'wss:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
|
||||
staging:
|
||||
<<: *default
|
||||
host: unix:///var/run/docker.sock
|
||||
pool:
|
||||
active: true
|
||||
location: http://localhost:3000
|
||||
refill:
|
||||
async: false
|
||||
batch_size: 8
|
||||
@ -50,10 +44,8 @@ staging:
|
||||
timeout: 60
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
ws_host: ws://localhost:4243 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'wss:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
|
||||
test:
|
||||
<<: *default
|
||||
host: tcp://127.0.0.1:2376
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
ws_client_protocol: 'ws:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
|
@ -3,18 +3,15 @@ default: &default
|
||||
connection_timeout: 3
|
||||
pool:
|
||||
active: false
|
||||
location: http://localhost:7100
|
||||
ports: !ruby/range 4500..4600
|
||||
|
||||
development:
|
||||
<<: *default
|
||||
host: tcp://127.0.0.1:2376
|
||||
ws_host: ws://127.0.0.1:2376 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'ws:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
pool:
|
||||
active: true
|
||||
location: http://localhost:7100
|
||||
refill:
|
||||
async: false
|
||||
batch_size: 8
|
||||
@ -27,7 +24,6 @@ production:
|
||||
host: unix:///var/run/docker.sock
|
||||
pool:
|
||||
active: true
|
||||
location: http://localhost:7100
|
||||
refill:
|
||||
async: false
|
||||
batch_size: 8
|
||||
@ -35,14 +31,12 @@ production:
|
||||
timeout: 60
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
ws_host: ws://localhost:4243 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'wss:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
|
||||
staging:
|
||||
<<: *default
|
||||
host: unix:///var/run/docker.sock
|
||||
pool:
|
||||
active: true
|
||||
location: http://localhost:7100
|
||||
refill:
|
||||
async: false
|
||||
batch_size: 8
|
||||
@ -50,7 +44,6 @@ staging:
|
||||
timeout: 60
|
||||
workspace_root: <%= Rails.root.join('tmp', 'files', Rails.env) %>
|
||||
ws_host: ws://localhost:4243 #url to connect rails server to docker host
|
||||
ws_client_protocol: 'wss:' #set the websocket protocol to be used by the client to connect to the rails server (ws on development, wss on production)
|
||||
|
||||
test:
|
||||
<<: *default
|
||||
|
@ -66,7 +66,7 @@ Rails.application.configure do
|
||||
config.assets.quiet = true
|
||||
|
||||
# Raises error for missing translations.
|
||||
config.action_view.raise_on_missing_translations = true
|
||||
config.i18n.raise_on_missing_translations = true
|
||||
|
||||
# Annotate rendered view with file names.
|
||||
# config.action_view.annotate_rendered_view_with_filenames = true
|
||||
|
@ -32,7 +32,7 @@ Rails.application.configure do
|
||||
config.assets.raise_runtime_errors = true
|
||||
|
||||
# Raise errors for missing translations.
|
||||
config.action_view.raise_on_missing_translations = true
|
||||
config.i18n.raise_on_missing_translations = true
|
||||
|
||||
# Enable Rack::Cache to put a simple HTTP cache in front of your application
|
||||
# Add `rack-cache` to your Gemfile before enabling this.
|
||||
|
@ -1,5 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'docker_client'
|
||||
|
||||
DockerClient.initialize_environment unless Rails.env.test? && `which docker`.blank?
|
@ -10,14 +10,16 @@ de:
|
||||
execution_environment:
|
||||
docker_image: Docker-Image
|
||||
exposed_ports: Zugängliche Ports
|
||||
exposed_ports_list: Zugängliche Ports
|
||||
file_type: Standard-Dateityp
|
||||
file_type_id: Standard-Dateityp
|
||||
help: Hilfetext
|
||||
memory_limit: Speicher-Limit (in MB)
|
||||
cpu_limit: CPU-Limit (in MHz)
|
||||
network_enabled: Netzwerkzugriff
|
||||
name: Name
|
||||
permitted_execution_time: Erlaubte Ausführungszeit (in Sekunden)
|
||||
pool_size: Docker-Container-Pool-Größe
|
||||
pool_size: Prewarming-Pool-Größe
|
||||
run_command: Ausführungsbefehl
|
||||
test_command: Testbefehl
|
||||
testing_framework: Testing-Framework
|
||||
@ -238,7 +240,7 @@ de:
|
||||
show:
|
||||
current: Aktuelle Verfügbarkeit
|
||||
history: Verfügbarkeitsverlauf
|
||||
inactive: Der Container-Pool ist nicht aktiv.
|
||||
inactive: Es ist kein Runner Management aktiv.
|
||||
quantity: Verfügbare Container
|
||||
application:
|
||||
not_authorized: Sie sind nicht berechtigt, diese Aktion auszuführen.
|
||||
@ -281,9 +283,16 @@ de:
|
||||
hints:
|
||||
command: <em>filename</em> wird automatisch durch den richtigen Dateinamen ersetzt.
|
||||
docker_image: 'Wählen Sie ein Docker-Image aus der Liste oder fügen Sie ein neues hinzu, welches über <a href="https://hub.docker.com/" target="_blank">DockerHub</a> verfügbar ist.'
|
||||
exposed_ports: Während der Ausführung sind diese Ports für den Nutzer zugänglich.
|
||||
exposed_ports_list: Während der Ausführung sind diese Ports für den Nutzer zugänglich. Die Portnummern müssen numerisch und mit Komma voneinander getrennt sein.
|
||||
cpu_limit: Geben Sie die Mindestmenge an CPU-Anteilen an, die für jeden Runner reserviert werden soll, gemessen in MHz.
|
||||
errors:
|
||||
not_synced_to_runner_management: Die Ausführungsumgebung wurde erstellt, aber aufgrund eines Fehlers nicht zum Runnermanagement synchronisiert.
|
||||
index:
|
||||
shell: Shell
|
||||
synchronize_all:
|
||||
button: Alle synchronisieren
|
||||
success: Alle Ausführungsumgebungen wurden erfolgreich synchronisiert.
|
||||
failure: Beim Synchronisieren mindestens einer Ausführungsumgebung ist ein Fehler aufgetreten.
|
||||
shell:
|
||||
command: Befehl
|
||||
headline: Shell
|
||||
@ -397,6 +406,10 @@ de:
|
||||
hint: Hinweis
|
||||
no_files: Die Aufgabe umfasst noch keine sichtbaren Dateien.
|
||||
no_output: Die letzte Code-Ausführung terminierte am %{timestamp} ohne Ausgabe.
|
||||
no_output_exit_successful: Die letzte Code-Ausführung terminierte am %{timestamp} ohne Ausgabe und wurde erfolgreich beendet (Statuscode %{exit_code}).
|
||||
no_output_exit_failure: Die letzte Code-Ausführung terminierte am %{timestamp} ohne Ausgabe und wurde mit einem Fehler beendet (Statuscode %{exit_code}).
|
||||
exit_successful: Die letzte Code-Ausführung wurde erfolgreich beendet (Statuscode %{exit_code}).
|
||||
exit_failure: Die letzte Code-Ausführung wurde mit einem Fehler beendet (Statuscode %{exit_code}).
|
||||
no_output_yet: Bisher existiert noch keine Ausgabe.
|
||||
output: Programm-Ausgabe
|
||||
passed_tests: Erfolgreiche Tests
|
||||
|
@ -10,14 +10,16 @@ en:
|
||||
execution_environment:
|
||||
docker_image: Docker Image
|
||||
exposed_ports: Exposed Ports
|
||||
exposed_ports_list: Exposed Ports
|
||||
file_type: Default File Type
|
||||
file_type_id: Default File Type
|
||||
help: Help Text
|
||||
memory_limit: Memory Limit (in MB)
|
||||
cpu_limit: CPU Limit (in MHz)
|
||||
name: Name
|
||||
network_enabled: Network Enabled
|
||||
permitted_execution_time: Permitted Execution Time (in Seconds)
|
||||
pool_size: Docker Container Pool Size
|
||||
pool_size: Prewarming Pool Size
|
||||
run_command: Run Command
|
||||
test_command: Test Command
|
||||
testing_framework: Testing Framework
|
||||
@ -238,7 +240,7 @@ en:
|
||||
show:
|
||||
current: Current Availability
|
||||
history: Availability History
|
||||
inactive: Container pooling is not enabled.
|
||||
inactive: No runner management is currently enabled.
|
||||
quantity: Available Containers
|
||||
application:
|
||||
not_authorized: You are not authorized to perform this action.
|
||||
@ -281,9 +283,16 @@ en:
|
||||
hints:
|
||||
command: <em>filename</em> is automatically replaced with the correct filename.
|
||||
docker_image: Pick a Docker image listed above or add a new one which is available via <a href="https://hub.docker.com/" target="_blank">DockerHub</a>.
|
||||
exposed_ports: During code execution these ports are accessible for the user.
|
||||
exposed_ports_list: During code execution these ports are accessible for the user. Port numbers must be numeric and separated by a comma.
|
||||
cpu_limit: Specify the minimum amount of CPU shares to reserve for each runner, measured in MHz.
|
||||
errors:
|
||||
not_synced_to_runner_management: The execution environment was created but not synced to the runner management due to an error.
|
||||
index:
|
||||
shell: Shell
|
||||
synchronize_all:
|
||||
button: Synchronize all
|
||||
success: All execution environments were synchronized successfully.
|
||||
failure: At least one execution environment could not be synchronized due to an error.
|
||||
shell:
|
||||
command: Command
|
||||
headline: Shell
|
||||
@ -397,6 +406,10 @@ en:
|
||||
hint: Hint
|
||||
no_files: The exercise does not comprise visible files yet.
|
||||
no_output: The last code run finished on %{timestamp} without any output.
|
||||
no_output_exit_successful: The last code run finished on %{timestamp} without any output and exited successfully (status code %{exit_code}).
|
||||
no_output_exit_failure: The last code run finished on %{timestamp} without any output and exited with a failure (status code %{exit_code}).
|
||||
exit_successful: The last code run exited successfully (status code %{exit_code}).
|
||||
exit_failure: The last code run exited with a failure (status code %{exit_code}).
|
||||
no_output_yet: There is no output yet.
|
||||
output: Program Output
|
||||
passed_tests: Passed Tests
|
||||
|
@ -66,6 +66,8 @@ Rails.application.routes.draw do
|
||||
post 'shell', as: :execute_command, action: :execute_command
|
||||
get :statistics
|
||||
end
|
||||
|
||||
post :sync_all_to_runner_management, on: :collection
|
||||
end
|
||||
|
||||
post '/import_exercise' => 'exercises#import_exercise'
|
||||
|
@ -6,7 +6,7 @@ class AddMemoryLimitToExecutionEnvironments < ActiveRecord::Migration[4.2]
|
||||
|
||||
reversible do |direction|
|
||||
direction.up do
|
||||
ExecutionEnvironment.update(memory_limit: DockerClient::DEFAULT_MEMORY_LIMIT)
|
||||
ExecutionEnvironment.update(memory_limit: ExecutionEnvironment::DEFAULT_MEMORY_LIMIT)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -34,7 +34,7 @@ class DropErrors < ActiveRecord::Migration[5.2]
|
||||
|
||||
submissions_controller.instance_variable_set(:@raw_output, raw_output)
|
||||
submissions_controller.instance_variable_set(:@submission, submission)
|
||||
submissions_controller.extract_errors
|
||||
submissions_controller.send(:extract_errors)
|
||||
end
|
||||
|
||||
drop_table :errors
|
||||
|
13
db/migrate/20210519134938_create_runners.rb
Normal file
13
db/migrate/20210519134938_create_runners.rb
Normal file
@ -0,0 +1,13 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class CreateRunners < ActiveRecord::Migration[6.1]
|
||||
def change
|
||||
create_table :runners do |t|
|
||||
t.string :runner_id
|
||||
t.references :execution_environment
|
||||
t.references :user, polymorphic: true
|
||||
|
||||
t.timestamps
|
||||
end
|
||||
end
|
||||
end
|
@ -0,0 +1,7 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class AddCpuLimitToExecutionEnvironment < ActiveRecord::Migration[6.1]
|
||||
def change
|
||||
add_column :execution_environments, :cpu_limit, :integer, null: false, default: 20
|
||||
end
|
||||
end
|
@ -0,0 +1,37 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
class ChangeTypeOfExposedPortsInExecutionEnvironment < ActiveRecord::Migration[6.1]
|
||||
# rubocop:disable Rails/SkipsModelValidations:
|
||||
def up
|
||||
rename_column :execution_environments, :exposed_ports, :exposed_ports_migration
|
||||
add_column :execution_environments, :exposed_ports, :integer, array: true, default: [], nil: true
|
||||
|
||||
ExecutionEnvironment.all.each do |execution_environment|
|
||||
next if execution_environment.exposed_ports_migration.nil?
|
||||
|
||||
cleaned = execution_environment.exposed_ports_migration.scan(/\d+/)
|
||||
list = cleaned.map(&:to_i).uniq.sort
|
||||
execution_environment.update_columns(exposed_ports: list)
|
||||
end
|
||||
|
||||
remove_column :execution_environments, :exposed_ports_migration
|
||||
end
|
||||
|
||||
def down
|
||||
rename_column :execution_environments, :exposed_ports, :exposed_ports_migration
|
||||
add_column :execution_environments, :exposed_ports, :string
|
||||
|
||||
ExecutionEnvironment.all.each do |execution_environment|
|
||||
next if execution_environment.exposed_ports_migration.empty?
|
||||
|
||||
list = execution_environment.exposed_ports_migration
|
||||
if list.empty?
|
||||
execution_environment.update_columns(exposed_ports: nil)
|
||||
else
|
||||
execution_environment.update_columns(exposed_ports: list.join(','))
|
||||
end
|
||||
end
|
||||
remove_column :execution_environments, :exposed_ports_migration
|
||||
end
|
||||
# rubocop:enable Rails/SkipsModelValidations:
|
||||
end
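Illustrative only: the string-to-array conversion performed by the `up` direction of the migration above.
'3000, 4000, 4000'.scan(/\d+/).map(&:to_i).uniq.sort  # => [3000, 4000]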
|
16
db/schema.rb
16
db/schema.rb
@ -10,7 +10,7 @@
|
||||
#
|
||||
# It's strongly recommended that you check this file into your version control system.
|
||||
|
||||
ActiveRecord::Schema.define(version: 2021_05_12_133612) do
|
||||
ActiveRecord::Schema.define(version: 2021_06_02_071834) do
|
||||
|
||||
# These are extensions that must be enabled in order to support this database
|
||||
enable_extension "pg_trgm"
|
||||
@ -104,7 +104,6 @@ ActiveRecord::Schema.define(version: 2021_05_12_133612) do
|
||||
t.string "test_command", limit: 255
|
||||
t.string "testing_framework", limit: 255
|
||||
t.text "help"
|
||||
t.string "exposed_ports", limit: 255
|
||||
t.integer "permitted_execution_time"
|
||||
t.integer "user_id"
|
||||
t.string "user_type", limit: 255
|
||||
@ -112,6 +111,8 @@ ActiveRecord::Schema.define(version: 2021_05_12_133612) do
|
||||
t.integer "file_type_id"
|
||||
t.integer "memory_limit"
|
||||
t.boolean "network_enabled"
|
||||
t.integer "cpu_limit", default: 20, null: false
|
||||
t.integer "exposed_ports", default: [], array: true
|
||||
end
|
||||
|
||||
create_table "exercise_collection_items", id: :serial, force: :cascade do |t|
|
||||
@ -339,6 +340,17 @@ ActiveRecord::Schema.define(version: 2021_05_12_133612) do
|
||||
t.index ["user_id", "user_type", "created_at"], name: "index_rfc_on_user_and_created_at", order: { created_at: :desc }
|
||||
end
|
||||
|
||||
create_table "runners", force: :cascade do |t|
|
||||
t.string "runner_id"
|
||||
t.bigint "execution_environment_id"
|
||||
t.string "user_type"
|
||||
t.bigint "user_id"
|
||||
t.datetime "created_at", precision: 6, null: false
|
||||
t.datetime "updated_at", precision: 6, null: false
|
||||
t.index ["execution_environment_id"], name: "index_runners_on_execution_environment_id"
|
||||
t.index ["user_type", "user_id"], name: "index_runners_on_user"
|
||||
end
|
||||
|
||||
create_table "searches", id: :serial, force: :cascade do |t|
|
||||
t.integer "exercise_id", null: false
|
||||
t.integer "user_id", null: false
|
||||
|
@ -1,6 +1,5 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'concurrent'
|
||||
require 'pathname'
|
||||
|
||||
class DockerClient
|
||||
@ -9,10 +8,8 @@ class DockerClient
|
||||
end
|
||||
|
||||
CONTAINER_WORKSPACE_PATH = '/workspace' # '/home/python/workspace' #'/tmp/workspace'
|
||||
DEFAULT_MEMORY_LIMIT = 256
|
||||
# Ralf: I suggest to replace this with the environment variable. Ask Hauke why this is not the case!
|
||||
LOCAL_WORKSPACE_ROOT = File.expand_path(config[:workspace_root])
|
||||
MINIMUM_MEMORY_LIMIT = 4
|
||||
RECYCLE_CONTAINERS = false
|
||||
RETRY_COUNT = 2
|
||||
MINIMUM_CONTAINER_LIFETIME = 10.minutes
|
||||
@ -58,7 +55,6 @@ class DockerClient
|
||||
{
|
||||
'Image' => find_image_by_tag(execution_environment.docker_image).info['RepoTags'].first,
|
||||
'NetworkDisabled' => !execution_environment.network_enabled?,
|
||||
# DockerClient.config['allowed_cpus']
|
||||
'OpenStdin' => true,
|
||||
'StdinOnce' => true,
|
||||
# required to expose standard streams over websocket
|
||||
@ -86,7 +82,7 @@ class DockerClient
|
||||
# Headers are required by Docker
|
||||
headers = {'Origin' => 'http://localhost'}
|
||||
|
||||
socket_url = "#{DockerClient.config['ws_host']}/v1.27/containers/#{@container.id}/attach/ws?#{query_params}"
|
||||
socket_url = "#{self.class.config['ws_host']}/v1.27/containers/#{@container.id}/attach/ws?#{query_params}"
|
||||
# The ping value is measured in seconds and specifies how often a Ping frame should be sent.
|
||||
# Internally, Faye::WebSocket uses EventMachine and the ping value is used to wake the EventMachine thread
|
||||
socket = Faye::WebSocket::Client.new(socket_url, [], headers: headers, ping: 0.1)
|
||||
@ -122,13 +118,11 @@ class DockerClient
|
||||
container.start_time = Time.zone.now
|
||||
container.status = :created
|
||||
container.execution_environment = execution_environment
|
||||
container.re_use = true
|
||||
container.docker_client = new(execution_environment: execution_environment)
|
||||
|
||||
Thread.new do
|
||||
timeout = Random.rand(MINIMUM_CONTAINER_LIFETIME..MAXIMUM_CONTAINER_LIFETIME) # seconds
|
||||
sleep(timeout)
|
||||
container.re_use = false
|
||||
if container.status == :executing
|
||||
Thread.new do
|
||||
timeout = SELF_DESTROY_GRACE_PERIOD.to_i
|
||||
@ -230,7 +224,7 @@ class DockerClient
|
||||
Rails.logger.info("destroying container #{container}")
|
||||
|
||||
# Checks only if container assignment is not nil and not whether the container itself is still present.
|
||||
if container && !DockerContainerPool.config[:active]
|
||||
if container
|
||||
container.kill
|
||||
container.port_bindings.each_value {|port| PortPool.release(port) }
|
||||
begin
|
||||
@ -243,8 +237,6 @@ class DockerClient
|
||||
|
||||
# Checks only if container assignment is not nil and not whether the container itself is still present.
|
||||
container&.delete(force: true, v: true)
|
||||
elsif container
|
||||
DockerContainerPool.destroy_container(container)
|
||||
end
|
||||
rescue Docker::Error::NotFoundError => e
|
||||
Rails.logger.error("destroy_container: Rescued from Docker::Error::NotFoundError: #{e}")
|
||||
@ -264,7 +256,7 @@ class DockerClient
|
||||
def execute_command(command, before_execution_block, output_consuming_block)
|
||||
# tries ||= 0
|
||||
container_request_time = Time.zone.now
|
||||
@container = DockerContainerPool.get_container(@execution_environment)
|
||||
@container = self.class.create_container(@execution_environment)
|
||||
waiting_for_container_time = Time.zone.now - container_request_time
|
||||
if @container
|
||||
@container.status = :executing
|
||||
@ -288,7 +280,7 @@ container_execution_time: nil}
|
||||
|
||||
# called when the user clicks the "Run" button
|
||||
def open_websocket_connection(command, before_execution_block, _output_consuming_block)
|
||||
@container = DockerContainerPool.get_container(@execution_environment)
|
||||
@container = self.class.create_container(@execution_environment)
|
||||
if @container
|
||||
@container.status = :executing
|
||||
# do not use try here, directly call the passed proc and rescue from the error in order to log the problem.
|
||||
@ -354,13 +346,7 @@ container_execution_time: nil}
|
||||
# exit the timeout thread if it is still alive
|
||||
exit_thread_if_alive
|
||||
@socket.close
|
||||
# if we use pooling and recylce the containers, put it back. otherwise, destroy it.
|
||||
if DockerContainerPool.config[:active] && RECYCLE_CONTAINERS
|
||||
self.class.return_container(container,
|
||||
@execution_environment)
|
||||
else
|
||||
self.class.destroy_container(container)
|
||||
end
|
||||
self.class.destroy_container(container)
|
||||
end
|
||||
|
||||
def kill_container(container)
|
||||
@ -416,7 +402,6 @@ container_execution_time: nil}
|
||||
end
|
||||
|
||||
def self.initialize_environment
|
||||
# TODO: Move to DockerContainerPool
|
||||
raise Error.new('Docker configuration missing!') unless config[:connection_timeout] && config[:workspace_root]
|
||||
|
||||
Docker.url = config[:host] if config[:host]
|
||||
@ -449,7 +434,7 @@ container_execution_time: nil}
|
||||
end
|
||||
|
||||
def self.mapped_ports(execution_environment)
|
||||
(execution_environment.exposed_ports || '').gsub(/\s/, '').split(',').map do |port|
|
||||
execution_environment.exposed_ports.map do |port|
|
||||
["#{port}/tcp", [{'HostPort' => PortPool.available_port.to_s}]]
|
||||
end.to_h
|
||||
end
|
||||
@ -458,21 +443,6 @@ container_execution_time: nil}
|
||||
`docker pull #{docker_image}` if docker_image
|
||||
end
|
||||
|
||||
def self.return_container(container, execution_environment)
|
||||
Rails.logger.debug { "returning container #{container}" }
|
||||
begin
|
||||
clean_container_workspace(container)
|
||||
rescue Docker::Error::NotFoundError => e
|
||||
# FIXME: Create new container?
|
||||
Rails.logger.info("return_container: Rescued from Docker::Error::NotFoundError: #{e}")
|
||||
Rails.logger.info('Nothing is done here additionally. The container will be exchanged upon its next retrieval.')
|
||||
end
|
||||
DockerContainerPool.return_container(container, execution_environment)
|
||||
container.status = :available
|
||||
end
|
||||
|
||||
# private :return_container
|
||||
|
||||
def send_command(command, container)
|
||||
result = {status: :failed, stdout: '', stderr: ''}
|
||||
output = nil
|
||||
@ -492,12 +462,7 @@ container_execution_time: nil}
|
||||
result = {status: (output[2])&.zero? ? :ok : :failed, stdout: output[0].join.force_encoding('utf-8'), stderr: output[1].join.force_encoding('utf-8')}
|
||||
end
|
||||
|
||||
# if we use pooling and recylce the containers, put it back. otherwise, destroy it.
|
||||
if DockerContainerPool.config[:active] && RECYCLE_CONTAINERS
|
||||
self.class.return_container(container, @execution_environment)
|
||||
else
|
||||
self.class.destroy_container(container)
|
||||
end
|
||||
self.class.destroy_container(container)
|
||||
result
|
||||
rescue Timeout::Error
|
||||
Rails.logger.info("got timeout error for container #{container}")
|
||||
|
@ -1,7 +1,7 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module DockerContainerMixin
|
||||
attr_accessor :start_time, :status, :re_use, :execution_environment, :docker_client
|
||||
attr_accessor :start_time, :status, :execution_environment, :docker_client
|
||||
|
||||
def binds
|
||||
host_config['Binds']
|
||||
|
@ -1,70 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'concurrent/future'
|
||||
require 'concurrent/timer_task'
|
||||
|
||||
class DockerContainerPool
|
||||
def self.config
|
||||
# TODO: Why erb?
|
||||
@config ||= CodeOcean::Config.new(:docker).read(erb: true)[:pool]
|
||||
end
|
||||
|
||||
def self.create_container(execution_environment)
|
||||
Rails.logger.info("trying to create container for execution environment: #{execution_environment}")
|
||||
container = DockerClient.create_container(execution_environment)
|
||||
container.status = 'available' # FIXME: String vs Symbol usage?
|
||||
# Rails.logger.debug('created container ' + container.to_s + ' for execution environment ' + execution_environment.to_s)
|
||||
container
|
||||
rescue StandardError => e
|
||||
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect,
|
||||
config: config.inspect})
|
||||
Sentry.capture_exception(e)
|
||||
nil
|
||||
end
|
||||
|
||||
def self.return_container(container, execution_environment)
|
||||
Faraday.get("#{config[:location]}/docker_container_pool/return_container/#{container.id}")
|
||||
rescue StandardError => e
|
||||
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect,
|
||||
config: config.inspect})
|
||||
Sentry.capture_exception(e)
|
||||
nil
|
||||
end
|
||||
|
||||
def self.get_container(execution_environment)
|
||||
# if pooling is active, do pooling, otherwise just create an container and return it
|
||||
if config[:active]
|
||||
begin
|
||||
container_id = JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/get_container/#{execution_environment.id}").body)['id']
|
||||
Docker::Container.get(container_id) if container_id.present?
|
||||
rescue StandardError => e
|
||||
Sentry.set_extras({container_id: container_id.inspect, execution_environment: execution_environment.inspect,
|
||||
config: config.inspect})
|
||||
Sentry.capture_exception(e)
|
||||
nil
|
||||
end
|
||||
else
|
||||
create_container(execution_environment)
|
||||
end
|
||||
end
|
||||
|
||||
def self.destroy_container(container)
|
||||
Faraday.get("#{config[:location]}/docker_container_pool/destroy_container/#{container.id}")
|
||||
end
|
||||
|
||||
def self.quantities
|
||||
response = JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/quantities").body)
|
||||
response.transform_keys(&:to_i)
|
||||
rescue StandardError => e
|
||||
Sentry.set_extras({response: response.inspect})
|
||||
Sentry.capture_exception(e)
|
||||
[]
|
||||
end
|
||||
|
||||
def self.dump_info
|
||||
JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/dump_info").body)
|
||||
rescue StandardError => e
|
||||
Sentry.capture_exception(e)
|
||||
nil
|
||||
end
|
||||
end
|
@ -11,6 +11,7 @@ class PyUnitAdapter < TestingFrameworkAdapter
|
||||
end
|
||||
|
||||
def parse_output(output)
|
||||
# PyUnit is expected to print test results on Stderr!
|
||||
count = COUNT_REGEXP.match(output[:stderr]).captures.first.to_i
|
||||
failures_matches = FAILURES_REGEXP.match(output[:stderr])
|
||||
failed = failures_matches ? failures_matches.captures.try(:first).to_i : 0
|
||||
|
44
lib/runner/backend-output.schema.json
Normal file
44
lib/runner/backend-output.schema.json
Normal file
@ -0,0 +1,44 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/schema#",
|
||||
"title": "event",
|
||||
"type": "object",
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "exit",
|
||||
"required": true
|
||||
},
|
||||
"data": {
|
||||
"type": "integer",
|
||||
"required": true,
|
||||
"minimum": 0,
|
||||
"maximum": 255
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [ "stdout", "stderr", "error" ],
|
||||
"required": true
|
||||
},
|
||||
"data": {
|
||||
"type": "string",
|
||||
"required": true
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [ "start", "timeout" ],
|
||||
"required": true
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
]
|
||||
}
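Illustrative only: how this schema is evaluated with json_schemer; the same calls appear in Runner::Connection below.
schema = JSONSchemer.schema(JSON.parse(File.read('lib/runner/backend-output.schema.json')))
schema.valid?({'type' => 'exit', 'data' => 0})     # => true
schema.valid?({'type' => 'stdout', 'data' => 42})  # => false, data must be a string here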
|
200
lib/runner/connection.rb
Normal file
200
lib/runner/connection.rb
Normal file
@ -0,0 +1,200 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'faye/websocket/client'
|
||||
require 'json_schemer'
|
||||
|
||||
class Runner::Connection
|
||||
# These are events for which callbacks can be registered.
|
||||
EVENTS = %i[start exit stdout stderr].freeze
|
||||
WEBSOCKET_MESSAGE_TYPES = %i[start stdout stderr error timeout exit].freeze
|
||||
BACKEND_OUTPUT_SCHEMA = JSONSchemer.schema(JSON.parse(File.read('lib/runner/backend-output.schema.json')))
|
||||
|
||||
# @!attribute start_callback
|
||||
# @!attribute exit_callback
|
||||
# @!attribute stdout_callback
|
||||
# @!attribute stderr_callback
|
||||
attr_writer :status
|
||||
attr_reader :error
|
||||
|
||||
def initialize(url, strategy, event_loop, locale = I18n.locale)
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Opening connection to #{url}" }
|
||||
@socket = Faye::WebSocket::Client.new(url, [], strategy.class.websocket_header)
|
||||
@strategy = strategy
|
||||
@status = :established
|
||||
@event_loop = event_loop
|
||||
@locale = locale
|
||||
@stdout_buffer = Buffer.new
|
||||
@stderr_buffer = Buffer.new
|
||||
|
||||
# For every event type of Faye WebSockets, the corresponding
|
||||
# RunnerConnection method starting with `on_` is called.
|
||||
%i[open message error close].each do |event_type|
|
||||
@socket.on(event_type) do |event|
|
||||
# The initial locale when establishing the connection is used for all callbacks
|
||||
I18n.with_locale(@locale) { __send__(:"on_#{event_type}", event) }
|
||||
end
|
||||
end
|
||||
|
||||
# This registers empty default callbacks.
|
||||
EVENTS.each {|event_type| instance_variable_set(:"@#{event_type}_callback", ->(e) {}) }
|
||||
@start_callback = -> {}
|
||||
# Fail if no exit status was returned.
|
||||
@exit_code = 1
|
||||
end
|
||||
|
||||
# Register a callback based on the event type received from runner management
|
||||
def on(event, &block)
|
||||
return unless EVENTS.include? event
|
||||
|
||||
instance_variable_set(:"@#{event}_callback", block)
|
||||
end
|
||||
|
||||
# Send arbitrary data in the WebSocket connection
|
||||
def send_data(raw_data)
|
||||
encoded_message = encode(raw_data)
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Sending to #{@socket.url}: #{encoded_message.inspect}" }
|
||||
@socket.send(encoded_message)
|
||||
end
|
||||
|
||||
# Close the WebSocket connection
|
||||
def close(status)
|
||||
return unless active?
|
||||
|
||||
@status = status
|
||||
@socket.close
|
||||
end
|
||||
|
||||
# Check if the WebSocket connection is currently established
|
||||
def active?
|
||||
@status == :established
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def decode(_raw_event)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
def encode(_data)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
def flush_buffers
|
||||
@stdout_callback.call @stdout_buffer.flush unless @stdout_buffer.empty?
|
||||
@stderr_callback.call @stderr_buffer.flush unless @stderr_buffer.empty?
|
||||
end
|
||||
|
||||
def ignored_sequence?(event_data)
|
||||
case event_data
|
||||
when /#exit\r/, /\s*{"cmd": "exit"}\r/
|
||||
# Do not forward. We will wait for the confirmed exit sent by the runner management.
|
||||
true
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
# === WebSocket Callbacks
|
||||
# These callbacks are executed based on events indicated by Faye WebSockets and are
|
||||
# independent of the JSON specification that is used within the WebSocket once established.
|
||||
|
||||
def on_message(raw_event)
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Receiving from #{@socket.url}: #{raw_event.data.inspect}" }
|
||||
event = decode(raw_event.data)
|
||||
return unless BACKEND_OUTPUT_SCHEMA.valid?(event)
|
||||
|
||||
event = event.deep_symbolize_keys
|
||||
message_type = event[:type].to_sym
|
||||
if WEBSOCKET_MESSAGE_TYPES.include?(message_type)
|
||||
__send__("handle_#{message_type}", event)
|
||||
else
|
||||
@error = Runner::Error::UnexpectedResponse.new("Unknown WebSocket message type: #{message_type}")
|
||||
close(:error)
|
||||
end
|
||||
end
|
||||
|
||||
def on_open(_event)
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Established connection to #{@socket.url}" }
|
||||
@start_callback.call
|
||||
end
|
||||
|
||||
def on_error(event)
|
||||
# In case of a WebSocket error, the connection will be closed by Faye::WebSocket::Client automatically.
|
||||
# Thus, no further handling is required here (the user will get notified).
|
||||
Sentry.set_extras(event: event.inspect)
|
||||
Sentry.capture_message("The WebSocket connection to #{@socket.url} was closed with an error.")
|
||||
end
|
||||
|
||||
def on_close(_event)
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Closing connection to #{@socket.url} with status: #{@status}" }
|
||||
flush_buffers
|
||||
|
||||
# Depending on the status, we might want to destroy the runner at management.
|
||||
# This ensures we get a new runner on the next request.
|
||||
# All failing runs, those cancelled by the user or those hitting a timeout or error are subject to this mechanism.
|
||||
|
||||
case @status
|
||||
when :timeout
|
||||
# The runner will be destroyed. For the DockerContainerPool, this mechanism is necessary.
|
||||
# However, it might not be required for Poseidon.
|
||||
@strategy.destroy_at_management
|
||||
@error = Runner::Error::ExecutionTimeout.new('Execution exceeded its time limit')
|
||||
when :terminated_by_codeocean, :terminated_by_management
|
||||
@exit_callback.call @exit_code
|
||||
when :terminated_by_client, :error
|
||||
@strategy.destroy_at_management
|
||||
else # :established
|
||||
# If the runner is killed by the DockerContainerPool after the maximum allowed time per user and
|
||||
# while the owning user is running an execution, the command execution stops and log output is incomplete.
|
||||
@strategy.destroy_at_management
|
||||
@error = Runner::Error::Unknown.new('Execution terminated with an unknown reason')
|
||||
end
|
||||
Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Closed connection to #{@socket.url} with status: #{@status}" }
|
||||
@event_loop.stop
|
||||
end
|
||||
|
||||
# === Message Handlers
|
||||
# Each message type indicated by the +type+ attribute in the JSON
|
||||
# sent by the runner management has a dedicated method.
|
||||
# Context:: All registered handlers are executed in the scope of
|
||||
# the bindings they had where they were registered.
|
||||
# Information not stored in the binding, such as the
|
||||
# locale or the call stack, is not available during execution!
|
||||
|
||||
def handle_exit(event)
|
||||
@status = :terminated_by_management
|
||||
@exit_code = event[:data]
|
||||
end
|
||||
|
||||
def handle_stdout(event)
|
||||
@stdout_buffer.store event[:data]
|
||||
@stdout_buffer.events.each do |event_data|
|
||||
@stdout_callback.call event_data unless ignored_sequence? event_data
|
||||
end
|
||||
end
|
||||
|
||||
def handle_stderr(event)
|
||||
@stderr_buffer.store event[:data]
|
||||
@stderr_buffer.events.each do |event_data|
|
||||
@stderr_callback.call event_data unless ignored_sequence? event_data
|
||||
end
|
||||
end
|
||||
|
||||
def handle_error(event)
|
||||
# In case of a (Nomad) error during execution, the runner management will notify us with an error message here.
|
||||
# This shouldn't happen too often and can be considered an internal server error by the runner management.
|
||||
# More information is available in the logs of the runner management or the orchestrator (e.g., Nomad).
|
||||
Sentry.set_extras(event: event.inspect)
|
||||
Sentry.capture_message("An error occurred during code execution while being connected to #{@socket.url}.")
|
||||
end
|
||||
|
||||
def handle_start(_event)
|
||||
# The execution just started as requested. This is an informal message and no further processing is required.
|
||||
end
|
||||
|
||||
def handle_timeout(_event)
|
||||
@status = :timeout
|
||||
# The runner management stopped the execution as the permitted execution time was exceeded.
|
||||
# We set the status here and wait for the connection to be closed (by the runner management).
|
||||
end
|
||||
end
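Illustrative only: registering callbacks on a connection object; `connection` and its construction are assumptions, while the event names come from EVENTS above.
connection.on(:stdout) {|data| print data }
connection.on(:stderr) {|data| warn data }
connection.on(:exit)   {|exit_code| puts "finished with status #{exit_code}" }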
|
104
lib/runner/connection/buffer.rb
Normal file
104
lib/runner/connection/buffer.rb
Normal file
@ -0,0 +1,104 @@
# frozen_string_literal: true

class Runner::Connection::Buffer
  # The WebSocket connection might group multiple lines. For further processing, we require all lines
  # to be processed separately. Therefore, we split the lines by each newline character not part of an enclosed
  # substring either in single or double quotes (e.g., within a JSON). Originally, each line break consists of `\r\n`.
  # We keep the `\r` at the end of the line (keeping "empty" lines) and replace it after buffering.
  # Inspired by https://stackoverflow.com/questions/13040585/split-string-by-spaces-properly-accounting-for-quotes-and-backslashes-ruby
  SPLIT_INDIVIDUAL_LINES = Regexp.compile(/(?:"(?:\\.|[^"])*"|'(?:\\.|[^'])*'|[^\n])+/)

  def initialize
    @global_buffer = +''
    @buffering = false
    @line_buffer = Queue.new
    super
  end

  def store(event_data)
    # First, we append the new data to the existing `@global_buffer`.
    # Either the `@global_buffer` is empty and this is a NO OP,
    # or the `@global_buffer` contains an incomplete string and thus requires the new part.
    @global_buffer += event_data
    # We process the full `@global_buffer`. Valid parts are removed from the buffer and
    # the remaining invalid parts are still stored in `@global_buffer`.
    @global_buffer = process_and_split @global_buffer
  end

  def events
    # Return all items from `@line_buffer` in an array (which is iterable) and clear the queue
    Array.new(@line_buffer.size) { @line_buffer.pop }
  end

  def flush
    raise Error::NotEmpty unless @line_buffer.empty?

    remaining_buffer = @global_buffer
    @buffering = false
    @global_buffer = +''
    remaining_buffer
  end

  def empty?
    @line_buffer.empty? && @global_buffer.empty?
  end

  private

  def process_and_split(message_parts, stop: false)
    # We need a temporary buffer to operate on
    buffer = +''
    message_parts.scan(SPLIT_INDIVIDUAL_LINES).each do |line|
      # Same argumentation as above: We can always append (previous empty or invalid)
      buffer += line

      if buffering_required_for? buffer
        @buffering = true
        # Check the existing substring `buffer` if it contains a valid message.
        # The remaining buffer is stored for further processing.
        buffer = process_and_split buffer, stop: true unless stop
      else
        add_to_line_buffer(buffer)
        # Clear the current buffer.
        buffer = +''
      end
    end
    # Return the remaining buffer which might become the `@global_buffer`
    buffer
  end

  def add_to_line_buffer(message)
    @buffering = false
    @global_buffer = +''
    # For our buffering, we identified line breaks with the `\n` and removed those temporarily.
    # Thus, we now re-add the `\n` at the end of the string and remove the `\r` at the same time.
    message = message.gsub(/\r$/, "\n")
    @line_buffer.push message
  end

  def buffering_required_for?(message)
    # First, check if the message is very short and starts with `{`
    return true if message.size <= 5 && message.start_with?(/\s*{/)

    invalid_json = !valid_json?(message)
    # Second, buffer if we have the beginning of a valid command but an invalid JSON
    return true if invalid_json && message.start_with?(/\s*{"cmd/)
    # Third, buffer the message if it contains long content (e.g., an image or turtle batch commands)
    return true if invalid_json && (message.include?('<img') || message.include?('"turtlebatch"'))

    # If nothing applies, we don't want to buffer the current message
    false
  end

  def currently_buffering?
    @buffering
  end

  def valid_json?(data)
    # Try parsing the JSON. If that is successful, we have a valid JSON (otherwise not)
    JSON.parse(data)
    true
  rescue JSON::ParserError
    false
  end
end
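To illustrate the buffering behaviour, here is a minimal usage sketch (the split JSON message is an assumed example, not taken from the test suite):

# Sketch only: demonstrates the intended store/events cycle of the buffer above.
buffer = Runner::Connection::Buffer.new

buffer.store('{"cmd": "write", "data": "partial im')
buffer.events   #=> [] – looks like the start of a command but is not valid JSON yet, so it stays buffered

buffer.store("age\"}\r\nplain output line\r")
buffer.events   #=> both complete lines, each re-terminated with "\n"

buffer.empty?   #=> true once the line buffer and the global buffer are drained
buffer.flush    #=> would return any incomplete remainder and reset the internal state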
41  lib/runner/event_loop.rb  Normal file
@ -0,0 +1,41 @@
# frozen_string_literal: true

# EventLoop is an abstraction around Ruby's queue so that its usage is
# easier to understand in our context.
class Runner::EventLoop
  def initialize
    @queue = Queue.new
    ensure_event_machine
  end

  # wait waits until another thread calls stop on this EventLoop.
  # There may only be one active wait call per loop at a time, otherwise it is not
  # deterministic which one will be unblocked if stop is called.
  def wait
    @queue.pop
  end

  # stop unblocks the currently active wait call. If there is none, the
  # next call to wait will not be blocking.
  def stop
    @queue.push nil if @queue.empty?
  end

  private

  # If there are multiple threads trying to connect to the WebSocket of their execution at the same time,
  # the Faye WebSocket connections will use the same reactor. We therefore only need to start an EventMachine
  # if there isn't a running reactor yet.
  # See this StackOverflow answer: https://stackoverflow.com/a/8247947
  def ensure_event_machine
    unless EventMachine.reactor_running? && EventMachine.reactor_thread.alive?
      queue = Queue.new
      Thread.new do
        EventMachine.run { queue.push nil }
      ensure
        ActiveRecord::Base.connection_pool.release_connection
      end
      queue.pop
    end
  end
end
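A short usage sketch of the event loop, assuming one thread waits for an execution while another signals its completion:

# Sketch only: wait blocks the calling thread until stop is invoked elsewhere.
event_loop = Runner::EventLoop.new

Thread.new do
  sleep 1           # stand-in for the WebSocket connection finishing its work
  event_loop.stop   # unblocks the waiting thread below
end

event_loop.wait     # returns once stop has been called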
51  lib/runner/strategy.rb  Normal file
@ -0,0 +1,51 @@
# frozen_string_literal: true

class Runner::Strategy
  def initialize(_runner_id, environment)
    @execution_environment = environment
  end

  def self.initialize_environment
    raise NotImplementedError
  end

  def self.sync_environment(_environment)
    raise NotImplementedError
  end

  def self.request_from_management(_environment)
    raise NotImplementedError
  end

  def destroy_at_management
    raise NotImplementedError
  end

  def copy_files(_files)
    raise NotImplementedError
  end

  def attach_to_execution(_command, _event_loop)
    raise NotImplementedError
  end

  def self.available_images
    raise NotImplementedError
  end

  def self.config
    raise NotImplementedError
  end

  def self.release
    raise NotImplementedError
  end

  def self.pool_size
    raise NotImplementedError
  end

  def self.websocket_header
    raise NotImplementedError
  end
end
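The class above only defines the contract; the concrete runner managements below implement it. As a purely illustrative sketch (class name and return values chosen for illustration, not part of this change set), a new strategy would subclass it like this:

# Hypothetical sketch only: shows which methods a new runner management strategy overrides.
class Runner::Strategy::Example < Runner::Strategy
  def self.initialize_environment; end

  def self.sync_environment(_environment)
    true
  end

  def self.request_from_management(_environment)
    SecureRandom.uuid  # an id the instance methods later use to address the runner
  end

  def destroy_at_management; end

  def copy_files(_files); end

  def attach_to_execution(command, event_loop)
    # open a WebSocket connection, yield it to the caller, then wait on the event loop
  end
end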
247  lib/runner/strategy/docker_container_pool.rb  Normal file
@ -0,0 +1,247 @@
# frozen_string_literal: true

class Runner::Strategy::DockerContainerPool < Runner::Strategy
  attr_reader :container_id, :command

  def initialize(runner_id, _environment)
    super
    @container_id = runner_id
  end

  def self.initialize_environment
    raise Error.new('Docker configuration missing!') unless config[:host] && config[:workspace_root]

    FileUtils.mkdir_p(File.expand_path(config[:workspace_root]))
  end

  def self.sync_environment(environment)
    # There is no dedicated sync mechanism yet. However, we need to emit a warning when the pool was previously
    # empty for this execution environment. In this case the validation command probably was not executed.
    return true unless environment.pool_size_previously_changed?

    case environment.pool_size_previously_was
    when nil, 0
      false
    else
      true
    end
  end

  def self.request_from_management(environment)
    url = "#{config[:url]}/docker_container_pool/get_container/#{environment.id}"
    inactivity_timeout = [config[:unused_runner_expiration_time], environment.permitted_execution_time].max
    body = {
      inactivity_timeout: inactivity_timeout.to_i.seconds,
    }
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Requesting new runner at #{url}" }
    response = Faraday.post url, body

    container_id = JSON.parse(response.body)['id']
    container_id.presence || raise(Runner::Error::NotAvailable.new("DockerContainerPool didn't return a container id"))
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  rescue JSON::ParserError => e
    raise Runner::Error::UnexpectedResponse.new("DockerContainerPool returned invalid JSON: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished new runner request" }
  end

  def destroy_at_management
    url = "#{self.class.config[:url]}/docker_container_pool/destroy_container/#{container.id}"
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Destroying runner at #{url}" }
    Faraday.delete(url)
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished destroying runner" }
  end

  def copy_files(files)
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Sending files to #{local_workspace_path}" }
    FileUtils.mkdir_p(local_workspace_path)
    clean_workspace
    files.each do |file|
      if file.path.present?
        local_directory_path = local_path(file.path)
        FileUtils.mkdir_p(local_directory_path)
      end

      local_file_path = local_path(file.filepath)
      if file.file_type.binary?
        FileUtils.cp(file.native_file.path, local_file_path)
      else
        begin
          File.open(local_file_path, 'w') {|f| f.write(file.content) }
        rescue IOError => e
          raise Runner::Error::WorkspaceError.new("Could not create file #{file.filepath}: #{e.inspect}")
        end
      end
    end
    FileUtils.chmod_R('+rwtX', local_workspace_path)
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished copying files" }
  end

  def attach_to_execution(command, event_loop)
    reset_inactivity_timer

    @command = command
    query_params = 'logs=0&stream=1&stderr=1&stdout=1&stdin=1'
    websocket_url = "#{self.class.config[:ws_host]}/v1.27/containers/#{container.id}/attach/ws?#{query_params}"

    socket = Connection.new(websocket_url, self, event_loop)
    begin
      Timeout.timeout(@execution_environment.permitted_execution_time) do
        socket.send_data(command)
        yield(socket)
        event_loop.wait
        event_loop.stop
      end
    rescue Timeout::Error
      socket.close(:timeout)
    end
    socket
  end

  def self.available_images
    url = "#{config[:url]}/docker_container_pool/available_images"
    response = Faraday.get(url)
    json = JSON.parse(response.body)
    return json if response.success?

    raise Runner::Error::InternalServerError.new("DockerContainerPool returned: #{json['error']}")
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  rescue JSON::ParserError => e
    raise Runner::Error::UnexpectedResponse.new("DockerContainerPool returned invalid JSON: #{e.inspect}")
  end

  def self.config
    @config ||= begin
      # Since the docker configuration file contains code that must be executed, we use ERB templating.
      docker_config = CodeOcean::Config.new(:docker).read(erb: true)
      codeocean_config = CodeOcean::Config.new(:code_ocean).read[:runner_management] || {}
      # All keys in `docker_config` take precedence over those in `codeocean_config`
      docker_config.merge codeocean_config
    end
  end

  def self.release
    url = "#{config[:url]}/docker_container_pool/dump_info"
    response = Faraday.get(url)
    JSON.parse(response.body)['release']
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  rescue JSON::ParserError => e
    raise Runner::Error::UnexpectedResponse.new("DockerContainerPool returned invalid JSON: #{e.inspect}")
  end

  def self.pool_size
    url = "#{config[:url]}/docker_container_pool/quantities"
    response = Faraday.get(url)
    pool_size = JSON.parse(response.body)
    pool_size.transform_keys(&:to_i)
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  rescue JSON::ParserError => e
    raise Runner::Error::UnexpectedResponse.new("DockerContainerPool returned invalid JSON: #{e.inspect}")
  end

  def self.websocket_header
    # The `ping` value is measured in seconds and specifies how often a Ping frame should be sent.
    # Internally, Faye::WebSocket uses EventMachine and the `ping` value is used to wake the EventMachine thread
    {
      ping: 0.1,
    }
  end

  private

  def container
    @container ||= begin
      container = Docker::Container.get(@container_id)
      raise Runner::Error::RunnerNotFound unless container.info['State']['Running']

      container
    end
  rescue Docker::Error::NotFoundError
    raise Runner::Error::RunnerNotFound
  end

  def local_path(path)
    unclean_path = local_workspace_path.join(path)
    clean_path = File.expand_path(unclean_path)
    unless clean_path.to_s.start_with? local_workspace_path.to_s
      raise Runner::Error::WorkspaceError.new("Local filepath #{clean_path.inspect} not allowed")
    end

    Pathname.new(clean_path)
  end

  def clean_workspace
    FileUtils.rm_r(local_workspace_path.children, secure: true)
  rescue Errno::ENOENT => e
    raise Runner::Error::WorkspaceError.new("The workspace directory does not exist and cannot be deleted: #{e.inspect}")
  rescue Errno::EACCES => e
    raise Runner::Error::WorkspaceError.new("Not allowed to clean workspace #{local_workspace_path}: #{e.inspect}")
  end

  def local_workspace_path
    @local_workspace_path ||= Pathname.new(container.binds.first.split(':').first)
  end

  def reset_inactivity_timer
    url = "#{self.class.config[:url]}/docker_container_pool/reuse_container/#{container.id}"
    inactivity_timeout = [self.class.config[:unused_runner_expiration_time], @execution_environment.permitted_execution_time].max
    body = {
      inactivity_timeout: inactivity_timeout.to_i.seconds,
    }
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Resetting inactivity timer at #{url}" }
    Faraday.post url, body
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to DockerContainerPool failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished resetting inactivity timer" }
  end

  class Connection < Runner::Connection
    def initialize(*args)
      @stream = 'stdout'
      super
    end

    def encode(data)
      "#{data}\n"
    end

    def decode(event_data)
      case event_data
      when /(?<previous_data>.*?)((root|python|java|user)@#{@strategy.container_id[0..11]}|#exit|{"cmd": "exit"})/m
        # The RegEx above is used to determine unwanted output which also indicates a program termination.
        # If the RegEx matches, at least two capture groups will be created.
        # The first (called `previous_data`) contains any data before the match (including multiple lines)
        # while the second contains the unwanted output data.

        # Assume correct termination for now and return exit code 0
        # TODO: Can we use the actual exit code here?
        @exit_code = 0
        close(:terminated_by_codeocean)

        # The first capture group is forwarded
        {'type' => @stream, 'data' => Regexp.last_match(:previous_data)}
      when /python3.*-m\s*unittest/
        # TODO: Super dirty hack to redirect test output to stderr
        # This is only required for Python and the unittest module but must not be used with PyLint
        @stream = 'stderr'
      when /\*\*\*\*\*\*\*\*\*\*\*\*\* Module/
        # Identification of PyLint output, change stream back to stdout and return event
        @stream = 'stdout'
        {'type' => @stream, 'data' => event_data}
      when /#{Regexp.quote(@strategy.command)}/
      when /bash: cmd:canvasevent: command not found/
      else
        {'type' => @stream, 'data' => event_data}
      end
    end
  end
end
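The first branch of decode above relies on the shell prompt to detect program termination. A small sketch of how the named capture group behaves (container id and output are assumed sample values):

# Sketch only: mirrors the termination branch of decode above with an assumed container id.
container_id = 'f1d2d2f924e9'[0..11]
pattern = /(?<previous_data>.*?)((root|python|java|user)@#{container_id}|#exit|{"cmd": "exit"})/m

frame = "Hello World\nroot@f1d2d2f924e9:/workspace#"
if frame =~ pattern
  Regexp.last_match(:previous_data)  #=> "Hello World\n" – only this part is forwarded as a stdout event
end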
57  lib/runner/strategy/null.rb  Normal file
@ -0,0 +1,57 @@
# frozen_string_literal: true

# This strategy allows normal operation of CodeOcean even when the runner management is disabled.
# However, as no command can be executed, all execution requests will fail.
class Runner::Strategy::Null < Runner::Strategy
  def self.initialize_environment; end

  def self.sync_environment(_environment); end

  def self.request_from_management(_environment)
    SecureRandom.uuid
  end

  def destroy_at_management; end

  def copy_files(_files); end

  def attach_to_execution(command, event_loop)
    socket = Connection.new(nil, self, event_loop)
    # We don't want to return an error if the execution environment is changed
    socket.status = :terminated_by_codeocean if command == ExecutionEnvironment::VALIDATION_COMMAND
    yield(socket)
    socket
  end

  def self.available_images
    []
  end

  def self.config; end

  def self.release
    'N/A'
  end

  def self.pool_size
    {}
  end

  def self.websocket_header
    {}
  end

  class Connection < Runner::Connection
    def decode(event_data)
      event_data
    end

    def encode(data)
      data
    end

    def active?
      false
    end
  end
end
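Because the Null strategy never contacts a runner management, an attached execution simply yields a connection that reports itself as inactive. A brief sketch (the `environment` variable stands for any ExecutionEnvironment instance and is assumed here):

# Sketch only: no command is ever sent anywhere with the Null strategy.
strategy = Runner::Strategy::Null.new(SecureRandom.uuid, environment)
socket = strategy.attach_to_execution('echo Hello', Runner::EventLoop.new) {|connection| connection }
socket.active?  #=> false – callers therefore see the execution fail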
210  lib/runner/strategy/poseidon.rb  Normal file
@ -0,0 +1,210 @@
# frozen_string_literal: true

class Runner::Strategy::Poseidon < Runner::Strategy
  ERRORS = %w[NOMAD_UNREACHABLE NOMAD_OVERLOAD NOMAD_INTERNAL_SERVER_ERROR UNKNOWN].freeze

  ERRORS.each do |error|
    define_singleton_method :"error_#{error.downcase}" do
      error
    end
  end

  def initialize(runner_id, _environment)
    super
    @allocation_id = runner_id
  end

  def self.initialize_environment
    # There is no additional initialization required for Poseidon
    nil
  end

  def self.sync_environment(environment)
    url = "#{config[:url]}/execution-environments/#{environment.id}"
    response = http_connection.put url, environment.to_json
    return true if [201, 204].include? response.status

    Rails.logger.warn("Could not create execution environment in Poseidon, got response: #{response.as_json}")
    false
  rescue Faraday::Error => e
    Rails.logger.warn("Could not create execution environment because of Faraday error: #{e.inspect}")
    false
  end

  def self.request_from_management(environment)
    url = "#{config[:url]}/runners"
    inactivity_timeout = [config[:unused_runner_expiration_time], environment.permitted_execution_time].max
    body = {
      executionEnvironmentId: environment.id,
      inactivityTimeout: inactivity_timeout.to_i.seconds,
    }
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Requesting new runner at #{url}" }
    response = http_connection.post url, body.to_json

    case response.status
    when 200
      response_body = parse response
      runner_id = response_body[:runnerId]
      runner_id.presence || raise(Runner::Error::UnexpectedResponse.new('Poseidon did not send a runner id'))
    when 404
      raise Runner::Error::EnvironmentNotFound.new
    else
      handle_error response
    end
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to Poseidon failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished new runner request" }
  end

  def destroy_at_management
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Destroying runner at #{runner_url}" }
    response = self.class.http_connection.delete runner_url
    self.class.handle_error response unless response.status == 204
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to Poseidon failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished destroying runner" }
  end

  def copy_files(files)
    url = "#{runner_url}/files"
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Sending files to #{url}" }

    copy = files.map do |file|
      {
        path: file.filepath,
        content: Base64.strict_encode64(file.content),
      }
    end

    # First, clean the workspace and second, copy all files to their location.
    # This ensures that no artefacts from a previous submission remain in the workspace.
    body = {copy: copy, delete: ['./']}
    response = self.class.http_connection.patch url, body.to_json
    return if response.status == 204

    Runner.destroy(@allocation_id) if response.status == 400
    self.class.handle_error response
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to Poseidon failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished copying files" }
  end

  def attach_to_execution(command, event_loop)
    websocket_url = execute_command(command)
    socket = Connection.new(websocket_url, self, event_loop)
    yield(socket)
    socket
  end

  def self.available_images
    # Images are pulled when needed for a new execution environment
    # and cleaned up automatically if no longer in use.
    # Hence, there is no additional image that we need to return
    []
  end

  def self.config
    @config ||= CodeOcean::Config.new(:code_ocean).read[:runner_management] || {}
  end

  def self.release
    nil
  end

  def self.pool_size
    {}
  end

  def self.websocket_header
    # The `tls` option is used to customize the validation of TLS connections.
    # The `headers` option is used to pass the `Poseidon-Token` as part of the initial connection request.
    {
      tls: {root_cert_file: config[:ca_file]},
      headers: {'Poseidon-Token' => config[:token]},
    }
  end

  def self.handle_error(response)
    case response.status
    when 400
      response_body = parse response
      raise Runner::Error::BadRequest.new(response_body[:message])
    when 401
      raise Runner::Error::Unauthorized.new('Authentication with Poseidon failed')
    when 404
      raise Runner::Error::RunnerNotFound.new
    when 500
      response_body = parse response
      error_code = response_body[:errorCode]
      if error_code == error_nomad_overload
        raise Runner::Error::NotAvailable.new("Poseidon has no runner available (#{error_code}): #{response_body[:message]}")
      else
        raise Runner::Error::InternalServerError.new("Poseidon sent #{response_body[:errorCode]}: #{response_body[:message]}")
      end
    else
      raise Runner::Error::UnexpectedResponse.new("Poseidon sent unexpected response status code #{response.status}")
    end
  end

  def self.headers
    @headers ||= {'Content-Type' => 'application/json', 'Poseidon-Token' => config[:token]}
  end

  def self.http_connection
    @http_connection ||= Faraday.new(ssl: {ca_file: config[:ca_file]}, headers: headers) do |faraday|
      faraday.adapter :net_http_persistent
    end
  end

  def self.parse(response)
    JSON.parse(response.body).deep_symbolize_keys
  rescue JSON::ParserError => e
    # Poseidon should not send invalid json
    raise Runner::Error::UnexpectedResponse.new("Error parsing response from Poseidon: #{e.message}")
  end

  private

  def execute_command(command)
    url = "#{runner_url}/execute"
    body = {command: command, timeLimit: @execution_environment.permitted_execution_time}
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Preparing command execution at #{url}: #{command}" }
    response = self.class.http_connection.post url, body.to_json

    case response.status
    when 200
      response_body = self.class.parse response
      websocket_url = response_body[:websocketUrl]
      websocket_url.presence || raise(Runner::Error::UnexpectedResponse.new('Poseidon did not send a WebSocket URL'))
    when 400
      Runner.destroy(@allocation_id)
      self.class.handle_error response
    else
      self.class.handle_error response
    end
  rescue Faraday::Error => e
    raise Runner::Error::FaradayError.new("Request to Poseidon failed: #{e.inspect}")
  ensure
    Rails.logger.debug { "#{Time.zone.now.getutc.inspect}: Finished command execution preparation" }
  end

  def runner_url
    "#{self.class.config[:url]}/runners/#{@allocation_id}"
  end

  class Connection < Runner::Connection
    def decode(event_data)
      JSON.parse(event_data)
    rescue JSON::ParserError => e
      @error = Runner::Error::UnexpectedResponse.new("The WebSocket message from Poseidon could not be decoded to JSON: #{e.inspect}")
      close(:error)
    end

    def encode(data)
      "#{data}\n"
    end
  end
end
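For reference, these are the request and response shapes exchanged with Poseidon as implied by the code above; field names are taken from the methods, while the concrete values are purely illustrative:

# Sketch only: illustrative payloads mirroring request_from_management and execute_command above.

# POST {config[:url]}/runners
request  = {executionEnvironmentId: 42, inactivityTimeout: 180}
response = {runnerId: 'a3f581c2-0000-0000-0000-000000000000'}        # status 200

# POST {runner_url}/execute
request  = {command: 'make run', timeLimit: 60}
response = {websocketUrl: 'wss://poseidon.example.org/execute/...'}  # status 200; the URL is then handed to Connection.new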
@ -1,39 +0,0 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
class Controller < AnonymousController
|
||||
include SubmissionScoring
|
||||
end
|
||||
|
||||
describe SubmissionScoring do
|
||||
let(:controller) { Controller.new }
|
||||
let(:submission) { FactoryBot.create(:submission, cause: 'submit') }
|
||||
|
||||
before do
|
||||
controller.instance_variable_set(:@current_user, FactoryBot.create(:external_user))
|
||||
controller.instance_variable_set(:@_params, {})
|
||||
end
|
||||
|
||||
describe '#collect_test_results' do
|
||||
after { controller.send(:collect_test_results, submission) }
|
||||
|
||||
it 'executes every teacher-defined test file' do
|
||||
submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
|
||||
allow(controller).to receive(:execute_test_file).with(file, submission).and_return({})
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#score_submission', cleaning_strategy: :truncation do
|
||||
after { controller.score_submission(submission) }
|
||||
|
||||
it 'collects the test results' do
|
||||
allow(controller).to receive(:collect_test_results).and_return([])
|
||||
end
|
||||
|
||||
it 'assigns a score to the submissions' do
|
||||
expect(submission).to receive(:update).with(score: anything)
|
||||
end
|
||||
end
|
||||
end
|
@ -6,11 +6,12 @@ describe ExecutionEnvironmentsController do
|
||||
let(:execution_environment) { FactoryBot.create(:ruby) }
|
||||
let(:user) { FactoryBot.create(:admin) }
|
||||
|
||||
before { allow(controller).to receive(:current_user).and_return(user) }
|
||||
before do
|
||||
allow(controller).to receive(:current_user).and_return(user)
|
||||
allow(controller).to receive(:sync_to_runner_management).and_return(nil)
|
||||
end
|
||||
|
||||
describe 'POST #create' do
|
||||
before { allow(DockerClient).to receive(:image_tags).at_least(:once).and_return([]) }
|
||||
|
||||
context 'with a valid execution environment' do
|
||||
let(:perform_request) { proc { post :create, params: {execution_environment: FactoryBot.build(:ruby).attributes} } }
|
||||
|
||||
@ -23,6 +24,10 @@ describe ExecutionEnvironmentsController do
|
||||
expect { perform_request.call }.to change(ExecutionEnvironment, :count).by(1)
|
||||
end
|
||||
|
||||
it 'registers the execution environment with the runner management' do
|
||||
expect(controller).to have_received(:sync_to_runner_management)
|
||||
end
|
||||
|
||||
expect_redirect(ExecutionEnvironment.last)
|
||||
end
|
||||
|
||||
@ -32,6 +37,10 @@ describe ExecutionEnvironmentsController do
|
||||
expect_assigns(execution_environment: ExecutionEnvironment)
|
||||
expect_status(200)
|
||||
expect_template(:new)
|
||||
|
||||
it 'does not register the execution environment with the runner management' do
|
||||
expect(controller).not_to have_received(:sync_to_runner_management)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@ -50,7 +59,6 @@ describe ExecutionEnvironmentsController do
|
||||
|
||||
describe 'GET #edit' do
|
||||
before do
|
||||
allow(DockerClient).to receive(:image_tags).at_least(:once).and_return([])
|
||||
get :edit, params: {id: execution_environment.id}
|
||||
end
|
||||
|
||||
@ -64,12 +72,12 @@ describe ExecutionEnvironmentsController do
|
||||
let(:command) { 'which ruby' }
|
||||
|
||||
before do
|
||||
allow(DockerClient).to receive(:new).with(execution_environment: execution_environment).and_call_original
|
||||
allow_any_instance_of(DockerClient).to receive(:execute_arbitrary_command).with(command)
|
||||
runner = instance_double 'runner'
|
||||
allow(Runner).to receive(:for).with(user, execution_environment).and_return runner
|
||||
allow(runner).to receive(:execute_command).and_return({})
|
||||
post :execute_command, params: {command: command, id: execution_environment.id}
|
||||
end
|
||||
|
||||
expect_assigns(docker_client: DockerClient)
|
||||
expect_assigns(execution_environment: :execution_environment)
|
||||
expect_json
|
||||
expect_status(200)
|
||||
@ -88,7 +96,6 @@ describe ExecutionEnvironmentsController do
|
||||
|
||||
describe 'GET #new' do
|
||||
before do
|
||||
allow(DockerClient).to receive(:image_tags).at_least(:once).and_return([])
|
||||
get :new
|
||||
end
|
||||
|
||||
@ -100,11 +107,11 @@ describe ExecutionEnvironmentsController do
|
||||
|
||||
describe '#set_docker_images' do
|
||||
context 'when Docker is available' do
|
||||
let(:docker_images) { [1, 2, 3] }
|
||||
let(:docker_images) { %w[image:one image:two image:three] }
|
||||
|
||||
before do
|
||||
allow(DockerClient).to receive(:check_availability!).at_least(:once)
|
||||
allow(DockerClient).to receive(:image_tags).and_return(docker_images)
|
||||
allow(Runner).to receive(:strategy_class).and_return Runner::Strategy::DockerContainerPool
|
||||
allow(Runner::Strategy::DockerContainerPool).to receive(:available_images).and_return(docker_images)
|
||||
controller.send(:set_docker_images)
|
||||
end
|
||||
|
||||
@ -115,7 +122,8 @@ describe ExecutionEnvironmentsController do
|
||||
let(:error_message) { 'Docker is unavailable' }
|
||||
|
||||
before do
|
||||
allow(DockerClient).to receive(:check_availability!).at_least(:once).and_raise(DockerClient::Error.new(error_message))
|
||||
allow(Runner).to receive(:strategy_class).and_return Runner::Strategy::DockerContainerPool
|
||||
allow(Runner::Strategy::DockerContainerPool).to receive(:available_images).and_raise(Runner::Error::InternalServerError.new(error_message))
|
||||
controller.send(:set_docker_images)
|
||||
end
|
||||
|
||||
@ -155,13 +163,17 @@ describe ExecutionEnvironmentsController do
|
||||
describe 'PUT #update' do
|
||||
context 'with a valid execution environment' do
|
||||
before do
|
||||
allow(DockerClient).to receive(:image_tags).at_least(:once).and_return([])
|
||||
allow(controller).to receive(:sync_to_runner_management).and_return(nil)
|
||||
put :update, params: {execution_environment: FactoryBot.attributes_for(:ruby), id: execution_environment.id}
|
||||
end
|
||||
|
||||
expect_assigns(docker_images: Array)
|
||||
expect_assigns(execution_environment: ExecutionEnvironment)
|
||||
expect_redirect(:execution_environment)
|
||||
|
||||
it 'updates the execution environment at the runner management' do
|
||||
expect(controller).to have_received(:sync_to_runner_management)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with an invalid execution environment' do
|
||||
@ -170,6 +182,35 @@ describe ExecutionEnvironmentsController do
|
||||
expect_assigns(execution_environment: ExecutionEnvironment)
|
||||
expect_status(200)
|
||||
expect_template(:edit)
|
||||
|
||||
it 'does not update the execution environment at the runner management' do
|
||||
expect(controller).not_to have_received(:sync_to_runner_management)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#sync_all_to_runner_management' do
|
||||
let(:execution_environments) { FactoryBot.build_list(:ruby, 3) }
|
||||
|
||||
let(:codeocean_config) { instance_double(CodeOcean::Config) }
|
||||
let(:runner_management_config) { {runner_management: {enabled: true, strategy: :poseidon}} }
|
||||
|
||||
before do
|
||||
# Ensure to reset the memoized helper
|
||||
Runner.instance_variable_set :@strategy_class, nil
|
||||
allow(CodeOcean::Config).to receive(:new).with(:code_ocean).and_return(codeocean_config)
|
||||
allow(codeocean_config).to receive(:read).and_return(runner_management_config)
|
||||
end
|
||||
|
||||
it 'copies all execution environments to the runner management' do
|
||||
allow(ExecutionEnvironment).to receive(:all).and_return(execution_environments)
|
||||
|
||||
execution_environments.each do |execution_environment|
|
||||
allow(Runner::Strategy::Poseidon).to receive(:sync_environment).with(execution_environment).and_return(true)
|
||||
expect(Runner::Strategy::Poseidon).to receive(:sync_environment).with(execution_environment).once
|
||||
end
|
||||
|
||||
post :sync_all_to_runner_management
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -240,12 +240,31 @@ describe ExercisesController do
|
||||
let(:output) { {} }
|
||||
let(:perform_request) { post :submit, format: :json, params: {id: exercise.id, submission: {cause: 'submit', exercise_id: exercise.id}} }
|
||||
let(:user) { FactoryBot.create(:external_user) }
|
||||
let(:scoring_response) do
|
||||
[{
|
||||
status: :ok,
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
waiting_for_container_time: 0,
|
||||
container_execution_time: 0,
|
||||
file_role: 'teacher_defined_test',
|
||||
count: 1,
|
||||
failed: 0,
|
||||
error_messages: [],
|
||||
passed: 1,
|
||||
score: 1.0,
|
||||
filename: 'index.html_spec.rb',
|
||||
message: 'Well done.',
|
||||
weight: 2.0,
|
||||
}]
|
||||
end
|
||||
|
||||
before do
|
||||
FactoryBot.create(:lti_parameter, external_user: user, exercise: exercise)
|
||||
allow_any_instance_of(Submission).to receive(:normalized_score).and_return(1)
|
||||
allow(controller).to receive(:collect_test_results).and_return([{score: 1, weight: 1}])
|
||||
allow(controller).to receive(:score_submission).and_call_original
|
||||
submission = FactoryBot.build(:submission, exercise: exercise, user: user)
|
||||
allow(submission).to receive(:normalized_score).and_return(1)
|
||||
allow(submission).to receive(:calculate_score).and_return(scoring_response)
|
||||
allow(Submission).to receive(:create).and_return(submission)
|
||||
end
|
||||
|
||||
context 'when LTI outcomes are supported' do
|
||||
|
@ -154,13 +154,8 @@ describe SubmissionsController do
|
||||
let(:filename) { submission.collect_files.detect(&:main_file?).name_with_extension }
|
||||
let(:perform_request) { get :run, params: {filename: filename, id: submission.id} }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(ActionController::Live::SSE).to receive(:write).at_least(3).times
|
||||
end
|
||||
|
||||
context 'when no errors occur during execution' do
|
||||
before do
|
||||
allow_any_instance_of(DockerClient).to receive(:execute_run_command).with(submission, filename).and_return({})
|
||||
perform_request
|
||||
end
|
||||
|
||||
@ -223,60 +218,9 @@ describe SubmissionsController do
|
||||
let(:output) { {} }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(DockerClient).to receive(:execute_test_command).with(submission, filename)
|
||||
get :test, params: {filename: filename, id: submission.id}
|
||||
end
|
||||
|
||||
pending('todo')
|
||||
end
|
||||
|
||||
describe '#with_server_sent_events' do
|
||||
let(:response) { ActionDispatch::TestResponse.new }
|
||||
|
||||
before { allow(controller).to receive(:response).and_return(response) }
|
||||
|
||||
context 'when no error occurs' do
|
||||
after { controller.send(:with_server_sent_events) }
|
||||
|
||||
it 'uses server-sent events' do
|
||||
expect(ActionController::Live::SSE).to receive(:new).and_call_original
|
||||
end
|
||||
|
||||
it "writes a 'start' event" do
|
||||
allow_any_instance_of(ActionController::Live::SSE).to receive(:write)
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:write).with(nil, event: 'start')
|
||||
end
|
||||
|
||||
it "writes a 'close' event" do
|
||||
allow_any_instance_of(ActionController::Live::SSE).to receive(:write)
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:write).with({code: 200}, event: 'close')
|
||||
end
|
||||
|
||||
it 'closes the stream' do
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:close).and_call_original
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an error occurs' do
|
||||
after { controller.send(:with_server_sent_events) { raise } }
|
||||
|
||||
it 'uses server-sent events' do
|
||||
expect(ActionController::Live::SSE).to receive(:new).and_call_original
|
||||
end
|
||||
|
||||
it "writes a 'start' event" do
|
||||
allow_any_instance_of(ActionController::Live::SSE).to receive(:write)
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:write).with(nil, event: 'start')
|
||||
end
|
||||
|
||||
it "writes a 'close' event" do
|
||||
allow_any_instance_of(ActionController::Live::SSE).to receive(:write)
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:write).with({code: 500}, event: 'close')
|
||||
end
|
||||
|
||||
it 'closes the stream' do
|
||||
expect_any_instance_of(ActionController::Live::SSE).to receive(:close).and_call_original
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -16,9 +16,10 @@ describe 'seeds' do
|
||||
allow(ActiveRecord::Base).to receive(:establish_connection).with(:development) {
|
||||
ActiveRecord::Base.establish_connection(:test)
|
||||
}
|
||||
allow_any_instance_of(ExecutionEnvironment).to receive(:working_docker_image?).and_return true
|
||||
end
|
||||
|
||||
describe 'execute db:seed' do
|
||||
describe 'execute db:seed', cleaning_strategy: :truncation do
|
||||
it 'collects the test results' do
|
||||
expect { seed }.not_to raise_error
|
||||
end
|
||||
|
@ -4,6 +4,7 @@ FactoryBot.define do
|
||||
factory :coffee_script, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-coffee:latest' }
|
||||
file_type { association :dot_coffee, user: user }
|
||||
help
|
||||
@ -18,6 +19,7 @@ FactoryBot.define do
|
||||
factory :html, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-html:latest' }
|
||||
file_type { association :dot_html, user: user }
|
||||
help
|
||||
@ -34,6 +36,7 @@ FactoryBot.define do
|
||||
factory :java, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'openhpi/co_execenv_java:8' }
|
||||
file_type { association :dot_java, user: user }
|
||||
help
|
||||
@ -50,6 +53,7 @@ FactoryBot.define do
|
||||
factory :jruby, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-jruby:latest' }
|
||||
file_type { association :dot_rb, user: user }
|
||||
help
|
||||
@ -66,6 +70,7 @@ FactoryBot.define do
|
||||
factory :node_js, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-node:latest' }
|
||||
file_type { association :dot_js, user: user }
|
||||
help
|
||||
@ -80,6 +85,7 @@ FactoryBot.define do
|
||||
factory :python, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'openhpi/co_execenv_python:3.4' }
|
||||
file_type { association :dot_py, user: user }
|
||||
help
|
||||
@ -96,6 +102,7 @@ FactoryBot.define do
|
||||
factory :ruby, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-ruby:latest' }
|
||||
file_type { association :dot_rb, user: user }
|
||||
help
|
||||
@ -112,9 +119,10 @@ FactoryBot.define do
|
||||
factory :sinatra, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-sinatra:latest' }
|
||||
file_type { association :dot_rb, user: user }
|
||||
exposed_ports { '4567' }
|
||||
exposed_ports { [4567] }
|
||||
help
|
||||
name { 'Sinatra' }
|
||||
network_enabled { true }
|
||||
@ -129,6 +137,7 @@ FactoryBot.define do
|
||||
factory :sqlite, class: 'ExecutionEnvironment' do
|
||||
created_by_teacher
|
||||
default_memory_limit
|
||||
default_cpu_limit
|
||||
docker_image { 'hklement/ubuntu-sqlite:latest' }
|
||||
file_type { association :dot_sql, user: user }
|
||||
help
|
||||
@ -143,7 +152,11 @@ FactoryBot.define do
|
||||
end
|
||||
|
||||
trait :default_memory_limit do
|
||||
memory_limit { DockerClient::DEFAULT_MEMORY_LIMIT }
|
||||
memory_limit { ExecutionEnvironment::DEFAULT_MEMORY_LIMIT }
|
||||
end
|
||||
|
||||
trait :default_cpu_limit do
|
||||
cpu_limit { 20 }
|
||||
end
|
||||
|
||||
trait :help do
|
||||
|
10  spec/factories/runner.rb  Normal file
@ -0,0 +1,10 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
# This factory does not request the runner management as the id is already provided.
|
||||
FactoryBot.define do
|
||||
factory :runner do
|
||||
runner_id { SecureRandom.uuid }
|
||||
association :execution_environment, factory: :ruby
|
||||
association :user, factory: :external_user
|
||||
end
|
||||
end
|
@ -6,7 +6,7 @@ describe 'Editor', js: true do
|
||||
let(:exercise) { FactoryBot.create(:audio_video, description: Forgery(:lorem_ipsum).sentence) }
|
||||
let(:scoring_response) do
|
||||
[{
|
||||
status: 'ok',
|
||||
status: :ok,
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
waiting_for_container_time: 0,
|
||||
@ -94,7 +94,9 @@ describe 'Editor', js: true do
|
||||
end
|
||||
|
||||
it 'contains a button for submitting the exercise' do
|
||||
allow_any_instance_of(SubmissionsController).to receive(:score_submission).and_return(scoring_response)
|
||||
submission = FactoryBot.build(:submission, user: user, exercise: exercise)
|
||||
allow(submission).to receive(:calculate_score).and_return(scoring_response)
|
||||
allow(Submission).to receive(:find).and_return(submission)
|
||||
click_button(I18n.t('exercises.editor.score'))
|
||||
expect(page).not_to have_css('#submit_outdated')
|
||||
expect(page).to have_css('#submit')
|
||||
|
@ -3,7 +3,7 @@
|
||||
require 'rails_helper'
|
||||
|
||||
describe 'Factories' do
|
||||
it 'are all valid', docker: true, permitted_execution_time: 30 do
|
||||
it 'are all valid', permitted_execution_time: 30 do
|
||||
expect { FactoryBot.lint }.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
@ -10,7 +10,12 @@ describe Admin::DashboardHelper do
|
||||
end
|
||||
|
||||
describe '#docker_data' do
|
||||
before { FactoryBot.create(:ruby) }
|
||||
before do
|
||||
FactoryBot.create(:ruby)
|
||||
dcp = instance_double 'docker_container_pool'
|
||||
allow(Runner).to receive(:strategy_class).and_return dcp
|
||||
allow(dcp).to receive(:pool_size).and_return([])
|
||||
end
|
||||
|
||||
it 'contains an entry for every execution environment' do
|
||||
expect(docker_data.length).to eq(ExecutionEnvironment.count)
|
||||
@ -21,7 +26,6 @@ describe Admin::DashboardHelper do
|
||||
end
|
||||
|
||||
it 'contains the number of available containers for every execution environment' do
|
||||
expect(DockerContainerPool).to receive(:quantities).exactly(ExecutionEnvironment.count).times.and_call_original
|
||||
expect(docker_data.first).to include(:quantity)
|
||||
end
|
||||
end
|
||||
|
@ -5,7 +5,7 @@ require 'seeds_helper'
|
||||
|
||||
WORKSPACE_PATH = Rails.root.join('tmp', 'files', Rails.env, 'code_ocean_test')
|
||||
|
||||
describe DockerClient, docker: true do
|
||||
describe DockerClient do
|
||||
let(:command) { 'whoami' }
|
||||
let(:docker_client) { described_class.new(execution_environment: FactoryBot.build(:java), user: FactoryBot.build(:admin)) }
|
||||
let(:execution_environment) { FactoryBot.build(:java) }
|
||||
@ -14,6 +14,7 @@ describe DockerClient, docker: true do
|
||||
let(:workspace_path) { WORKSPACE_PATH }
|
||||
|
||||
before do
|
||||
described_class.initialize_environment
|
||||
allow(described_class).to receive(:container_creation_options).and_wrap_original do |original_method, *args, &block|
|
||||
result = original_method.call(*args, &block)
|
||||
result['NanoCPUs'] = 2 * 1_000_000_000 # CPU quota in units of 10^-9 CPUs.
|
||||
@ -70,13 +71,15 @@ describe DockerClient, docker: true do
|
||||
|
||||
it 'uses the correct Docker image' do
|
||||
expect(described_class).to receive(:find_image_by_tag).with(execution_environment.docker_image).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'creates a unique directory' do
|
||||
expect(described_class).to receive(:generate_local_workspace_path).and_call_original
|
||||
expect(FileUtils).to receive(:mkdir).with(kind_of(String)).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'creates a container' do
|
||||
@ -90,22 +93,26 @@ describe DockerClient, docker: true do
|
||||
result
|
||||
end
|
||||
expect(Docker::Container).to receive(:create).with(kind_of(Hash)).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'starts the container' do
|
||||
expect_any_instance_of(Docker::Container).to receive(:start).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'configures mapped directories' do
|
||||
expect(described_class).to receive(:mapped_directories).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'configures mapped ports' do
|
||||
expect(described_class).to receive(:mapped_ports).with(execution_environment).and_call_original
|
||||
create_container
|
||||
container = create_container
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
context 'when an error occurs' do
|
||||
@ -117,7 +124,9 @@ describe DockerClient, docker: true do
|
||||
end
|
||||
|
||||
it 'retries to create a container' do
|
||||
expect(create_container).to be_a(Docker::Container)
|
||||
container = create_container
|
||||
expect(container).to be_a(Docker::Container)
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
end
|
||||
|
||||
@ -161,6 +170,7 @@ describe DockerClient, docker: true do
|
||||
end
|
||||
|
||||
describe '#create_workspace_file' do
|
||||
let(:container) { Docker::Container.send(:new, Docker::Connection.new('http://example.org', {}), 'id' => SecureRandom.hex) }
|
||||
let(:file) { FactoryBot.build(:file, content: 'puts 42') }
|
||||
let(:file_path) { File.join(workspace_path, file.name_with_extension) }
|
||||
|
||||
@ -169,7 +179,7 @@ describe DockerClient, docker: true do
|
||||
it 'creates a file' do
|
||||
expect(described_class).to receive(:local_workspace_path).at_least(:once).and_return(workspace_path)
|
||||
FileUtils.mkdir_p(workspace_path)
|
||||
docker_client.send(:create_workspace_file, container: CONTAINER, file: file)
|
||||
docker_client.send(:create_workspace_file, container: container, file: file)
|
||||
expect(File.exist?(file_path)).to be true
|
||||
expect(File.new(file_path, 'r').read).to eq(file.content)
|
||||
end
|
||||
@ -196,15 +206,17 @@ describe DockerClient, docker: true do
|
||||
end
|
||||
|
||||
it 'deletes the container' do
|
||||
expect(container).to receive(:delete).with(force: true, v: true)
|
||||
expect(container).to receive(:delete).with(force: true, v: true).and_call_original
|
||||
end
|
||||
end
|
||||
|
||||
describe '#execute_arbitrary_command' do
|
||||
let(:execute_arbitrary_command) { docker_client.execute_arbitrary_command(command) }
|
||||
|
||||
it 'takes a container from the pool' do
|
||||
expect(DockerContainerPool).to receive(:get_container).and_call_original
|
||||
after { described_class.destroy_container(docker_client.container) }
|
||||
|
||||
it 'creates a new container' do
|
||||
expect(described_class).to receive(:create_container).and_call_original
|
||||
execute_arbitrary_command
|
||||
end
|
||||
|
||||
@ -245,10 +257,13 @@ describe DockerClient, docker: true do
|
||||
describe '#execute_run_command' do
|
||||
let(:filename) { submission.exercise.files.detect {|file| file.role == 'main_file' }.name_with_extension }
|
||||
|
||||
after { docker_client.send(:execute_run_command, submission, filename) }
|
||||
after do
|
||||
docker_client.send(:execute_run_command, submission, filename)
|
||||
described_class.destroy_container(docker_client.container)
|
||||
end
|
||||
|
||||
it 'takes a container from the pool' do
|
||||
expect(DockerContainerPool).to receive(:get_container).with(submission.execution_environment).and_call_original
|
||||
it 'creates a new container' do
|
||||
expect(described_class).to receive(:create_container).with(submission.execution_environment).and_call_original
|
||||
end
|
||||
|
||||
it 'creates the workspace files' do
|
||||
@ -265,10 +280,13 @@ describe DockerClient, docker: true do
|
||||
describe '#execute_test_command' do
|
||||
let(:filename) { submission.exercise.files.detect {|file| file.role == 'teacher_defined_test' || file.role == 'teacher_defined_linter' }.name_with_extension }
|
||||
|
||||
after { docker_client.send(:execute_test_command, submission, filename) }
|
||||
after do
|
||||
docker_client.send(:execute_test_command, submission, filename)
|
||||
described_class.destroy_container(docker_client.container)
|
||||
end
|
||||
|
||||
it 'takes a container from the pool' do
|
||||
expect(DockerContainerPool).to receive(:get_container).with(submission.execution_environment).and_call_original
|
||||
it 'creates a new container' do
|
||||
expect(described_class).to receive(:create_container).with(submission.execution_environment).and_call_original
|
||||
end
|
||||
|
||||
it 'creates the workspace files' do
|
||||
@ -313,6 +331,8 @@ describe DockerClient, docker: true do
|
||||
let(:container) { described_class.create_container(execution_environment) }
|
||||
let(:local_workspace_path) { described_class.local_workspace_path(container) }
|
||||
|
||||
after { described_class.destroy_container(container) }
|
||||
|
||||
it 'returns a path' do
|
||||
expect(local_workspace_path).to be_a(Pathname)
|
||||
end
|
||||
@ -332,7 +352,7 @@ describe DockerClient, docker: true do
|
||||
|
||||
describe '.mapped_ports' do
|
||||
context 'with exposed ports' do
|
||||
before { execution_environment.exposed_ports = '3000' }
|
||||
before { execution_environment.exposed_ports = [3000] }
|
||||
|
||||
it 'returns a mapping' do
|
||||
expect(described_class.mapped_ports(execution_environment)).to be_a(Hash)
|
||||
@ -357,14 +377,17 @@ describe DockerClient, docker: true do
|
||||
let(:container) { described_class.create_container(execution_environment) }
|
||||
let(:send_command) { docker_client.send(:send_command, command, container, &block) }
|
||||
|
||||
after { send_command }
|
||||
after do
|
||||
send_command
|
||||
described_class.destroy_container(container)
|
||||
end
|
||||
|
||||
it 'limits the execution time' do
|
||||
expect(Timeout).to receive(:timeout).at_least(:once).with(kind_of(Numeric)).and_call_original
|
||||
end
|
||||
|
||||
it 'provides the command to be executed as input' do
|
||||
pending('we are currently not using any input and for output server send events instead of attach.')
|
||||
pending('we are currently not using attach but rather exec.')
|
||||
expect(container).to receive(:attach).with(stdin: kind_of(StringIO))
|
||||
end
|
||||
|
||||
|
@ -3,6 +3,8 @@
|
||||
require 'rails_helper'
|
||||
|
||||
describe DockerContainerMixin do
|
||||
let(:container) { Docker::Container.send(:new, Docker::Connection.new('http://example.org', {}), 'id' => SecureRandom.hex) }
|
||||
|
||||
describe '#binds' do
|
||||
let(:binds) { [] }
|
||||
|
||||
@ -11,8 +13,8 @@ describe DockerContainerMixin do
|
||||
end
|
||||
|
||||
it 'returns the correct information' do
|
||||
allow(CONTAINER).to receive(:json).and_return('HostConfig' => {'Binds' => binds})
|
||||
expect(CONTAINER.binds).to eq(binds)
|
||||
allow(container).to receive(:json).and_return('HostConfig' => {'Binds' => binds})
|
||||
expect(container.binds).to eq(binds)
|
||||
end
|
||||
end
|
||||
|
||||
@ -25,8 +27,8 @@ describe DockerContainerMixin do
|
||||
end
|
||||
|
||||
it 'returns the correct information' do
|
||||
allow(CONTAINER).to receive(:json).and_return('HostConfig' => {'PortBindings' => port_bindings})
|
||||
expect(CONTAINER.port_bindings).to eq(port => port)
|
||||
allow(container).to receive(:json).and_return('HostConfig' => {'PortBindings' => port_bindings})
|
||||
expect(container.port_bindings).to eq(port => port)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
271  spec/lib/runner/strategy/docker_container_pool_spec.rb  Normal file
@ -0,0 +1,271 @@
# frozen_string_literal: true

require 'rails_helper'
require 'pathname'

describe Runner::Strategy::DockerContainerPool do
  let(:runner_id) { FactoryBot.attributes_for(:runner)[:runner_id] }
  let(:execution_environment) { FactoryBot.create :ruby }
  let(:container_pool) { described_class.new(runner_id, execution_environment) }
  let(:docker_container_pool_url) { 'http://localhost:1234' }
  let(:config) { {url: docker_container_pool_url, unused_runner_expiration_time: 180} }
  let(:container) { instance_double(Docker::Container) }

  before do
    allow(described_class).to receive(:config).and_return(config)
    allow(container).to receive(:id).and_return(runner_id)
  end

  # All requests handle a Faraday error the same way.
  shared_examples 'Faraday error handling' do |http_verb|
    it 'raises a runner error' do
      allow(Faraday).to receive(http_verb).and_raise(Faraday::TimeoutError)
      expect { action.call }.to raise_error(Runner::Error::FaradayError)
    end
  end

  describe '::request_from_management' do
    let(:action) { -> { described_class.request_from_management(execution_environment) } }
    let(:response_body) { nil }
    let!(:request_runner_stub) do
      WebMock
        .stub_request(:post, "#{docker_container_pool_url}/docker_container_pool/get_container/#{execution_environment.id}")
        .to_return(body: response_body, status: 200)
    end

    context 'when the DockerContainerPool returns an id' do
      let(:response_body) { {id: runner_id}.to_json }

      it 'successfully requests the DockerContainerPool' do
        action.call
        expect(request_runner_stub).to have_been_requested.once
      end

      it 'returns the received runner id' do
        id = action.call
        expect(id).to eq(runner_id)
      end
    end

    context 'when the DockerContainerPool does not return an id' do
      let(:response_body) { {}.to_json }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::NotAvailable)
      end
    end

    context 'when the DockerContainerPool returns invalid JSON' do
      let(:response_body) { '{hello}' }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse)
      end
    end

    include_examples 'Faraday error handling', :post
  end

  describe '#destroy_at_management' do
    let(:action) { -> { container_pool.destroy_at_management } }
    let!(:destroy_runner_stub) do
      WebMock
        .stub_request(:delete, "#{docker_container_pool_url}/docker_container_pool/destroy_container/#{runner_id}")
        .to_return(body: nil, status: 200)
    end

    before { allow(container_pool).to receive(:container).and_return(container) }

    it 'successfully requests the DockerContainerPool' do
      action.call
      expect(destroy_runner_stub).to have_been_requested.once
    end

    include_examples 'Faraday error handling', :delete
  end

  describe '#copy_files' do
    let(:files) { [] }
    let(:action) { -> { container_pool.copy_files(files) } }
    let(:local_path) { Pathname.new('/tmp/container20') }

    before do
      allow(container_pool).to receive(:local_workspace_path).and_return(local_path)
      allow(container_pool).to receive(:clean_workspace)
      allow(FileUtils).to receive(:chmod_R)
    end

    it 'creates the workspace directory' do
      expect(FileUtils).to receive(:mkdir_p).with(local_path)
      container_pool.copy_files(files)
    end

    it 'cleans the workspace' do
      expect(container_pool).to receive(:clean_workspace)
      container_pool.copy_files(files)
    end

    it 'sets permission bits on the workspace' do
      expect(FileUtils).to receive(:chmod_R).with('+rwtX', local_path)
      container_pool.copy_files(files)
    end

    context 'when receiving a normal file' do
      let(:file_content) { 'print("Hello World!")' }
      let(:files) { [FactoryBot.build(:file, content: file_content)] }

      it 'writes the file to disk' do
        file = instance_double(File)
        allow(File).to receive(:open).and_yield(file)
        expect(file).to receive(:write).with(file_content)
        container_pool.copy_files(files)
      end

      it 'creates the file inside the workspace' do
        expect(File).to receive(:open).with(local_path.join(files.first.filepath), 'w')
        container_pool.copy_files(files)
      end

      it 'raises an error in case of an IOError' do
        allow(File).to receive(:open).and_raise(IOError)
        expect { container_pool.copy_files(files) }.to raise_error(Runner::Error::WorkspaceError, /#{files.first.filepath}/)
      end

      it 'does not create a directory for it' do
        expect(FileUtils).not_to receive(:mkdir_p)
      end

      context 'when the file is inside a directory' do
        let(:directory) { 'temp/dir' }
        let(:files) { [FactoryBot.build(:file, path: directory)] }

        before do
          allow(File).to receive(:open)
          allow(FileUtils).to receive(:mkdir_p).with(local_path)
          allow(FileUtils).to receive(:mkdir_p).with(local_path.join(directory))
        end

        it 'cleans the directory path' do
          allow(container_pool).to receive(:local_path).and_call_original
          expect(container_pool).to receive(:local_path).with(directory).and_call_original
          container_pool.copy_files(files)
        end

        it 'creates the directory of the file' do
          expect(FileUtils).to receive(:mkdir_p).with(local_path.join(directory))
          container_pool.copy_files(files)
        end
      end
    end

    context 'when receiving a binary file' do
      let(:files) { [FactoryBot.build(:file, :image)] }

      it 'copies the file inside the workspace' do
        expect(FileUtils).to receive(:cp).with(files.first.native_file.path, local_path.join(files.first.filepath))
        container_pool.copy_files(files)
      end
    end

    context 'when receiving multiple files' do
      let(:files) { FactoryBot.build_list(:file, 3) }

      it 'creates all files' do
        files.each do |file|
          expect(File).to receive(:open).with(local_path.join(file.filepath), 'w')
        end
        container_pool.copy_files(files)
      end
    end
  end

  describe '#local_workspace_path' do
    before { allow(container_pool).to receive(:container).and_return(container) }

    it 'returns the local part of the mount binding' do
      local_path = 'tmp/container20'
      allow(container).to receive(:binds).and_return(["#{local_path}:/workspace"])
      expect(container_pool.send(:local_workspace_path)).to eq(Pathname.new(local_path))
    end
  end

  describe '#local_path' do
    let(:local_workspace) { Pathname.new('/tmp/workspace') }

    before { allow(container_pool).to receive(:local_workspace_path).and_return(local_workspace) }

    it 'raises an error for relative paths outside of the workspace' do
      expect { container_pool.send(:local_path, '../exercise.py') }.to raise_error(Runner::Error::WorkspaceError, %r{tmp/exercise.py})
    end

    it 'raises an error for absolute paths outside of the workspace' do
      expect { container_pool.send(:local_path, '/test') }.to raise_error(Runner::Error::WorkspaceError, %r{/test})
    end

    it 'removes .. from the path' do
      expect(container_pool.send(:local_path, 'test/../exercise.py')).to eq(Pathname.new('/tmp/workspace/exercise.py'))
    end

    it 'joins the given path with the local workspace path' do
      expect(container_pool.send(:local_path, 'exercise.py')).to eq(Pathname.new('/tmp/workspace/exercise.py'))
    end
  end

  describe '#clean_workspace' do
    let(:local_workspace) { instance_double(Pathname) }

    before { allow(container_pool).to receive(:local_workspace_path).and_return(local_workspace) }

    it 'removes all children of the workspace recursively' do
      children = %w[test.py exercise.rb subfolder].map {|child| Pathname.new(child) }
      allow(local_workspace).to receive(:children).and_return(children)
      expect(FileUtils).to receive(:rm_r).with(children, secure: true)
      container_pool.send(:clean_workspace)
    end

    it 'raises an error if the workspace does not exist' do
      allow(local_workspace).to receive(:children).and_raise(Errno::ENOENT)
      expect { container_pool.send(:clean_workspace) }.to raise_error(Runner::Error::WorkspaceError, /not exist/)
    end

    it 'raises an error if it lacks permission for deleting an entry' do
      allow(local_workspace).to receive(:children).and_return(['test.py'])
      allow(FileUtils).to receive(:remove_entry_secure).and_raise(Errno::EACCES)
      expect { container_pool.send(:clean_workspace) }.to raise_error(Runner::Error::WorkspaceError, /Not allowed/)
    end
  end

  describe '#container' do
    it 'raises an error if there is no container for the saved id' do
      allow(Docker::Container).to receive(:get).and_raise(Docker::Error::NotFoundError)
      expect { container_pool.send(:container) }.to raise_error(Runner::Error::RunnerNotFound)
    end

    it 'raises an error if the received container is not running' do
      allow(Docker::Container).to receive(:get).and_return(container)
      allow(container).to receive(:info).and_return({'State' => {'Running' => false}})
      expect { container_pool.send(:container) }.to raise_error(Runner::Error::RunnerNotFound)
    end

    it 'returns the received container' do
      allow(Docker::Container).to receive(:get).and_return(container)
      allow(container).to receive(:info).and_return({'State' => {'Running' => true}})
      expect(container_pool.send(:container)).to eq(container)
    end

    it 'does not request a container if one is saved' do
      container_pool.instance_variable_set(:@container, container)
      expect(Docker::Container).not_to receive(:get)
      container_pool.send(:container)
    end
  end

  describe '#attach_to_execution' do
    # TODO: add tests here

    let(:command) { 'ls' }
    let(:event_loop) { Runner::EventLoop.new }
    let(:action) { -> { container_pool.attach_to_execution(command, event_loop) } }
    let(:websocket_url) { 'ws://ws.example.com/path/to/websocket' }
  end
end
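The #local_path examples above pin down the path-sanitisation contract (resolve "..", stay inside the workspace, raise Runner::Error::WorkspaceError otherwise) without showing an implementation. Purely for orientation, here is a minimal, hypothetical sketch that would satisfy those expectations; the module stand-ins and the simple prefix check are assumptions, not code from this pull request:

require 'pathname'

# Hypothetical stand-in for the error class referenced by the specs above.
module Runner
  module Error
    class WorkspaceError < StandardError; end
  end
end

# Join a submitted path with the workspace, resolve ".." segments and
# reject anything that ends up outside of the workspace (simplified check).
def local_path(workspace, path)
  resolved = workspace.join(path).cleanpath
  unless resolved.to_s.start_with?(workspace.to_s)
    raise Runner::Error::WorkspaceError, "Path #{resolved} is outside of the workspace."
  end
  resolved
end

workspace = Pathname.new('/tmp/workspace')
local_path(workspace, 'test/../exercise.py') # => #<Pathname:/tmp/workspace/exercise.py>
begin
  local_path(workspace, '../exercise.py')
rescue Runner::Error::WorkspaceError => e
  puts e.message # "Path /tmp/exercise.py is outside of the workspace."
end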
353
spec/lib/runner/strategy/poseidon_spec.rb
Normal file
@ -0,0 +1,353 @@
# frozen_string_literal: true

require 'rails_helper'

describe Runner::Strategy::Poseidon do
  let(:runner_id) { FactoryBot.attributes_for(:runner)[:runner_id] }
  let(:execution_environment) { FactoryBot.create :ruby }
  let(:poseidon) { described_class.new(runner_id, execution_environment) }
  let(:error_message) { 'test error message' }
  let(:response_body) { nil }

  # All requests handle a BadRequest (400) response the same way.
  shared_examples 'BadRequest (400) error handling' do
    context 'when Poseidon returns BadRequest (400)' do
      let(:response_body) { {message: error_message}.to_json }
      let(:response_status) { 400 }

      it 'raises an error' do
        allow(Runner).to receive(:destroy).with(runner_id)
        expect { action.call }.to raise_error(Runner::Error::BadRequest, /#{error_message}/)
      end
    end
  end

  # Only #copy_files and #execute_command destroy the runner locally in case
  # of a BadRequest (400) response.
  shared_examples 'BadRequest (400) destroys local runner' do
    context 'when Poseidon returns BadRequest (400)' do
      let(:response_body) { {message: error_message}.to_json }
      let(:response_status) { 400 }

      it 'destroys the runner locally' do
        expect(Runner).to receive(:destroy).with(runner_id)
        expect { action.call }.to raise_error(Runner::Error::BadRequest)
      end
    end
  end

  # All requests handle an Unauthorized (401) response the same way.
  shared_examples 'Unauthorized (401) error handling' do
    context 'when Poseidon returns Unauthorized (401)' do
      let(:response_status) { 401 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::Unauthorized)
      end
    end
  end

  # All requests except creation handle a NotFound (404) response the same way.
  shared_examples 'NotFound (404) error handling' do
    context 'when Poseidon returns NotFound (404)' do
      let(:response_status) { 404 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::RunnerNotFound)
      end
    end
  end

  # All requests handle an InternalServerError (500) response the same way.
  shared_examples 'InternalServerError (500) error handling' do
    context 'when Poseidon returns InternalServerError (500)' do
      shared_examples 'InternalServerError (500) with error code' do |error_code, error_class|
        let(:response_status) { 500 }
        let(:response_body) { {message: error_message, errorCode: error_code}.to_json }

        it 'raises an error' do
          expect { action.call }.to raise_error(error_class) do |error|
            expect(error.message).to match(/#{error_message}/)
            expect(error.message).to match(/#{error_code}/)
          end
        end
      end

      context 'when error code is nomad overload' do
        include_examples(
          'InternalServerError (500) with error code',
          described_class.error_nomad_overload, Runner::Error::NotAvailable
        )
      end

      context 'when error code is not nomad overload' do
        include_examples(
          'InternalServerError (500) with error code',
          described_class.error_unknown, Runner::Error::InternalServerError
        )
      end
    end
  end

  # All requests handle an unknown response status the same way.
  shared_examples 'unknown response status error handling' do
    context 'when Poseidon returns an unknown response status' do
      let(:response_status) { 1337 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse, /#{response_status}/)
      end
    end
  end

  # All requests handle a Faraday error the same way.
  shared_examples 'Faraday error handling' do
    context 'when Faraday throws an error' do
      # The response status is not needed in this context, but the describe blocks this context
      # is embedded in expect this variable to be set in order to properly stub requests to the runner management.
      let(:response_status) { -1 }

      it 'raises an error' do
        faraday_connection = instance_double 'Faraday::Connection'
        allow(described_class).to receive(:http_connection).and_return(faraday_connection)
        %i[post patch delete].each {|message| allow(faraday_connection).to receive(message).and_raise(Faraday::TimeoutError) }
        expect { action.call }.to raise_error(Runner::Error::FaradayError)
      end
    end
  end

  describe '::sync_environment' do
    let(:action) { -> { described_class.sync_environment(execution_environment) } }
    let(:execution_environment) { FactoryBot.create(:ruby) }

    it 'makes the correct request to Poseidon' do
      faraday_connection = instance_double 'Faraday::Connection'
      allow(described_class).to receive(:http_connection).and_return(faraday_connection)
      allow(faraday_connection).to receive(:put).and_return(Faraday::Response.new(status: 201))
      action.call
      expect(faraday_connection).to have_received(:put) do |url, body|
        expect(url).to match(%r{execution-environments/#{execution_environment.id}\z})
        expect(body).to eq(execution_environment.to_json)
      end
    end

    shared_examples 'returns true when the api request was successful' do |status|
      it "returns true on status #{status}" do
        faraday_connection = instance_double 'Faraday::Connection'
        allow(described_class).to receive(:http_connection).and_return(faraday_connection)
        allow(faraday_connection).to receive(:put).and_return(Faraday::Response.new(status: status))
        expect(action.call).to be_truthy
      end
    end

    shared_examples 'returns false when the api request failed' do |status|
      it "returns false on status #{status}" do
        faraday_connection = instance_double 'Faraday::Connection'
        allow(described_class).to receive(:http_connection).and_return(faraday_connection)
        allow(faraday_connection).to receive(:put).and_return(Faraday::Response.new(status: status))
        expect(action.call).to be_falsey
      end
    end

    [201, 204].each do |status|
      include_examples 'returns true when the api request was successful', status
    end

    [400, 500].each do |status|
      include_examples 'returns false when the api request failed', status
    end

    it 'returns false if Faraday raises an error' do
      faraday_connection = instance_double 'Faraday::Connection'
      allow(described_class).to receive(:http_connection).and_return(faraday_connection)
      allow(faraday_connection).to receive(:put).and_raise(Faraday::TimeoutError)
      expect(action.call).to be_falsey
    end
  end

  describe '::request_from_management' do
    let(:action) { -> { described_class.request_from_management(execution_environment) } }
    let!(:request_runner_stub) do
      WebMock
        .stub_request(:post, "#{described_class.config[:url]}/runners")
        .with(
          body: {
            executionEnvironmentId: execution_environment.id,
            inactivityTimeout: described_class.config[:unused_runner_expiration_time].seconds,
          },
          headers: {'Content-Type' => 'application/json'}
        )
        .to_return(body: response_body, status: response_status)
    end

    context 'when Poseidon returns Ok (200) with an id' do
      let(:response_body) { {runnerId: runner_id}.to_json }
      let(:response_status) { 200 }

      it 'successfully requests Poseidon' do
        action.call
        expect(request_runner_stub).to have_been_requested.once
      end

      it 'returns the received runner id' do
        id = action.call
        expect(id).to eq(runner_id)
      end
    end

    context 'when Poseidon returns Ok (200) without an id' do
      let(:response_body) { {}.to_json }
      let(:response_status) { 200 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse)
      end
    end

    context 'when Poseidon returns Ok (200) with invalid JSON' do
      let(:response_body) { '{hello}' }
      let(:response_status) { 200 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse)
      end
    end

    include_examples 'BadRequest (400) error handling'
    include_examples 'Unauthorized (401) error handling'

    context 'when Poseidon returns NotFound (404)' do
      let(:response_status) { 404 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::EnvironmentNotFound)
      end
    end

    include_examples 'InternalServerError (500) error handling'
    include_examples 'unknown response status error handling'
    include_examples 'Faraday error handling'
  end

  describe '#execute_command' do
    let(:command) { 'ls' }
    let(:action) { -> { poseidon.send(:execute_command, command) } }
    let(:websocket_url) { 'ws://ws.example.com/path/to/websocket' }
    let!(:execute_command_stub) do
      WebMock
        .stub_request(:post, "#{described_class.config[:url]}/runners/#{runner_id}/execute")
        .with(
          body: {command: command, timeLimit: execution_environment.permitted_execution_time},
          headers: {'Content-Type' => 'application/json'}
        )
        .to_return(body: response_body, status: response_status)
    end

    context 'when Poseidon returns Ok (200) with a websocket url' do
      let(:response_status) { 200 }
      let(:response_body) { {websocketUrl: websocket_url}.to_json }

      it 'schedules an execution in Poseidon' do
        action.call
        expect(execute_command_stub).to have_been_requested.once
      end

      it 'returns the url' do
        url = action.call
        expect(url).to eq(websocket_url)
      end
    end

    context 'when Poseidon returns Ok (200) without a websocket url' do
      let(:response_body) { {}.to_json }
      let(:response_status) { 200 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse)
      end
    end

    context 'when Poseidon returns Ok (200) with invalid JSON' do
      let(:response_body) { '{hello}' }
      let(:response_status) { 200 }

      it 'raises an error' do
        expect { action.call }.to raise_error(Runner::Error::UnexpectedResponse)
      end
    end

    include_examples 'BadRequest (400) error handling'
    include_examples 'BadRequest (400) destroys local runner'
    include_examples 'Unauthorized (401) error handling'
    include_examples 'NotFound (404) error handling'
    include_examples 'InternalServerError (500) error handling'
    include_examples 'unknown response status error handling'
    include_examples 'Faraday error handling'
  end

  describe '#destroy_at_management' do
    let(:action) { -> { poseidon.destroy_at_management } }
    let!(:destroy_stub) do
      WebMock
        .stub_request(:delete, "#{described_class.config[:url]}/runners/#{runner_id}")
        .to_return(body: response_body, status: response_status)
    end

    context 'when Poseidon returns NoContent (204)' do
      let(:response_status) { 204 }

      it 'deletes the runner from Poseidon' do
        action.call
        expect(destroy_stub).to have_been_requested.once
      end
    end

    include_examples 'Unauthorized (401) error handling'
    include_examples 'NotFound (404) error handling'
    include_examples 'InternalServerError (500) error handling'
    include_examples 'unknown response status error handling'
    include_examples 'Faraday error handling'
  end

  describe '#copy_files' do
    let(:file_content) { 'print("Hello World!")' }
    let(:file) { FactoryBot.build(:file, content: file_content) }
    let(:action) { -> { poseidon.copy_files([file]) } }
    let(:encoded_file_content) { Base64.strict_encode64(file.content) }
    let!(:copy_files_stub) do
      WebMock
        .stub_request(:patch, "#{described_class.config[:url]}/runners/#{runner_id}/files")
        .with(
          body: {copy: [{path: file.filepath, content: encoded_file_content}], delete: ['./']},
          headers: {'Content-Type' => 'application/json'}
        )
        .to_return(body: response_body, status: response_status)
    end

    context 'when Poseidon returns NoContent (204)' do
      let(:response_status) { 204 }

      it 'sends the files to Poseidon' do
        action.call
        expect(copy_files_stub).to have_been_requested.once
      end
    end

    include_examples 'BadRequest (400) error handling'
    include_examples 'BadRequest (400) destroys local runner'
    include_examples 'Unauthorized (401) error handling'
    include_examples 'NotFound (404) error handling'
    include_examples 'InternalServerError (500) error handling'
    include_examples 'unknown response status error handling'
    include_examples 'Faraday error handling'
  end

  describe '#attach_to_execution' do
    # TODO: add tests here

    let(:command) { 'ls' }
    let(:event_loop) { Runner::EventLoop.new }
    let(:action) { -> { poseidon.attach_to_execution(command, event_loop) } }
    let(:websocket_url) { 'ws://ws.example.com/path/to/websocket' }
  end
end
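Taken together, the shared examples above describe how Poseidon's HTTP statuses are expected to surface as Runner::Error subclasses. The following condensed sketch is for orientation only; the error classes are stubbed locally and the overload constant is a placeholder, so none of this is code from the pull request:

# Hypothetical stand-ins; the real classes live in the application.
module Runner
  module Error
    class BadRequest < StandardError; end
    class Unauthorized < StandardError; end
    class RunnerNotFound < StandardError; end
    class NotAvailable < StandardError; end
    class InternalServerError < StandardError; end
    class UnexpectedResponse < StandardError; end
  end
end

ERROR_NOMAD_OVERLOAD = 'NOMAD_OVERLOAD' # placeholder value for illustration

# Condensed view of the status handling the shared examples expect.
def handle_poseidon_status(status, message: nil, error_code: nil)
  case status
  when 200, 204 then nil # success; the caller parses the response body
  when 400 then raise Runner::Error::BadRequest, message
  when 401 then raise Runner::Error::Unauthorized
  when 404 then raise Runner::Error::RunnerNotFound
  when 500
    raise Runner::Error::NotAvailable, message if error_code == ERROR_NOMAD_OVERLOAD
    raise Runner::Error::InternalServerError, "#{error_code}: #{message}"
  else
    raise Runner::Error::UnexpectedResponse, "Unexpected response status: #{status}"
  end
end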
@ -5,10 +5,11 @@ require 'rails_helper'
describe ExecutionEnvironment do
  let(:execution_environment) { described_class.create.tap {|execution_environment| execution_environment.update(network_enabled: nil) } }

  it 'validates that the Docker image works', docker: true do
  it 'validates that the Docker image works' do
    allow(execution_environment).to receive(:validate_docker_image?).and_return(true)
    expect(execution_environment).to receive(:working_docker_image?)
    allow(execution_environment).to receive(:working_docker_image?).and_return(true)
    execution_environment.update(docker_image: FactoryBot.attributes_for(:ruby)[:docker_image])
    expect(execution_environment).to have_received(:working_docker_image?)
  end

  it 'validates the presence of a Docker image name' do
@ -16,7 +17,7 @@ describe ExecutionEnvironment do
  end

  it 'validates the minimum value of the memory limit' do
    execution_environment.update(memory_limit: DockerClient::MINIMUM_MEMORY_LIMIT / 2)
    execution_environment.update(memory_limit: ExecutionEnvironment::MINIMUM_MEMORY_LIMIT / 2)
    expect(execution_environment.errors[:memory_limit]).to be_present
  end

@ -30,6 +31,21 @@ describe ExecutionEnvironment do
    expect(execution_environment.errors[:memory_limit]).to be_present
  end

  it 'validates the minimum value of the cpu limit' do
    execution_environment.update(cpu_limit: 0)
    expect(execution_environment.errors[:cpu_limit]).to be_present
  end

  it 'validates that cpu limit is an integer' do
    execution_environment.update(cpu_limit: Math::PI)
    expect(execution_environment.errors[:cpu_limit]).to be_present
  end

  it 'validates the presence of a cpu limit' do
    execution_environment.update(cpu_limit: nil)
    expect(execution_environment.errors[:cpu_limit]).to be_present
  end

  it 'validates the presence of a name' do
    expect(execution_environment.errors[:name]).to be_present
  end
@ -69,6 +85,14 @@ describe ExecutionEnvironment do
    expect(execution_environment.errors[:user_type]).to be_present
  end

  it 'validates the format of the exposed ports' do
    execution_environment.update(exposed_ports: '1,')
    expect(execution_environment.errors[:exposed_ports]).to be_present

    execution_environment.update(exposed_ports: '1,a')
    expect(execution_environment.errors[:exposed_ports]).to be_present
  end

  describe '#valid_test_setup?' do
    context 'with a test command and a testing framework' do
      before { execution_environment.update(test_command: FactoryBot.attributes_for(:ruby)[:test_command], testing_framework: FactoryBot.attributes_for(:ruby)[:testing_framework]) }
@ -121,25 +145,29 @@ describe ExecutionEnvironment do
    end
  end

  describe '#working_docker_image?', docker: true do
  describe '#working_docker_image?' do
    let(:working_docker_image?) { execution_environment.send(:working_docker_image?) }
    let(:runner) { instance_double 'runner' }

    before { allow(DockerClient).to receive(:find_image_by_tag).and_return(Object.new) }
    before do
      allow(Runner).to receive(:for).with(execution_environment.author, execution_environment).and_return runner
    end

    it 'instantiates a Docker client' do
      expect(DockerClient).to receive(:new).with(execution_environment: execution_environment).and_call_original
      allow_any_instance_of(DockerClient).to receive(:execute_arbitrary_command).and_return({})
    it 'instantiates a Runner' do
      allow(runner).to receive(:execute_command).and_return({})
      working_docker_image?
      expect(runner).to have_received(:execute_command).once
    end

    it 'executes the validation command' do
      allow_any_instance_of(DockerClient).to receive(:execute_arbitrary_command).with(ExecutionEnvironment::VALIDATION_COMMAND).and_return({})
      allow(runner).to receive(:execute_command).and_return({})
      working_docker_image?
      expect(runner).to have_received(:execute_command).with(ExecutionEnvironment::VALIDATION_COMMAND)
    end

    context 'when the command produces an error' do
      it 'adds an error' do
        allow_any_instance_of(DockerClient).to receive(:execute_arbitrary_command).and_return(stderr: 'command not found')
        allow(runner).to receive(:execute_command).and_return(stderr: 'command not found')
        working_docker_image?
        expect(execution_environment.errors[:docker_image]).to be_present
      end
@ -147,10 +175,26 @@ describe ExecutionEnvironment do

    context 'when the Docker client produces an error' do
      it 'adds an error' do
        allow_any_instance_of(DockerClient).to receive(:execute_arbitrary_command).and_raise(DockerClient::Error)
        allow(runner).to receive(:execute_command).and_raise(Runner::Error)
        working_docker_image?
        expect(execution_environment.errors[:docker_image]).to be_present
      end
    end
  end

  describe '#exposed_ports_list' do
    it 'returns an empty string if no ports are exposed' do
      execution_environment.exposed_ports = []
      expect(execution_environment.exposed_ports_list).to eq('')
    end

    it 'returns a string with comma-separated integers representing the exposed ports' do
      execution_environment.exposed_ports = [1, 2, 3]
      expect(execution_environment.exposed_ports_list).to eq('1, 2, 3')

      execution_environment.exposed_ports.each do |port|
        expect(execution_environment.exposed_ports_list).to include(port.to_s)
      end
    end
  end
end
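From the updated examples above one can infer the rough shape of the reworked #working_docker_image? validation: obtain a runner for the environment's author, run VALIDATION_COMMAND through it, and record an error on :docker_image when the command writes to stderr or the runner raises. A hypothetical sketch inferred from the expectations only (the error wording is made up, and this is not the actual model code):

def working_docker_image?
  # Request a runner for the environment's author via the runner management.
  runner = Runner.for(author, self)
  output = runner.execute_command(VALIDATION_COMMAND)
  # Record a validation error when the command reports anything on stderr.
  errors.add(:docker_image, "is not working: #{output[:stderr]}") if output[:stderr].present?
rescue Runner::Error => e
  # Any runner failure also marks the image as not working.
  errors.add(:docker_image, "is not working: #{e.message}")
end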
277
spec/models/runner_spec.rb
Normal file
@ -0,0 +1,277 @@
# frozen_string_literal: true

require 'rails_helper'

describe Runner do
  let(:runner_id) { FactoryBot.attributes_for(:runner)[:runner_id] }
  let(:strategy_class) { described_class.strategy_class }
  let(:strategy) { instance_double(strategy_class) }

  describe 'attribute validation' do
    let(:runner) { FactoryBot.create :runner }

    it 'validates the presence of the runner id' do
      described_class.skip_callback(:validation, :before, :request_id)
      runner.update(runner_id: nil)
      expect(runner.errors[:runner_id]).to be_present
      described_class.set_callback(:validation, :before, :request_id)
    end

    it 'validates the presence of an execution environment' do
      runner.update(execution_environment: nil)
      expect(runner.errors[:execution_environment]).to be_present
    end

    it 'validates the presence of a user' do
      runner.update(user: nil)
      expect(runner.errors[:user]).to be_present
    end
  end

  describe '::strategy_class' do
    shared_examples 'uses the strategy defined in the constant' do |strategy, strategy_class|
      let(:codeocean_config) { instance_double(CodeOcean::Config) }
      let(:runner_management_config) { {runner_management: {enabled: true, strategy: strategy}} }

      before do
        # Ensure the memoized helper is reset
        described_class.instance_variable_set :@strategy_class, nil
        allow(CodeOcean::Config).to receive(:new).with(:code_ocean).and_return(codeocean_config)
        allow(codeocean_config).to receive(:read).and_return(runner_management_config)
      end

      it "uses #{strategy_class} as strategy class for constant #{strategy}" do
        expect(described_class.strategy_class).to eq(strategy_class)
      end
    end

    available_strategies = {
      poseidon: Runner::Strategy::Poseidon,
      docker_container_pool: Runner::Strategy::DockerContainerPool,
    }
    available_strategies.each do |strategy, strategy_class|
      it_behaves_like 'uses the strategy defined in the constant', strategy, strategy_class
    end
  end

  describe '#destroy_at_management' do
    let(:runner) { described_class.create }

    before do
      allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
      allow(strategy_class).to receive(:new).and_return(strategy)
    end

    it 'delegates to its strategy' do
      expect(strategy).to receive(:destroy_at_management)
      runner.destroy_at_management
    end
  end

  describe '#attach_to_execution' do
    let(:runner) { described_class.create }
    let(:command) { 'ls' }
    let(:event_loop) { instance_double(Runner::EventLoop) }
    let(:connection) { instance_double(Runner::Connection) }

    before do
      allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
      allow(strategy_class).to receive(:new).and_return(strategy)
      allow(event_loop).to receive(:wait)
      allow(connection).to receive(:error).and_return(nil)
      allow(Runner::EventLoop).to receive(:new).and_return(event_loop)
      allow(strategy).to receive(:attach_to_execution).and_return(connection)
    end

    it 'delegates to its strategy' do
      expect(strategy).to receive(:attach_to_execution)
      runner.attach_to_execution(command)
    end

    it 'returns the execution time' do
      starting_time = Time.zone.now
      execution_time = runner.attach_to_execution(command)
      test_time = Time.zone.now - starting_time
      expect(execution_time).to be_between(0.0, test_time).exclusive
    end

    it 'blocks until the event loop is stopped' do
      allow(event_loop).to receive(:wait) { sleep(1) }
      execution_time = runner.attach_to_execution(command)
      expect(execution_time).to be > 1
    end

    context 'when an error is returned' do
      let(:error_message) { 'timeout' }
      let(:error) { Runner::Error::ExecutionTimeout.new(error_message) }

      before { allow(connection).to receive(:error).and_return(error) }

      it 'raises the error' do
        expect { runner.attach_to_execution(command) }.to raise_error do |raised_error|
          expect(raised_error).to be_a(Runner::Error::ExecutionTimeout)
          expect(raised_error.message).to eq(error_message)
        end
      end

      it 'attaches the execution time to the error' do
        starting_time = Time.zone.now
        expect { runner.attach_to_execution(command) }.to raise_error do |raised_error|
          test_time = Time.zone.now - starting_time
          expect(raised_error.execution_duration).to be_between(0.0, test_time).exclusive
        end
      end
    end
  end

  describe '#copy_files' do
    let(:runner) { described_class.create }

    before do
      allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
      allow(strategy_class).to receive(:new).and_return(strategy)
    end

    it 'delegates to its strategy' do
      expect(strategy).to receive(:copy_files).once
      runner.copy_files(nil)
    end

    context 'when a RunnerNotFound exception is raised' do
      before do
        was_called = false
        allow(strategy).to receive(:copy_files) do
          unless was_called
            was_called = true
            raise Runner::Error::RunnerNotFound.new
          end
        end
      end

      it 'requests a new id' do
        expect(runner).to receive(:request_new_id)
        runner.copy_files(nil)
      end

      it 'calls copy_files twice' do
        # copy_files is called again after a new runner was requested.
        expect(strategy).to receive(:copy_files).twice
        runner.copy_files(nil)
      end
    end
  end

  describe 'creation' do
    let(:user) { FactoryBot.create :external_user }
    let(:execution_environment) { FactoryBot.create :ruby }
    let(:create_action) { -> { described_class.create(user: user, execution_environment: execution_environment) } }

    it 'requests a runner id from the runner management' do
      expect(strategy_class).to receive(:request_from_management)
      create_action.call
    end

    it 'returns a valid runner' do
      allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
      expect(create_action.call).to be_valid
    end

    it 'sets the strategy' do
      allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
      strategy = strategy_class.new(runner_id, execution_environment)
      allow(strategy_class).to receive(:new).with(runner_id, execution_environment).and_return(strategy)
      runner = create_action.call
      expect(runner.strategy).to eq(strategy)
    end

    it 'does not call the runner management again while a runner id is set' do
      expect(strategy_class).to receive(:request_from_management).and_return(runner_id).once
      runner = create_action.call
      runner.update(user: FactoryBot.create(:external_user))
    end
  end

  describe '#request_new_id' do
    let(:runner) { FactoryBot.create :runner }

    context 'when the environment is available in the runner management' do
      it 'requests the runner management' do
        expect(strategy_class).to receive(:request_from_management)
        runner.send(:request_new_id)
      end

      it 'updates the runner id' do
        allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
        runner.send(:request_new_id)
        expect(runner.runner_id).to eq(runner_id)
      end

      it 'updates the strategy' do
        allow(strategy_class).to receive(:request_from_management).and_return(runner_id)
        strategy = strategy_class.new(runner_id, runner.execution_environment)
        allow(strategy_class).to receive(:new).with(runner_id, runner.execution_environment).and_return(strategy)
        runner.send(:request_new_id)
        expect(runner.strategy).to eq(strategy)
      end
    end

    context 'when the environment could not be found in the runner management' do
      let(:environment_id) { runner.execution_environment.id }

      before { allow(strategy_class).to receive(:request_from_management).and_raise(Runner::Error::EnvironmentNotFound) }

      it 'syncs the execution environment' do
        expect(strategy_class).to receive(:sync_environment).with(runner.execution_environment)
        runner.send(:request_new_id)
      rescue Runner::Error::EnvironmentNotFound
        # Ignored because this error is expected (see tests below).
      end

      it 'raises an error when the environment could be synced' do
        allow(strategy_class).to receive(:sync_environment).with(runner.execution_environment).and_return(true)
        expect { runner.send(:request_new_id) }.to raise_error(Runner::Error::EnvironmentNotFound, /#{environment_id}.*successfully synced/)
      end

      it 'raises an error when the environment could not be synced' do
        allow(strategy_class).to receive(:sync_environment).with(runner.execution_environment).and_return(false)
        expect { runner.send(:request_new_id) }.to raise_error(Runner::Error::EnvironmentNotFound, /#{environment_id}.*could not be synced/)
      end
    end
  end

  describe '::for' do
    let(:user) { FactoryBot.create :external_user }
    let(:exercise) { FactoryBot.create :fibonacci }

    context 'when the runner could not be saved' do
      before { allow(strategy_class).to receive(:request_from_management).and_return(nil) }

      it 'raises an error' do
        expect { described_class.for(user, exercise.execution_environment) }.to raise_error(Runner::Error::Unknown, /could not be saved/)
      end
    end

    context 'when a runner already exists' do
      let!(:existing_runner) { FactoryBot.create(:runner, user: user, execution_environment: exercise.execution_environment) }

      it 'returns the existing runner' do
        new_runner = described_class.for(user, exercise.execution_environment)
        expect(new_runner).to eq(existing_runner)
      end

      it 'sets the strategy' do
        runner = described_class.for(user, exercise.execution_environment)
        expect(runner.strategy).to be_present
      end
    end

    context 'when no runner exists' do
      before { allow(strategy_class).to receive(:request_from_management).and_return(runner_id) }

      it 'returns a new runner' do
        runner = described_class.for(user, exercise.execution_environment)
        expect(runner).to be_valid
      end
    end
  end
end
@ -141,4 +141,35 @@ describe Submission do
      end
    end
  end

  describe '#calculate_score' do
    let(:runner) { FactoryBot.create :runner }

    before do
      allow(Runner).to receive(:for).and_return(runner)
      allow(runner).to receive(:copy_files)
      allow(runner).to receive(:attach_to_execution).and_return(1.0)
    end

    after { submission.calculate_score }

    it 'executes every teacher-defined test file' do
      allow(submission).to receive(:combine_file_scores)
      submission.collect_files.select(&:teacher_defined_assessment?).each do |file|
        expect(submission).to receive(:score_file).with(any_args, file)
      end
    end

    it 'scores the submission' do
      expect(submission).to receive(:combine_file_scores)
    end
  end

  describe '#combine_file_scores' do
    after { submission.send(:combine_file_scores, []) }

    it 'assigns a score to the submissions' do
      expect(submission).to receive(:update).with(score: anything)
    end
  end
end
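For orientation, the runner and submission examples above exercise the runner abstraction roughly like this; variable names are placeholders and the snippet is an editorial illustration, not code from this pull request:

# Reuse an existing runner for this user and environment, or request a new one
# from the configured runner management (Poseidon or DockerContainerPool).
runner = Runner.for(current_user, exercise.execution_environment)

# Ship the submission's files into the runner's workspace.
runner.copy_files(submission.collect_files)

# Run a command; this blocks until the event loop stops and returns the
# execution duration in seconds.
duration = runner.attach_to_execution('ls')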
@ -58,4 +58,20 @@ describe ExecutionEnvironmentPolicy do
      end
    end
  end

  permissions(:sync_all_to_runner_management?) do
    it 'grants access to the admin' do
      expect(policy).to permit(FactoryBot.build(:admin))
    end

    shared_examples 'it does not grant access' do |user|
      it "does not grant access to a user with role #{user.role}" do
        expect(policy).not_to permit(user)
      end
    end

    %i[teacher external_user].each do |user|
      include_examples 'it does not grant access', FactoryBot.build(user)
    end
  end
end
@ -1,27 +0,0 @@
# frozen_string_literal: true

CONTAINER = Docker::Container.send(:new, Docker::Connection.new('http://example.org', {}), 'id' => SecureRandom.hex)
IMAGE = Docker::Image.new(Docker::Connection.new('http://example.org', {}), 'id' => SecureRandom.hex, 'RepoTags' => [FactoryBot.attributes_for(:ruby)[:docker_image]])

RSpec.configure do |config|
  config.before(:each) do |example|
    unless example.metadata[:docker]
      allow(DockerClient).to receive(:check_availability!).and_return(true)
      allow(DockerClient).to receive(:create_container).and_return(CONTAINER)
      allow(DockerClient).to receive(:find_image_by_tag).and_return(IMAGE)
      allow(DockerClient).to receive(:image_tags).and_return([IMAGE])
      allow(DockerClient).to receive(:local_workspace_path).and_return(Dir.mktmpdir)
      allow_any_instance_of(DockerClient).to receive(:send_command).and_return({})
      allow_any_instance_of(ExecutionEnvironment).to receive(:working_docker_image?)
    end
  end

  config.after(:suite) do
    examples = RSpec.world.filtered_examples.values.flatten
    has_docker_tests = examples.any? {|example| example.metadata[:docker] }
    next unless has_docker_tests

    FileUtils.rm_rf(Rails.root.join('tmp/files/test'))
    `which docker && test -n "$(docker ps --all --quiet)" && docker rm --force $(docker ps --all --quiet)`
  end
end