Apply automatic rubocop fixes

This commit is contained in:
Sebastian Serth
2021-05-14 10:51:44 +02:00
parent fe4000916c
commit 6cbecb5b39
440 changed files with 2705 additions and 1853 deletions

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
module ActiveModel
module Validations
class BooleanPresenceValidator < EachValidator

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
class Assessor
MAXIMUM_SCORE = 1
@ -9,9 +11,9 @@ class Assessor
end
def calculate_score(test_outcome)
score = 0.0;
if(test_outcome[:passed].to_f != 0.0 && test_outcome[:count].to_f != 0.0)
score = (test_outcome[:passed].to_f / test_outcome[:count].to_f)
score = 0.0
if test_outcome[:passed].to_f != 0.0 && test_outcome[:count].to_f != 0.0
score = (test_outcome[:passed].to_f / test_outcome[:count])
# prevent negative scores
score = [0.0, score].max
end
@ -23,7 +25,7 @@ class Assessor
if options[:execution_environment].testing_framework?
@testing_framework_adapter = Kernel.const_get(options[:execution_environment].testing_framework).new
else
fail(Error, 'No testing framework adapter set!')
raise Error.new('No testing framework adapter set!')
end
end

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
module CodeOcean
class Config
def initialize(filename)
@ -7,10 +9,10 @@ module CodeOcean
def read(options = {})
path = Rails.root.join('config', "#{@filename}.yml#{options[:erb] ? '.erb' : ''}")
if ::File.exist?(path)
content = options[:erb] ? YAML.load(ERB.new(::File.new(path, 'r').read).result) : YAML.load_file(path)
content = options[:erb] ? YAML.safe_load(ERB.new(::File.new(path, 'r').read).result) : YAML.load_file(path)
content[Rails.env].with_indifferent_access
else
fail(Error, "Configuration file not found: #{path}")
raise Error.new("Configuration file not found: #{path}")
end
end

View File

@ -1,21 +1,23 @@
# Adapter that parses console output of the Catch2 C++ testing framework
# into the hash format expected by the surrounding assessment code.
class CppCatch2Adapter < TestingFrameworkAdapter
# Matches the all-passed summary line; capture 1 is the test case count.
# NOTE(review): assumes Catch2 prints "... in <n> test case(s)" on full success — confirm against real output.
ALL_PASSED_REGEXP = /in\s+(\d+)\s+test case/
# Capture 1: total number of test cases reported in the failure summary.
COUNT_REGEXP = /test cases:\s+(\d+)/
# Capture 1: number of failed test cases from the summary table.
FAILURES_REGEXP = / \|\s+(\d+)\s+failed/
# Captures context and message of the first "error:" line, if any.
ASSERTION_ERROR_REGEXP = /\n(.+)error:(.+);/
# Human-readable framework name used for adapter lookup/display.
def self.framework_name
'CppCatch2'
end
# Parses the framework's stdout (output[:stdout]) and returns a result hash:
# on full success {count:, passed:}; otherwise {count:, failed:, error_messages:}.
# Missing matches degrade to 0 counts / empty messages via the try chains.
def parse_output(output)
if ALL_PASSED_REGEXP.match(output[:stdout])
# Regexp.last_match refers to the match performed in the condition above.
{count: Regexp.last_match(1).to_i, passed: Regexp.last_match(1).to_i}
else
count = COUNT_REGEXP.match(output[:stdout]).try(:captures).try(:first).try(:to_i) || 0
failed = FAILURES_REGEXP.match(output[:stdout]).try(:captures).try(:first).try(:to_i) || 0
error_matches = ASSERTION_ERROR_REGEXP.match(output[:stdout]).try(:captures) || []
{count: count, failed: failed, error_messages: error_matches}
end
end
end
# frozen_string_literal: true
# Adapter that parses console output of the Catch2 C++ testing framework
# into the hash format expected by the surrounding assessment code.
class CppCatch2Adapter < TestingFrameworkAdapter
  # Matches the all-passed summary line; capture 1 is the test case count.
  # NOTE(review): assumes Catch2 prints "... in <n> test case(s)" on full success — confirm against real output.
  ALL_PASSED_REGEXP = /in\s+(\d+)\s+test case/.freeze
  # Capture 1: total number of test cases reported in the failure summary.
  COUNT_REGEXP = /test cases:\s+(\d+)/.freeze
  # Capture 1: number of failed test cases from the summary table.
  FAILURES_REGEXP = / \|\s+(\d+)\s+failed/.freeze
  # Captures context and message of the first "error:" line, if any.
  ASSERTION_ERROR_REGEXP = /\n(.+)error:(.+);/.freeze

  # Human-readable framework name used for adapter lookup/display.
  def self.framework_name
    'CppCatch2'
  end

  # Parses the framework's stdout (output[:stdout]) and returns a result hash:
  # on full success {count:, passed:}; otherwise {count:, failed:, error_messages:}.
  # Missing matches degrade to 0 counts / empty messages via safe navigation.
  def parse_output(output)
    stdout = output[:stdout]
    all_passed = ALL_PASSED_REGEXP.match(stdout)
    if all_passed
      total = all_passed[1].to_i
      {count: total, passed: total}
    else
      total = COUNT_REGEXP.match(stdout)&.captures&.first&.to_i || 0
      failed_count = FAILURES_REGEXP.match(stdout)&.captures&.first&.to_i || 0
      messages = ASSERTION_ERROR_REGEXP.match(stdout)&.captures || []
      {count: total, failed: failed_count, error_messages: messages}
    end
  end
end

View File

@ -11,7 +11,7 @@ class DockerClient
CONTAINER_WORKSPACE_PATH = '/workspace' # '/home/python/workspace' #'/tmp/workspace'
DEFAULT_MEMORY_LIMIT = 256
# Ralf: I suggest replacing this with the environment variable. Ask Hauke why this is not the case!
LOCAL_WORKSPACE_ROOT = File.expand_path(self.config[:workspace_root])
LOCAL_WORKSPACE_ROOT = File.expand_path(config[:workspace_root])
MINIMUM_MEMORY_LIMIT = 4
RECYCLE_CONTAINERS = false
RETRY_COUNT = 2
@ -19,14 +19,13 @@ class DockerClient
MAXIMUM_CONTAINER_LIFETIME = 20.minutes
SELF_DESTROY_GRACE_PERIOD = 2.minutes
attr_reader :container
attr_reader :socket
attr_reader :container, :socket
attr_accessor :tubesock
def self.check_availability!
Timeout.timeout(config[:connection_timeout]) { Docker.version }
rescue Excon::Errors::SocketError, Timeout::Error
raise(Error, "The Docker host at #{Docker.url} is not reachable!")
raise Error.new("The Docker host at #{Docker.url} is not reachable!")
end
def self.clean_container_workspace(container)
@ -37,9 +36,9 @@ class DockerClient
if local_workspace_path && Pathname.new(local_workspace_path).exist?
Pathname.new(local_workspace_path).children.each do |p|
p.rmtree
rescue Errno::ENOENT, Errno::EACCES => error
Sentry.capture_exception(error)
Rails.logger.error("clean_container_workspace: Got #{error.class.to_s}: #{error.to_s}")
rescue Errno::ENOENT, Errno::EACCES => e
Sentry.capture_exception(e)
Rails.logger.error("clean_container_workspace: Got #{e.class}: #{e}")
end
# FileUtils.rmdir(Pathname.new(local_workspace_path))
end
@ -49,7 +48,7 @@ class DockerClient
{
class_name: File.basename(filename, File.extname(filename)).camelize,
filename: filename,
module_name: File.basename(filename, File.extname(filename)).underscore
module_name: File.basename(filename, File.extname(filename)).underscore,
}
end
@ -70,32 +69,32 @@ class DockerClient
'Binds' => mapped_directories(local_workspace_path),
'PortBindings' => mapped_ports(execution_environment),
# Resource limitations.
'NanoCPUs' => 4 * 1000000000, # CPU quota in units of 10^-9 CPUs.
'NanoCPUs' => 4 * 1_000_000_000, # CPU quota in units of 10^-9 CPUs.
'PidsLimit' => 100,
'KernelMemory' => execution_environment.memory_limit.megabytes, # if below Memory, the Docker host (!) might experience an OOM
'Memory' => execution_environment.memory_limit.megabytes,
'MemorySwap' => execution_environment.memory_limit.megabytes, # same value as Memory to disable Swap
'OomScoreAdj' => 500
'OomScoreAdj' => 500,
}
end
def create_socket(container, stderr = false)
# TODO: factor out query params
# todo separate stderr
query_params = 'logs=0&stream=1&' + (stderr ? 'stderr=1' : 'stdout=1&stdin=1')
query_params = "logs=0&stream=1&#{stderr ? 'stderr=1' : 'stdout=1&stdin=1'}"
# Headers are required by Docker
headers = {'Origin' => 'http://localhost'}
socket_url = DockerClient.config['ws_host'] + '/v1.27/containers/' + @container.id + '/attach/ws?' + query_params
socket_url = "#{DockerClient.config['ws_host']}/v1.27/containers/#{@container.id}/attach/ws?#{query_params}"
# The ping value is measured in seconds and specifies how often a Ping frame should be sent.
# Internally, Faye::WebSocket uses EventMachine and the ping value is used to wake the EventMachine thread
socket = Faye::WebSocket::Client.new(socket_url, [], headers: headers, ping: 0.1)
Rails.logger.debug 'Opening Websocket on URL ' + socket_url
Rails.logger.debug "Opening Websocket on URL #{socket_url}"
socket.on :error do |event|
Rails.logger.info 'Websocket error: ' + event.message.to_s
Rails.logger.info "Websocket error: #{event.message}"
end
socket.on :close do |_event|
Rails.logger.info 'Websocket closed.'
@ -117,10 +116,10 @@ class DockerClient
# this is however not guaranteed and caused issues on the server already. Therefore create the necessary folders manually!
local_workspace_path = generate_local_workspace_path
FileUtils.mkdir(local_workspace_path)
FileUtils.chmod_R(0777, local_workspace_path)
FileUtils.chmod_R(0o777, local_workspace_path)
container = Docker::Container.create(container_creation_options(execution_environment, local_workspace_path))
container.start
container.start_time = Time.now
container.start_time = Time.zone.now
container.status = :created
container.execution_environment = execution_environment
container.re_use = true
@ -130,19 +129,19 @@ class DockerClient
timeout = Random.rand(MINIMUM_CONTAINER_LIFETIME..MAXIMUM_CONTAINER_LIFETIME) # seconds
sleep(timeout)
container.re_use = false
if container.status != :executing
container.docker_client.kill_container(container, false)
Rails.logger.info('Killing container in status ' + container.status.to_s + ' after ' + (Time.now - container.start_time).to_s + ' seconds.')
else
if container.status == :executing
Thread.new do
timeout = SELF_DESTROY_GRACE_PERIOD.to_i
sleep(timeout)
container.docker_client.kill_container(container, false)
Rails.logger.info('Force killing container in status ' + container.status.to_s + ' after ' + (Time.now - container.start_time).to_s + ' seconds.')
Rails.logger.info("Force killing container in status #{container.status} after #{Time.zone.now - container.start_time} seconds.")
ensure
# guarantee that the thread is releasing the DB connection after it is done
ActiveRecord::Base.connection_pool.release_connection
end
else
container.docker_client.kill_container(container, false)
Rails.logger.info("Killing container in status #{container.status} after #{Time.zone.now - container.start_time} seconds.")
end
ensure
# guarantee that the thread is releasing the DB connection after it is done
@ -150,8 +149,8 @@ class DockerClient
end
container
rescue Docker::Error::NotFoundError => error
Rails.logger.error('create_container: Got Docker::Error::NotFoundError: ' + error.to_s)
rescue Docker::Error::NotFoundError => e
Rails.logger.error("create_container: Got Docker::Error::NotFoundError: #{e}")
destroy_container(container)
# (tries += 1) <= RETRY_COUNT ? retry : raise(error)
end
@ -169,7 +168,7 @@ class DockerClient
end
FileUtils.chmod_R('+rwX', self.class.local_workspace_path(container))
rescue Docker::Error::NotFoundError => e
Rails.logger.info('create_workspace_files: Rescued from Docker::Error::NotFoundError: ' + e.to_s)
Rails.logger.info("create_workspace_files: Rescued from Docker::Error::NotFoundError: #{e}")
end
private :create_workspace_files
@ -187,7 +186,7 @@ class DockerClient
# create a temporary dir, put all files in it, and put it into the container. the dir is automatically removed when leaving the block.
Dir.mktmpdir do |dir|
submission.collect_files.each do |file|
disk_file = File.new(dir + '/' + (file.path || '') + file.name_with_extension, 'w')
disk_file = File.new("#{dir}/#{file.path || ''}#{file.name_with_extension}", 'w')
disk_file.write(file.content)
disk_file.close
end
@ -198,7 +197,7 @@ class DockerClient
# container.exec(['bash', '-c', 'chown -R python ' + CONTAINER_WORKSPACE_PATH])
# container.exec(['bash', '-c', 'chgrp -G python ' + CONTAINER_WORKSPACE_PATH])
rescue StandardError => e
Rails.logger.error('create workspace folder: Rescued from StandardError: ' + e.to_s)
Rails.logger.error("create workspace folder: Rescued from StandardError: #{e}")
end
# sleep 1000
@ -207,38 +206,39 @@ class DockerClient
# tar the files in dir and put the tar to CONTAINER_WORKSPACE_PATH in the container
container.archive_in(dir, CONTAINER_WORKSPACE_PATH, overwrite: false)
rescue StandardError => e
Rails.logger.error('insert tar: Rescued from StandardError: ' + e.to_s)
Rails.logger.error("insert tar: Rescued from StandardError: #{e}")
end
# Rails.logger.info('command: tar -xf ' + CONTAINER_WORKSPACE_PATH + '/' + dir.split('/tmp/')[1] + ' -C ' + CONTAINER_WORKSPACE_PATH)
begin
# untar the tar file placed in the CONTAINER_WORKSPACE_PATH
container.exec(['bash', '-c', 'tar -xf ' + CONTAINER_WORKSPACE_PATH + '/' + dir.split('/tmp/')[1] + ' -C ' + CONTAINER_WORKSPACE_PATH])
container.exec(['bash', '-c',
"tar -xf #{CONTAINER_WORKSPACE_PATH}/#{dir.split('/tmp/')[1]} -C #{CONTAINER_WORKSPACE_PATH}"])
rescue StandardError => e
Rails.logger.error('untar: Rescued from StandardError: ' + e.to_s)
Rails.logger.error("untar: Rescued from StandardError: #{e}")
end
# sleep 1000
end
rescue StandardError => e
Rails.logger.error('create_workspace_files_transmit: Rescued from StandardError: ' + e.to_s)
Rails.logger.error("create_workspace_files_transmit: Rescued from StandardError: #{e}")
end
def self.destroy_container(container)
@socket&.close
Rails.logger.info('destroying container ' + container.to_s)
Rails.logger.info("destroying container #{container}")
# Checks only if container assignment is not nil and not whether the container itself is still present.
if container && !DockerContainerPool.config[:active]
container.kill
container.port_bindings.values.each { |port| PortPool.release(port) }
container.port_bindings.each_value {|port| PortPool.release(port) }
begin
clean_container_workspace(container)
FileUtils.rmtree(local_workspace_path(container))
rescue Errno::ENOENT, Errno::EACCES => error
Sentry.capture_exception(error)
Rails.logger.error("clean_container_workspace: Got #{error.class.to_s}: #{error.to_s}")
rescue Errno::ENOENT, Errno::EACCES => e
Sentry.capture_exception(e)
Rails.logger.error("clean_container_workspace: Got #{e.class}: #{e}")
end
# Checks only if container assignment is not nil and not whether the container itself is still present.
@ -246,11 +246,11 @@ class DockerClient
elsif container
DockerContainerPool.destroy_container(container)
end
rescue Docker::Error::NotFoundError => error
Rails.logger.error('destroy_container: Rescued from Docker::Error::NotFoundError: ' + error.to_s)
rescue Docker::Error::NotFoundError => e
Rails.logger.error("destroy_container: Rescued from Docker::Error::NotFoundError: #{e}")
Rails.logger.error('No further actions are done concerning that.')
rescue Docker::Error::ConflictError => error
Rails.logger.error('destroy_container: Rescued from Docker::Error::ConflictError: ' + error.to_s)
rescue Docker::Error::ConflictError => e
Rails.logger.error("destroy_container: Rescued from Docker::Error::ConflictError: #{e}")
Rails.logger.error('No further actions are done concerning that.')
end
@ -263,21 +263,22 @@ class DockerClient
# only used by score and execute_arbitrary_command
def execute_command(command, before_execution_block, output_consuming_block)
# tries ||= 0
container_request_time = Time.now
container_request_time = Time.zone.now
@container = DockerContainerPool.get_container(@execution_environment)
waiting_for_container_time = Time.now - container_request_time
waiting_for_container_time = Time.zone.now - container_request_time
if @container
@container.status = :executing
before_execution_block.try(:call)
execution_request_time = Time.now
execution_request_time = Time.zone.now
command_result = send_command(command, @container, &output_consuming_block)
container_execution_time = Time.now - execution_request_time
container_execution_time = Time.zone.now - execution_request_time
command_result.merge!(waiting_for_container_time: waiting_for_container_time)
command_result.merge!(container_execution_time: container_execution_time)
command_result[:waiting_for_container_time] = waiting_for_container_time
command_result[:container_execution_time] = container_execution_time
command_result
else
{status: :container_depleted, waiting_for_container_time: waiting_for_container_time, container_execution_time: nil}
{status: :container_depleted, waiting_for_container_time: waiting_for_container_time,
container_execution_time: nil}
end
rescue Excon::Errors::SocketError => e
# socket errors seems to be normal when using exec
@ -298,7 +299,7 @@ class DockerClient
# Prevent catching this error here
raise
rescue StandardError => e
Rails.logger.error('execute_websocket_command: Rescued from StandardError caused by before_execution_block.call: ' + e.to_s)
Rails.logger.error("execute_websocket_command: Rescued from StandardError caused by before_execution_block.call: #{e}")
end
# TODO: catch exception if socket could not be created
@socket ||= create_socket(@container)
@ -309,16 +310,16 @@ class DockerClient
end
def kill_after_timeout(container)
"""
"
We need to start a second thread to kill the websocket connection,
as it is impossible to determine whether further input is requested.
"""
"
container.status = :executing
@thread = Thread.new do
timeout = (@execution_environment.permitted_execution_time.to_i) # seconds
timeout = @execution_environment.permitted_execution_time.to_i # seconds
sleep(timeout)
if container && container.status != :available
Rails.logger.info('Killing container after timeout of ' + timeout.to_s + ' seconds.')
Rails.logger.info("Killing container after timeout of #{timeout} seconds.")
# send timeout to the tubesock socket
# FIXME: 2nd thread to notify user.
@tubesock&.send_data JSON.dump({'cmd' => 'timeout'})
@ -338,7 +339,7 @@ class DockerClient
ActiveRecord::Base.connection_pool.release_connection
end
else
Rails.logger.info('Container' + container.to_s + ' already removed.')
Rails.logger.info("Container#{container} already removed.")
end
ensure
# guarantee that the thread is releasing the DB connection after it is done
@ -351,25 +352,30 @@ class DockerClient
end
def exit_container(container)
Rails.logger.debug('exiting container ' + container.to_s)
Rails.logger.debug("exiting container #{container}")
# exit the timeout thread if it is still alive
exit_thread_if_alive
@socket.close
# if we use pooling and recycle the containers, put it back. otherwise, destroy it.
DockerContainerPool.config[:active] && RECYCLE_CONTAINERS ? self.class.return_container(container, @execution_environment) : self.class.destroy_container(container)
if DockerContainerPool.config[:active] && RECYCLE_CONTAINERS
self.class.return_container(container,
@execution_environment)
else
self.class.destroy_container(container)
end
end
def kill_container(container, _create_new = true)
exit_thread_if_alive
Rails.logger.info('killing container ' + container.to_s)
Rails.logger.info("killing container #{container}")
self.class.destroy_container(container)
end
def execute_run_command(submission, filename, &block)
"""
"
Run commands by attaching a websocket to Docker.
"""
filepath = submission.collect_files.find { |f| f.name_with_extension == filename }.filepath
"
filepath = submission.collect_files.find {|f| f.name_with_extension == filename }.filepath
command = submission.execution_environment.run_command % command_substitutions(filepath)
create_workspace_files = proc { create_workspace_files(container, submission) }
open_websocket_connection(command, create_workspace_files, block)
@ -377,15 +383,15 @@ class DockerClient
end
def execute_test_command(submission, filename, &block)
"""
"
Stick to existing Docker API with exec command.
"""
file = submission.collect_files.find { |f| f.name_with_extension == filename }
"
file = submission.collect_files.find {|f| f.name_with_extension == filename }
filepath = file.filepath
command = submission.execution_environment.test_command % command_substitutions(filepath)
create_workspace_files = proc { create_workspace_files(container, submission) }
test_result = execute_command(command, create_workspace_files, block)
test_result.merge!(file_role: file.role)
test_result[:file_role] = file.role
test_result
end
@ -404,7 +410,7 @@ class DockerClient
end
def self.image_tags
Docker::Image.all.map { |image| image.info['RepoTags'] }.flatten.reject { |tag| tag.nil? || tag.include?('<none>') }
Docker::Image.all.map {|image| image.info['RepoTags'] }.flatten.reject {|tag| tag.nil? || tag.include?('<none>') }
end
def initialize(options = {})
@ -417,7 +423,7 @@ class DockerClient
def self.initialize_environment
# TODO: Move to DockerContainerPool
raise(Error, 'Docker configuration missing!') unless config[:connection_timeout] && config[:workspace_root]
raise Error.new('Docker configuration missing!') unless config[:connection_timeout] && config[:workspace_root]
Docker.url = config[:host] if config[:host]
# TODO: availability check disabled for performance reasons. Reconsider if this is necessary.
@ -427,11 +433,13 @@ class DockerClient
end
def local_file_path(options = {})
resulting_file_path = File.join(self.class.local_workspace_path(options[:container]), options[:file].path || '', options[:file].name_with_extension)
resulting_file_path = File.join(self.class.local_workspace_path(options[:container]), options[:file].path || '',
options[:file].name_with_extension)
absolute_path = File.expand_path(resulting_file_path)
unless absolute_path.start_with? self.class.local_workspace_path(options[:container]).to_s
raise(FilepathError, 'Filepath not allowed')
raise FilepathError.new('Filepath not allowed')
end
absolute_path
end
@ -457,12 +465,12 @@ class DockerClient
end
def self.return_container(container, execution_environment)
Rails.logger.debug('returning container ' + container.to_s)
Rails.logger.debug("returning container #{container}")
begin
clean_container_workspace(container)
rescue Docker::Error::NotFoundError => error
rescue Docker::Error::NotFoundError => e
# FIXME: Create new container?
Rails.logger.info('return_container: Rescued from Docker::Error::NotFoundError: ' + error.to_s)
Rails.logger.info("return_container: Rescued from Docker::Error::NotFoundError: #{e}")
Rails.logger.info('Nothing is done here additionally. The container will be exchanged upon its next retrieval.')
end
DockerContainerPool.return_container(container, execution_environment)
@ -477,21 +485,30 @@ class DockerClient
Timeout.timeout(@execution_environment.permitted_execution_time.to_i) do
# TODO: check the Phusion docs again to see if we need the -i -t options here
# https://stackoverflow.com/questions/363223/how-do-i-get-both-stdout-and-stderr-to-go-to-the-terminal-and-a-log-file
output = container.exec(['bash', '-c', "#{command} 1> >(tee -a /tmp/stdout.log) 2> >(tee -a /tmp/stderr.log >&2); rm -f /tmp/std*.log"], tty: false)
output = container.exec(
['bash', '-c',
"#{command} 1> >(tee -a /tmp/stdout.log) 2> >(tee -a /tmp/stderr.log >&2); rm -f /tmp/std*.log"], tty: false
)
end
Rails.logger.debug 'output from container.exec'
Rails.logger.debug output
if output.nil?
kill_container(container)
else
result = {status: output[2] == 0 ? :ok : :failed, stdout: output[0].join.force_encoding('utf-8'), stderr: output[1].join.force_encoding('utf-8')}
result = {status: (output[2]).zero? ? :ok : :failed, stdout: output[0].join.force_encoding('utf-8'),
stderr: output[1].join.force_encoding('utf-8')}
end
# if we use pooling and recycle the containers, put it back. otherwise, destroy it.
DockerContainerPool.config[:active] && RECYCLE_CONTAINERS ? self.class.return_container(container, @execution_environment) : self.class.destroy_container(container)
if DockerContainerPool.config[:active] && RECYCLE_CONTAINERS
self.class.return_container(container,
@execution_environment)
else
self.class.destroy_container(container)
end
result
rescue Timeout::Error
Rails.logger.info('got timeout error for container ' + container.to_s)
Rails.logger.info("got timeout error for container #{container}")
stdout = container.exec(['cat', '/tmp/stdout.log'])[0].join.force_encoding('utf-8')
stderr = container.exec(['cat', '/tmp/stderr.log'])[0].join.force_encoding('utf-8')
kill_container(container)
@ -501,5 +518,6 @@ class DockerClient
private :send_command
class Error < RuntimeError; end
class FilepathError < RuntimeError; end
end

View File

@ -1,10 +1,7 @@
module DockerContainerMixin
# frozen_string_literal: true
attr_accessor :start_time
attr_accessor :status
attr_accessor :re_use
attr_accessor :execution_environment
attr_accessor :docker_client
module DockerContainerMixin
attr_accessor :start_time, :status, :re_use, :execution_environment, :docker_client
def binds
host_config['Binds']
@ -12,7 +9,7 @@ module DockerContainerMixin
def port_bindings
# Don't use cached version as this might be changed during runtime
json['HostConfig']['PortBindings'].try(:map) { |key, value| [key.to_i, value.first['HostPort'].to_i] }.to_h
json['HostConfig']['PortBindings'].try(:map) {|key, value| [key.to_i, value.first['HostPort'].to_i] }.to_h
end
def host_config

View File

@ -1,30 +1,32 @@
# frozen_string_literal: true
require 'concurrent/future'
require 'concurrent/timer_task'
class DockerContainerPool
def self.config
#TODO: Why erb?
# TODO: Why erb?
@config ||= CodeOcean::Config.new(:docker).read(erb: true)[:pool]
end
def self.create_container(execution_environment)
Rails.logger.info('trying to create container for execution environment: ' + execution_environment.to_s)
Rails.logger.info("trying to create container for execution environment: #{execution_environment}")
container = DockerClient.create_container(execution_environment)
container.status = 'available' # FIXME: String vs Symbol usage?
#Rails.logger.debug('created container ' + container.to_s + ' for execution environment ' + execution_environment.to_s)
# Rails.logger.debug('created container ' + container.to_s + ' for execution environment ' + execution_environment.to_s)
container
rescue StandardError => e
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect, config: config.inspect})
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect,
config: config.inspect})
Sentry.capture_exception(e)
nil
end
def self.return_container(container, execution_environment)
Faraday.get(config[:location] + "/docker_container_pool/return_container/" + container.id)
Faraday.get("#{config[:location]}/docker_container_pool/return_container/#{container.id}")
rescue StandardError => e
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect, config: config.inspect})
Sentry.set_extras({container: container.inspect, execution_environment: execution_environment.inspect,
config: config.inspect})
Sentry.capture_exception(e)
nil
end
@ -33,10 +35,11 @@ class DockerContainerPool
# if pooling is active, do pooling, otherwise just create an container and return it
if config[:active]
begin
container_id = JSON.parse(Faraday.get(config[:location] + "/docker_container_pool/get_container/" + execution_environment.id.to_s).body)['id']
Docker::Container.get(container_id) unless container_id.blank?
container_id = JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/get_container/#{execution_environment.id}").body)['id']
Docker::Container.get(container_id) if container_id.present?
rescue StandardError => e
Sentry.set_extras({container_id: container_id.inspect, execution_environment: execution_environment.inspect, config: config.inspect})
Sentry.set_extras({container_id: container_id.inspect, execution_environment: execution_environment.inspect,
config: config.inspect})
Sentry.capture_exception(e)
nil
end
@ -46,11 +49,11 @@ class DockerContainerPool
end
def self.destroy_container(container)
Faraday.get(config[:location] + "/docker_container_pool/destroy_container/" + container.id)
Faraday.get("#{config[:location]}/docker_container_pool/destroy_container/#{container.id}")
end
def self.quantities
response = JSON.parse(Faraday.get(config[:location] + "/docker_container_pool/quantities").body)
response = JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/quantities").body)
response.transform_keys(&:to_i)
rescue StandardError => e
Sentry.set_extras({response: response.inspect})
@ -59,7 +62,7 @@ class DockerContainerPool
end
def self.dump_info
JSON.parse(Faraday.get(config[:location] + "/docker_container_pool/dump_info").body)
JSON.parse(Faraday.get("#{config[:location]}/docker_container_pool/dump_info").body)
rescue StandardError => e
Sentry.capture_exception(e)
nil

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
class FileTree < Tree::TreeNode
def file_icon(file)
if file.file_type.audio?
@ -25,10 +27,10 @@ class FileTree < Tree::TreeNode
def initialize(files = [])
super(root_label)
files.uniq{|f| f.name_with_extension}.each do |file|
files.uniq(&:name_with_extension).each do |file|
parent = self
(file.path || '').split('/').each do |segment|
node = parent.children.detect { |child| child.name == segment } || parent.add(Tree::TreeNode.new(segment))
node = parent.children.detect {|child| child.name == segment } || parent.add(Tree::TreeNode.new(segment))
parent = node
end
parent.add(Tree::TreeNode.new(file.name_with_extension, file))
@ -37,14 +39,14 @@ class FileTree < Tree::TreeNode
def map_to_js_tree(node)
{
children: node.children.map { |child| map_to_js_tree(child) },
children: node.children.map {|child| map_to_js_tree(child) },
icon: node_icon(node),
id: node.content.try(:ancestor_id),
state: {
disabled: !node.is_leaf?,
opened: !node.is_leaf?
opened: !node.is_leaf?,
},
text: node.name
text: node.name,
}
end
private :map_to_js_tree
@ -68,8 +70,8 @@ class FileTree < Tree::TreeNode
def to_js_tree
{
core: {
data: map_to_js_tree(self)
}
data: map_to_js_tree(self),
},
}.to_json
end
end

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
require 'rails/generators'
class TestingFrameworkAdapterGenerator < Rails::Generators::NamedBase
@ -5,26 +7,26 @@ class TestingFrameworkAdapterGenerator < Rails::Generators::NamedBase
SPEC_PATH = ->(name) { Rails.root.join('spec', 'lib', "#{name.underscore}_adapter_spec.rb") }
def create_testing_framework_adapter
create_file ADAPTER_PATH.call(file_name), <<-code
class #{file_name.camelize}Adapter < TestingFrameworkAdapter
def self.framework_name
'#{file_name.camelize}'
end
def parse_output(output)
end
end
code
create_file ADAPTER_PATH.call(file_name), <<~CODE
class #{file_name.camelize}Adapter < TestingFrameworkAdapter
def self.framework_name
'#{file_name.camelize}'
end
#{' '}
def parse_output(output)
end
end
CODE
end
def create_spec
create_file SPEC_PATH.call(file_name), <<-code
require 'rails_helper'
describe #{file_name.camelize}Adapter do
describe '#parse_output' do
end
end
code
create_file SPEC_PATH.call(file_name), <<~CODE
require 'rails_helper'
#{' '}
describe #{file_name.camelize}Adapter do
describe '#parse_output' do
end
end
CODE
end
end

View File

@ -1,8 +1,10 @@
# frozen_string_literal: true
class JunitAdapter < TestingFrameworkAdapter
COUNT_REGEXP = /Tests run: (\d+)/
FAILURES_REGEXP = /Failures: (\d+)/
SUCCESS_REGEXP = /OK \((\d+) test[s]?\)/
ASSERTION_ERROR_REGEXP = /java\.lang\.AssertionError:?\s(.*?)\tat org.junit|org\.junit\.ComparisonFailure:\s(.*?)\tat org.junit/m
COUNT_REGEXP = /Tests run: (\d+)/.freeze
FAILURES_REGEXP = /Failures: (\d+)/.freeze
SUCCESS_REGEXP = /OK \((\d+) tests?\)/.freeze
ASSERTION_ERROR_REGEXP = /java\.lang\.AssertionError:?\s(.*?)\tat org.junit|org\.junit\.ComparisonFailure:\s(.*?)\tat org.junit/m.freeze
def self.framework_name
'JUnit'

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class MochaAdapter < TestingFrameworkAdapter
SUCCESS_REGEXP = /(\d+) passing/
FAILURES_REGEXP = /(\d+) failing/
SUCCESS_REGEXP = /(\d+) passing/.freeze
FAILURES_REGEXP = /(\d+) failing/.freeze
def self.framework_name
'Mocha'
@ -11,6 +13,6 @@ class MochaAdapter < TestingFrameworkAdapter
matches_failed = FAILURES_REGEXP.match(output[:stdout])
failed = matches_failed ? matches_failed.captures.first.to_i : 0
success = matches_success ? matches_success.captures.first.to_i : 0
{count: success+failed, failed: failed}
{count: success + failed, failed: failed}
end
end

View File

@ -1,10 +1,12 @@
# frozen_string_literal: true
class NonceStore
def self.build_cache_key(nonce)
"lti_nonce_#{nonce}"
end
def self.add(nonce)
Rails.cache.write(build_cache_key(nonce), Time.now, expires_in: Lti::MAXIMUM_SESSION_AGE)
Rails.cache.write(build_cache_key(nonce), Time.zone.now, expires_in: Lti::MAXIMUM_SESSION_AGE)
end
def self.delete(nonce)

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
class PortPool
PORT_RANGE = DockerClient.config[:ports]
@ -11,6 +13,6 @@ class PortPool
end
def self.release(port)
@available_ports << port if PORT_RANGE.include?(port) && !@available_ports.include?(port)
@available_ports << port if PORT_RANGE.include?(port) && @available_ports.exclude?(port)
end
end

View File

@ -42,11 +42,11 @@ module Prometheus
def initialize_rfc_metrics
# Initialize rfc metric
@rfc_count.observe(RequestForComment.unsolved.where(full_score_reached: false).count,
state: RequestForComment::ONGOING)
state: RequestForComment::ONGOING)
@rfc_count.observe(RequestForComment.unsolved.where(full_score_reached: true).count,
state: RequestForComment::SOFT_SOLVED)
state: RequestForComment::SOFT_SOLVED)
@rfc_count.observe(RequestForComment.where(solved: true).count,
state: RequestForComment::SOLVED)
state: RequestForComment::SOLVED)
# count of rfcs with comments
@rfc_commented_count.observe(RequestForComment.joins(:comments).distinct.count(:id))
@ -56,8 +56,8 @@ module Prometheus
Rails.logger.debug("Prometheus metric updated for #{object.class.name}")
case object
when RequestForComment
update_rfc(object)
when RequestForComment
update_rfc(object)
end
end
@ -66,10 +66,10 @@ module Prometheus
Rails.logger.debug("Prometheus instance count increased for #{object.class.name}")
case object
when RequestForComment
create_rfc(object)
when Comment
create_comment(object)
when RequestForComment
create_rfc(object)
when Comment
create_comment(object)
end
end
@ -78,8 +78,8 @@ module Prometheus
Rails.logger.debug("Prometheus instance count decreased for #{object.class.name}")
case object
when Comment
destroy_comment(object)
when Comment
destroy_comment(object)
end
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class PyLintAdapter < TestingFrameworkAdapter
REGEXP = /Your code has been rated at (-?\d+\.?\d*)\/(\d+\.?\d*)/
ASSERTION_ERROR_REGEXP = /^(.*?\.py):(\d+):(.*?)\(([^,]*?), ([^,]*?),([^,]*?)\) (.*?)$/
REGEXP = %r{Your code has been rated at (-?\d+\.?\d*)/(\d+\.?\d*)}.freeze
ASSERTION_ERROR_REGEXP = /^(.*?\.py):(\d+):(.*?)\(([^,]*?), ([^,]*?),([^,]*?)\) (.*?)$/.freeze
def self.framework_name
'PyLint'
@ -22,14 +24,14 @@ class PyLintAdapter < TestingFrameworkAdapter
assertion_error_matches = Timeout.timeout(2.seconds) do
output[:stdout].scan(ASSERTION_ERROR_REGEXP).map do |match|
{
file_name: match[0].strip,
file_name: match[0].strip,
line: match[1].to_i,
severity: match[2].strip,
code: match[3].strip,
name: match[4].strip,
# e.g. function name, nil if outside of a function. Not always available
scope: match[5].strip.presence,
result: match[6].strip
result: match[6].strip,
}
end || []
end
@ -37,8 +39,9 @@ class PyLintAdapter < TestingFrameworkAdapter
Sentry.capture_message({stdout: output[:stdout], regex: ASSERTION_ERROR_REGEXP}.to_json)
assertion_error_matches = []
end
concatenated_errors = assertion_error_matches.map { |result| "#{result[:name]}: #{result[:result]}" }.flatten
{count: count, failed: failed, error_messages: concatenated_errors, detailed_linter_results: assertion_error_matches}
concatenated_errors = assertion_error_matches.map {|result| "#{result[:name]}: #{result[:result]}" }.flatten
{count: count, failed: failed, error_messages: concatenated_errors,
detailed_linter_results: assertion_error_matches}
end
def self.translate_linter(assessment, locale)
@ -47,7 +50,7 @@ class PyLintAdapter < TestingFrameworkAdapter
I18n.locale = locale || I18n.default_locale
return assessment unless assessment[:detailed_linter_results].present?
return assessment if assessment[:detailed_linter_results].blank?
assessment[:detailed_linter_results].map! do |message|
severity = message[:severity]
@ -73,7 +76,7 @@ class PyLintAdapter < TestingFrameworkAdapter
replacement = {}
end
replacement.merge!(default: message[:result])
replacement[:default] = message[:result]
message[:result] = I18n.t("linter.#{severity}.#{name}.replacement", replacement)
message
end

View File

@ -1,8 +1,10 @@
# frozen_string_literal: true
class PyUnitAdapter < TestingFrameworkAdapter
COUNT_REGEXP = /Ran (\d+) test/
FAILURES_REGEXP = /FAILED \(.*failures=(\d+).*\)/
ERRORS_REGEXP = /FAILED \(.*errors=(\d+).*\)/
ASSERTION_ERROR_REGEXP = /^(ERROR|FAIL):\ (.*?)\ .*?^[^\.\n]*?(Error|Exception):\s((\s|\S)*?)(>>>.*?)*\s\s(-|=){70}/m
COUNT_REGEXP = /Ran (\d+) test/.freeze
FAILURES_REGEXP = /FAILED \(.*failures=(\d+).*\)/.freeze
ERRORS_REGEXP = /FAILED \(.*errors=(\d+).*\)/.freeze
ASSERTION_ERROR_REGEXP = /^(ERROR|FAIL):\ (.*?)\ .*?^[^.\n]*?(Error|Exception):\s((\s|\S)*?)(>>>.*?)*\s\s(-|=){70}/m.freeze
def self.framework_name
'PyUnit'
@ -16,7 +18,7 @@ class PyUnitAdapter < TestingFrameworkAdapter
errors = error_matches ? error_matches.captures.try(:first).to_i : 0
begin
assertion_error_matches = Timeout.timeout(2.seconds) do
output[:stderr].scan(ASSERTION_ERROR_REGEXP).map { |match|
output[:stderr].scan(ASSERTION_ERROR_REGEXP).map do |match|
testname = match[1]
error = match[3].strip
@ -25,7 +27,7 @@ class PyUnitAdapter < TestingFrameworkAdapter
else
"#{testname}: #{error}"
end
}.flatten || []
end.flatten || []
end
rescue Timeout::Error
Sentry.capture_message({stderr: output[:stderr], regex: ASSERTION_ERROR_REGEXP}.to_json)

View File

@ -1,5 +1,6 @@
class PyUnitAndPyLintAdapter < TestingFrameworkAdapter
# frozen_string_literal: true
class PyUnitAndPyLintAdapter < TestingFrameworkAdapter
# Human-readable name of this combined testing framework adapter.
def self.framework_name
  'PyUnit and PyLint'
end

View File

@ -1,22 +1,21 @@
# frozen_string_literal: true
class Python20CourseWeek
# Maps an exercise to its week number within the Python20 course,
# based on the exercise title.
#
# @param exercise [#title] any object responding to #title with a String
# @return [Integer, nil] the course week (1-4), or nil when the exercise
#   is not part of the Python20 course
def self.get_for(exercise)
  case exercise.title
    when /Python20 Aufgabe 1/
      1
    when /Python20 Aufgabe 2/
      2
    when /Python20 Aufgabe 3/
      3
    when /Python20 Aufgabe 4/
      4
    when /Python20 Snake/
      # The snake project belongs to the final (fourth) week.
      4
    else
      # Not part of the Python20 course
      nil
  end
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class RScriptAdapter < TestingFrameworkAdapter
REGEXP = /(\d+) examples?, (\d+) passed?/
ASSERTION_ERROR_REGEXP = /AssertionError:\s(.*)/
REGEXP = /(\d+) examples?, (\d+) passed?/.freeze
ASSERTION_ERROR_REGEXP = /AssertionError:\s(.*)/.freeze
def self.framework_name
'R Script'

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
class RspecAdapter < TestingFrameworkAdapter
REGEXP = /(\d+) examples?, (\d+) failures?/
REGEXP = /(\d+) examples?, (\d+) failures?/.freeze
def self.framework_name
'RSpec 3'

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
module SeedsHelper
def self.read_seed_file(filename)
file = File.new(seed_file_path(filename), 'r')

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class SqlResultSetComparatorAdapter < TestingFrameworkAdapter
MISSING_TUPLES_REGEXP = /Missing tuples: \[\]/
UNEXPECTED_TUPLES_REGEXP = /Unexpected tuples: \[\]/
MISSING_TUPLES_REGEXP = /Missing tuples: \[\]/.freeze
UNEXPECTED_TUPLES_REGEXP = /Unexpected tuples: \[\]/.freeze
def self.framework_name
'SqlResultSetComparator'

View File

@ -111,7 +111,7 @@ namespace :detect_exercise_anomalies do
end
def notify_users(collection, anomalies)
by_id_and_type = proc { |u| {user_id: u[:user_id], user_type: u[:user_type]} }
by_id_and_type = proc {|u| {user_id: u[:user_id], user_type: u[:user_type]} }
log('Sending E-Mails to best and worst performing users of each anomaly...', 2)
anomalies.each do |exercise_id, average_working_time|
@ -122,7 +122,7 @@ namespace :detect_exercise_anomalies do
users = {}
%i[performers_by_time performers_by_score].each do |method|
# merge users found by multiple methods returning a hash {best: [], worst: []}
users = users.merge(send(method, exercise, NUMBER_OF_USERS_PER_CLASS)) { |_key, this, other| this + other }
users = users.merge(send(method, exercise, NUMBER_OF_USERS_PER_CLASS)) {|_key, this, other| this + other }
end
# write reasons for feedback emails to db
@ -140,7 +140,8 @@ namespace :detect_exercise_anomalies do
users_to_notify.each do |u|
user = u[:user_type] == InternalUser.name ? InternalUser.find(u[:user_id]) : ExternalUser.find(u[:user_id])
host = CodeOcean::Application.config.action_mailer.default_url_options[:host]
feedback_link = Rails.application.routes.url_helpers.url_for(action: :new, controller: :user_exercise_feedbacks, exercise_id: exercise.id, host: host)
feedback_link = Rails.application.routes.url_helpers.url_for(action: :new,
controller: :user_exercise_feedbacks, exercise_id: exercise.id, host: host)
UserMailer.exercise_anomaly_needs_feedback(user, exercise, feedback_link).deliver
end
log("Asked #{users_to_notify.size} users for feedback.", 2)
@ -149,7 +150,7 @@ namespace :detect_exercise_anomalies do
def performers_by_score(exercise, users)
submissions = exercise.last_submission_per_user.where.not(score: nil).order(score: :desc)
map_block = proc { |item| {user_id: item.user_id, user_type: item.user_type, value: item.score, reason: 'score'} }
map_block = proc {|item| {user_id: item.user_id, user_type: item.user_type, value: item.score, reason: 'score'} }
best_performers = submissions.first(users).to_a.map(&map_block)
worst_performers = submissions.last(users).to_a.map(&map_block)
{best: best_performers, worst: worst_performers}
@ -161,8 +162,10 @@ namespace :detect_exercise_anomalies do
value: time_to_f(item['working_time']), reason: 'time'}
end
avg_score = exercise.average_score
working_times.reject! { |item| item[:value].nil? or item[:value] <= MIN_USER_WORKING_TIME or item[:score] < avg_score }
working_times.sort_by! { |item| item[:value] }
working_times.reject! do |item|
item[:value].nil? or item[:value] <= MIN_USER_WORKING_TIME or item[:score] < avg_score
end
working_times.sort_by! {|item| item[:value] }
{best: working_times.first(users), worst: working_times.last(users)}
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
namespace :docker do
desc 'Remove all Docker containers and dangling Docker images (using the CLI)'
# Removes all Docker containers and dangling images via the Docker CLI.
# Each `test -n "$(...)"` guard skips the removal command entirely when
# there is nothing to remove, avoiding `docker rm`/`rmi` errors on empty
# argument lists. (The dangling-image guard previously tested a literal
# string instead of the command output and was therefore always true.)
task clean_up: :environment do
  `test -n "$(docker ps --all --quiet)" && docker rm --force $(docker ps --all --quiet)`
  `test -n "$(docker images --filter dangling=true --quiet)" && docker rmi $(docker images --filter dangling=true --quiet)`
end

View File

@ -1,17 +1,17 @@
# frozen_string_literal: true
namespace :user do
  require 'csv'

  desc 'write displaynames retrieved from the account service as csv into the codeocean database'
  task :write_displaynames, [:file_path_read] => [:environment] do |_t, args|
    # Expects a headered CSV whose column 0 holds the external user id and
    # column 1 the new display name.
    csv_input = CSV.read(args[:file_path_read], headers: true)
    csv_input.each do |row|
      user = ExternalUser.find_by(external_id: row[0])
      puts "Change name from #{user.name} to #{row[1]}"
      user.update(name: row[1])
    end
  end
end

View File

@ -1,3 +1,5 @@
# frozen_string_literal: true
class TestingFrameworkAdapter
def augment_output(options = {})
if !options[:count]
@ -15,7 +17,7 @@ class TestingFrameworkAdapter
end
# Abstract hook: subclasses must override this to transform raw
# test-runner output into the adapter's result hash.
# @raise [NotImplementedError] always, unless overridden
def parse_output(*)
  raise NotImplementedError.new("#{self.class} should implement #parse_output!")
end
private :parse_output