diff --git a/.github/workflows/cbrain_ci.yaml b/.github/workflows/cbrain_ci.yaml index 6495e1ec4..9f76043fb 100644 --- a/.github/workflows/cbrain_ci.yaml +++ b/.github/workflows/cbrain_ci.yaml @@ -24,7 +24,7 @@ jobs: ########################################################### services: mariadb: - image: mariadb/server + image: mariadb env: # the docker container's autosetup use MYSQL_ variables MYSQL_ROOT_PASSWORD: that_is_nothing MYSQL_DATABASE: cbrain_test @@ -38,13 +38,13 @@ jobs: ########################################################### - name: Checkout CBRAIN Codebase - uses: actions/checkout@v2 + uses: actions/checkout@v4 ########################################################### - name: Setup Ruby uses: ruby/setup-ruby@v1 with: - ruby-version: 2.6.6 + ruby-version: 2.7.2 ########################################################### - name: Setup BrainPortal And Bourreau Names @@ -67,7 +67,7 @@ jobs: ########################################################### - name: Reload Cached Gems - uses: actions/cache@v1 # speeds up 'Prepare Ruby Gems' below + uses: actions/cache@v3 # speeds up 'Prepare Ruby Gems' below with: path: gem-cache key: ${{ runner.os }}-gems-${{ hashFiles('*/Gemfile') }} diff --git a/.travis.yml.old b/.travis.yml.old deleted file mode 100644 index bf8e7cc23..000000000 --- a/.travis.yml.old +++ /dev/null @@ -1,14 +0,0 @@ -# -# This used to be our Travis CI test script. -# This has been replaced by GitHub actions. -# -services: -- docker -script: -- bash Travis/travis_ci.sh -notifications: - slack: - rooms: - - secure: "gtJ4qanurja+/Ew0ChaeStjsZVC65nGC1uVPLEnZo05q0q8EsmMXpUCvnWSJu7xl42mrivOEmNns6xrrevGuKg0PiVZtKaj9njCGT2SRYZcvrAVZl17Q5pnNIFeRkmdiKp18sE071FKS11a8M4+K2nV1o6eupfOqNUWAWAmQAdI=" - - secure: "UEz9wB+m2nih2YnsBBLQMfGHBuYJ3WlsOy/IymWomog4ViBq7Q5UOW5RbtNukWhqJp8A/klieGIOcl2yTK3glyW3f04WDzg5zoVrTNVWsiwmRTa7L5D9+sw7/wlx9KE3x3ogYYgq77JkC33TQ9sOL8bMHYNbGVbiUuj+W08TsbSt8u7CR4XDIN3Fr3jpKXISoTxmG1rHyhlg1oO0Zb9h8IQ+8ySQo/6ecPhtyflv4C27p7RS3V+H9Ynu9kKd/gwsLtrcl61DxlD3LDipwYoCCQhrHX1CBD1Ti1h9ZoW1wwUuzxsdliWF+VQvNYSkTRV/yFmUmQRGxbzB3wkCmepOwgnhzU93TPxl02mzDjSkqYkh1JQ+/9JBHXiNQKamYWrNShJc3nnH98E7oeGvbHvbUdu3J+XT2JYbjTY4UdhCKlzw4ZifKpg2wIXXpH9bYKSo2LLJv4ocKaAUC2C+/BjKel/wd4FkYZix4Q58ISPfm51rytWJJvrbY7DODxcKmbVR3//8Jh+3/VFan+MOkH+AO7vjG7Gxym7jYwz2X9bElCMAR6NHH8EVeRSzzoSD8dYIQz8p/OSyFVN7CfR568+QisUhzzQ03TdCMwoNfGq97OKnWulXRKhqXSV3FktnOFJbJiHCf9cusDvZVApWqN7bEyO0bvstwwrRW7Zozcok9j8=" - - secure: "PI4Mo1Top2Nr4LFA19iHjK/abPhNKguXsbvcEEK8FLptzApeyorLb3/sgCXE6BRghHpK+lf02LkFNXaOvSG6myJrM5lP4XPpWBwCIfntpdbebzGet8pp6BZ19EO7tV75ckjPInBqQUs4Zo+SYxA65UJffCO094DSrReLCe6DTyFN/JGAZrV/AbK3g4bcw6ne6IIRa3r/o2tA96Pw9PJDzmAu7XwARZvmtsj/vg/UdwSsih7Hq1gDsQVCKfFfqQS2kUpF0HCVWZyghRbvEa26xH/hPR3ZNL4KLGf0GhFJV6aA19JhmdvvIpklZK1VuXrI/xO0Pt1en/GvSbRCD9Ngj1VY8aujelflmHIHJhAoJXwl25HLMHf+EYfMQYNdHXkARmL+rVLt1RqfEqmlfdBdbzlz2fP9VcZyugP12vqoTLIosNWXvxUVh1qyp63v3Kg1I6W4Vbm1F6ll8ChCR2uPVEebr7Px9eaa3Q8z6zb96a0DkPu1Q0AziJs3bu5gtufHjqdgqLfTjG/dROTeswMV7htOe8noLDMIO9qlS2qFJ6pb43aMFonMFHVjPDkRv9GWvTqQSdjAAcyF43ZN7Kizu6/LWrSaOF0D6ONQxa0KiMhtxVQc//walqN/InU3PNHIVXsGTqznffqLejVFcm2EVoxu/0abr1f+Tx2EthlBOgk=" diff --git a/Bourreau/Gemfile b/Bourreau/Gemfile index cb0798fb2..3094bcea8 100644 --- a/Bourreau/Gemfile +++ b/Bourreau/Gemfile @@ -31,6 +31,7 @@ gem 'rails', '~> 5.0.6' # Network components mostly gem "net-ssh", :require => 'net/ssh' gem "net-sftp", :require => 'net/sftp' +gem "x25519" gem "rbnacl" gem "ed25519" gem "rbnacl-libsodium" @@ -70,7 +71,8 @@ end group :test do gem "rspec" gem "rspec-rails" - gem 
"factory_bot_rails", :require => false + gem "factory_bot", '6.4.2', :require => false + gem "factory_bot_rails", '6.4.2', :require => false gem "rails-controller-testing" gem "rspec-mocks" gem "rspec-activemodel-mocks" diff --git a/Bourreau/app/models/bourreau_worker.rb b/Bourreau/app/models/bourreau_worker.rb index 231c0564c..f37789965 100644 --- a/Bourreau/app/models/bourreau_worker.rb +++ b/Bourreau/app/models/bourreau_worker.rb @@ -212,7 +212,9 @@ def process_task_list(tasks_todo_rel) #:nodoc: user_max_tasks = @rr.meta["task_limit_user_#{user_id}".to_sym] user_max_tasks = @rr.meta[:task_limit_user_default] if user_max_tasks.blank? user_max_tasks = user_max_tasks.to_i # nil, "" and "0" means unlimited - user_tasks = by_user[user_id].shuffle # go through tasks in random order + # Go through tasks in random order, but with non-New states having higher priority + user_tasks = (by_user[user_id].select { |t| t.status == 'New' }).shuffle + + (by_user[user_id].select { |t| t.status != 'New' }).shuffle # tasks are pop()ed # Loop for each task while user_tasks.size > 0 diff --git a/Bourreau/app/models/squashifier_en_cbrain_ssh_data_provider.rb b/Bourreau/app/models/squashifier_en_cbrain_ssh_data_provider.rb new file mode 120000 index 000000000..59016b55a --- /dev/null +++ b/Bourreau/app/models/squashifier_en_cbrain_ssh_data_provider.rb @@ -0,0 +1 @@ +../../../BrainPortal/app/models/squashifier_en_cbrain_ssh_data_provider.rb \ No newline at end of file diff --git a/Bourreau/config/initializers/validation_bourreau.rb b/Bourreau/config/initializers/validation_bourreau.rb index a43888a18..6cce6f35a 100644 --- a/Bourreau/config/initializers/validation_bourreau.rb +++ b/Bourreau/config/initializers/validation_bourreau.rb @@ -52,7 +52,10 @@ puts "C> \t environment variable 'CBRAIN_SKIP_VALIDATIONS' to '1'.\n" CbrainSystemChecks.check(:all) BourreauSystemChecks.check([ - :a050_ensure_proper_cluster_management_layer_is_loaded, :z000_ensure_we_have_a_forwarded_ssh_agent, + :a000_ensure_models_are_preloaded, + :a005_ensure_boutiques_descriptors_are_loaded, + :a050_ensure_proper_cluster_management_layer_is_loaded, + :z000_ensure_we_have_a_forwarded_ssh_agent, ]) end Process.setproctitle "CBRAIN Console #{RemoteResource.current_resource.class} #{RemoteResource.current_resource.name} #{CBRAIN::Instance_Name}" diff --git a/Bourreau/lib/bourreau_system_checks.rb b/Bourreau/lib/bourreau_system_checks.rb index 7b13161ac..b0d243c70 100644 --- a/Bourreau/lib/bourreau_system_checks.rb +++ b/Bourreau/lib/bourreau_system_checks.rb @@ -484,6 +484,23 @@ def self.a100_ensure_dp_cache_symlink_exists #:nodoc: + def self.a110_ensure_task_class_git_commits_cached + + #---------------------------------------------------------------------------- + puts "C> Ensuring git commits for tasks classes are pre-cached..." 
+ #---------------------------------------------------------------------------- + + myself = RemoteResource.current_resource + + ToolConfig.where(:bourreau_id => myself.id) + .map {|tc| tc.cbrain_task_class rescue nil} + .uniq + .compact # to remove the nil + .each { |klass| klass.revision_info.self_update } + end + + + def self.z000_ensure_we_have_a_forwarded_ssh_agent #:nodoc: #---------------------------------------------------------------------------- @@ -501,4 +518,3 @@ def self.z000_ensure_we_have_a_forwarded_ssh_agent #:nodoc: end end - diff --git a/Bourreau/lib/boutiques_collection_basenames_list_maker.rb b/Bourreau/lib/boutiques_collection_basenames_list_maker.rb new file mode 120000 index 000000000..0d8942f80 --- /dev/null +++ b/Bourreau/lib/boutiques_collection_basenames_list_maker.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/boutiques_collection_basenames_list_maker.rb \ No newline at end of file diff --git a/Bourreau/lib/boutiques_ext3_capturer.rb b/Bourreau/lib/boutiques_ext3_capturer.rb new file mode 120000 index 000000000..d33b38e1c --- /dev/null +++ b/Bourreau/lib/boutiques_ext3_capturer.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/boutiques_ext3_capturer.rb \ No newline at end of file diff --git a/Bourreau/lib/boutiques_input_value_fixer.rb b/Bourreau/lib/boutiques_input_value_fixer.rb new file mode 120000 index 000000000..153073c7c --- /dev/null +++ b/Bourreau/lib/boutiques_input_value_fixer.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/boutiques_input_value_fixer.rb \ No newline at end of file diff --git a/Bourreau/lib/boutiques_save_std_out_std_err.rb b/Bourreau/lib/boutiques_save_std_out_std_err.rb new file mode 120000 index 000000000..7336628e0 --- /dev/null +++ b/Bourreau/lib/boutiques_save_std_out_std_err.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/boutiques_save_std_out_std_err.rb \ No newline at end of file diff --git a/Bourreau/lib/boutiques_task_logs_copier.rb b/Bourreau/lib/boutiques_task_logs_copier.rb new file mode 120000 index 000000000..b84290e0c --- /dev/null +++ b/Bourreau/lib/boutiques_task_logs_copier.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/boutiques_task_logs_copier.rb \ No newline at end of file diff --git a/Bourreau/lib/data_provider_test_connection_error.rb b/Bourreau/lib/data_provider_test_connection_error.rb new file mode 120000 index 000000000..ee01c5c55 --- /dev/null +++ b/Bourreau/lib/data_provider_test_connection_error.rb @@ -0,0 +1 @@ +../../BrainPortal/lib/data_provider_test_connection_error.rb \ No newline at end of file diff --git a/BrainPortal/Gemfile b/BrainPortal/Gemfile index 58571094f..95c062d97 100644 --- a/BrainPortal/Gemfile +++ b/BrainPortal/Gemfile @@ -31,10 +31,13 @@ gem 'rails', '~> 5.0.6' # Network components mostly gem "net-ssh", :require => 'net/ssh' gem "net-sftp", :require => 'net/sftp' -gem "rbnacl" +gem "x25519" gem "ed25519" -gem "rbnacl-libsodium" gem "bcrypt_pbkdf" +# You can comment-out the following two gems if you don't mind not having 'userkey' data providers. +# They are usually the most tricky to compile and integrate into the app. 
+gem "rbnacl" +gem "rbnacl-libsodium" # Other utilities gem "sys-proctable", '~> 1.2.0' @@ -46,6 +49,7 @@ gem "json-schema" gem "aws-sdk-s3", '~> 1' gem 'zenodo_client', '>= 1.0.0', :git => 'https://github.com/aces/zenodo-client-gem' gem 'jwt' +gem "rack-cors" # User interface gems gem "will_paginate" @@ -65,7 +69,7 @@ gem "activemodel-serializers-xml" gem 'sassc-rails' # "rake assets:precompile" fails if this gems is put into the :assets group group :assets do - gem 'therubyracer' + #gem 'therubyracer' # very old; consider installing ANY other JS engine, like 'node'. If not, try 'mini_racer'. gem 'coffee-rails' gem 'uglifier' end @@ -90,7 +94,8 @@ end group :test do gem "rspec" gem "rspec-rails" - gem "factory_bot_rails", :require => false + gem "factory_bot", '6.4.2', :require => false + gem "factory_bot_rails", '6.4.2', :require => false gem "rails-controller-testing" gem "rspec-mocks" gem "rspec-activemodel-mocks" diff --git a/BrainPortal/app/assets/stylesheets/cbrain.css.erb b/BrainPortal/app/assets/stylesheets/cbrain.css.erb index cc28238fb..b925a5dd2 100644 --- a/BrainPortal/app/assets/stylesheets/cbrain.css.erb +++ b/BrainPortal/app/assets/stylesheets/cbrain.css.erb @@ -3,7 +3,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -441,9 +441,9 @@ table.bordered { table.public_tools_list { border: 0px; - margin-left: auto; - margin-right: auto; - margin-bottom: 2em; + display: table-cell; + padding-bottom: 1em; + padding-right: 1em; } table.public_tools_list tr { @@ -458,7 +458,7 @@ table.public_tools_list th { table.public_tools_list td { border: 0px; border-bottom: 0.2em solid #999; - max-width: 60em; + max-width: 30em; } @@ -804,6 +804,10 @@ pre { line-height: 1.3em; border: 0.2em solid black; padding: 0.4em; + word-break: break-all; + overflow-wrap: break-word; + text-align: left; + table-layout: fixed; } .script_preview { @@ -880,7 +884,7 @@ pre { } .errorExplanation { - width: 400px; + width: 600px; border: 2px solid red; padding: 7px; padding-bottom: 12px; @@ -1138,18 +1142,16 @@ pre { float: left; text-align: center; text-decoration: none; - color: white; padding: 0.4em; margin: 2em ; width: 18em; height: 9em; - border: 0.4em gray solid; + border: 0.5em black solid; border-radius: 1em; cursor: pointer; } -.project_button h4{ - color: white; +.project_button h4 { font-weight: bold; border-bottom: none; margin-bottom: 0; @@ -1163,13 +1165,24 @@ pre { .project_button_bottom_link { font-size: 10px; - color: white; font-weight: bold; text-decoration: none; position: absolute; bottom: 1px; } +.project_user_count { + color: #660000; +} + +.project_files_count { + color: #006600; +} + +.project_tasks_count { + color: #000066; +} + .project_button_description { font-style: italic; line-height: 1.2em; @@ -1180,48 +1193,30 @@ pre { } .project_edit_button { - left: 5px; + left: 1em; } .project_delete_button { - right: 5px; + right: 1em; } -.system_project_point { color: #d41c1c; } -.system_project { background: #d41c1c; } -.system_project:active { background: #d41c1c; } - -.everyone_project_point { color: #af0b0b; } -.everyone_project { background: #af0b0b; } -.everyone_project:active { background: #af0b0b; } - -.site_project_point { color: #8c0953; } -.site_project { background: #8c0953; } -.site_project:active { background: #8c0953; } +.everyone_project { background: #bbbbbb; } +.everyone_project:active { background: #bbbbbb; } -.user_project_point { color: #500d75; } 
-.user_project { background: #500d75; } -.user_project:active { background: #500d75; } +.site_project { background: #dbf7cc; } +.site_project:active { background: #dbf7cc; } -.shared_project_point { color: #00bf09; } -.shared_project { background: #00bf09; } -.shared_project:active { background: #00bf09; } +.user_project { background: #bbbbbb; } +.user_project:active { background: #bbbbbb; } -.public_project_point { color: #9e5400; } -.public_project { background: #9e5400; } -.public_project:active { background: #9e5400; } +.shared_project { background: #A5D1DF; } +.shared_project:active { background: #A5D1DF; } -.personal_project_point { color: #008686; } -.personal_project { background: #008686; } -.personal_project:active { background: #008686; } +.public_project { background: #fdcece; } +.public_project:active { background: #fdcece; } -.invisible_project_point { color: #2b97c1; } -.invisible_project { background: #2b97c1; } -.invisible_project:active { background: #2b97c1; } - -.empty_project_point { color: #d7ca0a; } -.empty_project { background: #d7ca0a; } -.empty_project:active { background: #d7ca0a; } +.private_project { background: #eeeeee; } +.private_project:active { background: #eeeeee; } .giant { margin-top: 0.3em; @@ -2460,6 +2455,7 @@ img { color: black; padding: 5px; border: 1px solid black; + max-width: 42em; } .white_bg { @@ -2623,6 +2619,26 @@ img { display: table-cell; } +/* % ######################################################### */ +/* % Disk Quota Styles */ +/* % ######################################################### */ + +.disk_quota_user_quota_highlight { + background-color: #ffd; /* light yellow */ +} + +.disk_quota_dp_quota_highlight { + background-color: #ffd; /* light yellow */ +} + +.disk_quota_exceed_bytes { + background-color: #fdd; /* light pink */ +} + +.disk_quota_exceed_files { + background-color: #fdd; /* light pink */ +} + /* % ######################################################### */ /* % Report Generator Styles */ /* % ######################################################### */ diff --git a/BrainPortal/app/assets/stylesheets/neurohub.scss.erb b/BrainPortal/app/assets/stylesheets/neurohub.scss.erb index 1010e4ae1..ef6aeab25 100644 --- a/BrainPortal/app/assets/stylesheets/neurohub.scss.erb +++ b/BrainPortal/app/assets/stylesheets/neurohub.scss.erb @@ -3220,6 +3220,7 @@ $DASHBOARD_CARD_MIN_WIDTH: 25; border: 1px solid $DEFAULT_ALT; border-radius: 2px; overflow: scroll; + box-sizing: border-box; } .license-list { background: $PRIMARY_WASH; diff --git a/BrainPortal/app/controllers/data_providers_controller.rb b/BrainPortal/app/controllers/data_providers_controller.rb index e1113e722..24cd5e3f7 100644 --- a/BrainPortal/app/controllers/data_providers_controller.rb +++ b/BrainPortal/app/controllers/data_providers_controller.rb @@ -2,7 +2,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -28,11 +28,12 @@ class DataProvidersController < ApplicationController Revision_info=CbrainFileRevision[__FILE__] #:nodoc: api_available :only => [ :index, :show, :is_alive, - :browse, :register, :unregister, :delete ] + :browse, :register, :unregister, :delete, + :create_personal, :check_personal, ] before_action :login_required before_action :manager_role_required, :only => [:new, :create] - before_action :admin_role_required, :only => [:report, :repair] + before_action :admin_role_required, :only => [:new, :create, :report, :repair] def index #:nodoc: 
@scope = scope_from_session @@ -61,7 +62,6 @@ def index #:nodoc: def show #:nodoc: data_provider_id = params[:id] @provider = DataProvider.find(data_provider_id) - cb_notice "Provider not accessible by current user." unless @provider.can_be_accessed_by?(current_user) respond_to do |format| @@ -86,8 +86,8 @@ def new #:nodoc: @typelist = get_type_list end - def create #:nodoc: - @provider = DataProvider.sti_new(data_provider_params) + def create #:nodoc: + @provider = DataProvider.sti_new(data_provider_params) @provider.user_id ||= current_user.id # disabled field in form DOES NOT send value! @provider.group_id ||= current_assignable_group.id @@ -110,6 +110,49 @@ def create #:nodoc: end end + def new_personal #:nodoc: + provider_group_id = current_assignable_group.id + @provider = UserkeyFlatDirSshDataProvider.new( :user_id => current_user.id, + :group_id => provider_group_id, + :online => true, + :read_only => false + ) + @groups = current_user.assignable_groups + end + + # create by normal user, only UserkeyFlatDirSshDataProvider + def create_personal + normal_params = params.require_as_params(:data_provider) + .permit(:name, :description, :group_id, + :remote_user, :remote_host, + :remote_port, :remote_dir + ) + group_id = normal_params[:group_id] + current_user.assignable_group_ids.find(group_id) # ensure assignable, not sure need check visibility etc more + @provider = UserkeyFlatDirSshDataProvider.new(normal_params) + @provider.user_id = current_user.id # prevent creation of dp on behalf of other users + + if ! @provider.save + @groups = current_user.assignable_groups + respond_to do |format| + format.html { render :action => :new_personal} + format.json { render :json => @provider.errors, :status => :unprocessable_entity } + end + return + end + + @provider.addlog_context(self, "Created by #{current_user.login}") + @provider.meta[:browse_gid] = current_user.own_group.id + flash[:notice] = "Provider successfully created. Please click the Test Configuration button."\ + " This will run tests on the current storage configuration. Note that if these tests fail,"\ + " the storage will be marked 'offline'." + + respond_to do |format| + format.html { redirect_to :action => :show, :id => @provider.id} + format.json { render :json => @provider } + end + end + def update #:nodoc: @user = current_user id = params[:id] @@ -125,35 +168,29 @@ def update #:nodoc: return end - new_data_provider_attr = data_provider_params + # hacking prevention + # this guaranties that users do not change group to something + group_id = params[:group_id] + current_user.assignable_group_ids.find(group_id) if ! current_user.has_role?(:admin_user) + + new_data_provider_attr = data_provider_params(@provider) new_data_provider_attr.delete :type # Type cannot be updated once it is set. # Fields that stay the same if the form provides a blank entry: new_data_provider_attr.delete :cloud_storage_client_token if new_data_provider_attr[:cloud_storage_client_token].blank? 
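Aside, not part of the patch: the shape of the ownership and whitelisting checks in the new create_personal action above, reduced to plain hashes instead of ActionController::Parameters / require_as_params. The names whitelist, ALLOWED_KEYS and the ArgumentError are made up for this sketch; the real action forces user_id to current_user.id and verifies group_id against current_user.assignable_group_ids.

# Hypothetical sketch of the create_personal parameter handling.
ALLOWED_KEYS = [:name, :description, :group_id,
                :remote_user, :remote_host, :remote_port, :remote_dir]

def whitelist(raw_params, current_user_id, assignable_group_ids)
  attrs = raw_params.select { |k, _v| ALLOWED_KEYS.include?(k) }
  # Reject any group the current user cannot assign.
  raise ArgumentError, "group not assignable" unless
    assignable_group_ids.include?(attrs[:group_id])
  # Ownership is never taken from the request.
  attrs.merge(:user_id => current_user_id)
end

p whitelist({ :name => "MyDP", :group_id => 7, :user_id => 999 }, 12, [7, 8])
# => {:name=>"MyDP", :group_id=>7, :user_id=>12}   (the user_id 999 from the request is ignored)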
- if @provider.update_attributes_with_logging(new_data_provider_attr, current_user, - %w( - remote_user remote_host remote_port remote_dir - not_syncable cloud_storage_client_identifier cloud_storage_client_token - cloud_storage_client_bucket_name cloud_storage_client_path_start - cloud_storage_endpoint cloud_storage_region - datalad_repository_url datalad_relative_path - containerized_path - ) - ) + if @provider.update_attributes_with_logging(new_data_provider_attr, current_user, @provider.attributes.keys) meta_flags_for_restrictions = (params[:meta] || {}).keys.grep(/\Adp_no_copy_\d+\z|\Arr_no_sync_\d+\z/) add_meta_data_from_form(@provider, [:must_move, :no_uploads, :no_viewers, :browse_gid] + meta_flags_for_restrictions) flash[:notice] = "Provider successfully updated." respond_to do |format| format.html { redirect_to :action => :show } - format.xml { render :xml => @provider } - format.json { render :json => @provider } + format.json { render :json => @provider } end else @provider.reload respond_to do |format| format.html { render :action => 'show' } - format.xml { render :xml => @provider.errors, :status => :unprocessable_entity } format.json { render :json => @provider.errors, :status => :unprocessable_entity } end end @@ -819,9 +856,59 @@ def repair end end + # This action checks that the remote side of a Ssh DataProvider is + # accessible using SSH. Regretfully, does not guaranty that connection is possible. + # If check fails it raises an exception of class DataProviderTestConnectionError + def check_personal + id = params[:id] + @provider = DataProvider.find(id) + unless @provider.has_owner_access?(current_user) + flash[:error] = "You cannot check a provider that you do not own." + respond_to do |format| + format.html { redirect_to :action => :show } + format.xml { head :forbidden } + format.json { head :forbidden } + end + return + end + + unless @provider.is_a? SshDataProvider + flash[:error] = "Presently, detailed check is only available to ssh providers." + respond_to do |format| + format.html { redirect_to :action => :show } + format.xml { head :forbidden } + format.json { head :forbidden } + end + return + end + + # Do active checks of the connection. Will + # raise DataProviderTestConnectionError if anything is wrong. + @provider.check_connection! + + # Ok, all is well. + @provider.update_column(:online, true) + flash[:notice] = "The configuration was tested and seems to be operational." + + respond_to do |format| + format.html { redirect_to :action => :show } + format.json { render :json => 'ok' } + end + + rescue DataProviderTestConnectionError => ex + flash[:error] = ex.message + flash[:error] += "\nThis storage is marked as 'offline' until this test pass." + @provider.update_column(:online, false) + + respond_to do |format| + format.html { redirect_to :action => :show } + format.json { render :json => 'not ok' } + end + end + private - def data_provider_params #:nodoc: + def data_provider_params(for_data_provider=nil) #:nodoc: if current_user.has_role?(:admin_user) params.require_as_params(:data_provider).permit( :name, :user_id, :group_id, :remote_user, :remote_host, :alternate_host, @@ -836,17 +923,15 @@ def data_provider_params #:nodoc: :license_agreements, :containerized_path ) - else - # Normal users are not allowed to change - # some parameters that would allow them to access things - # they don't control. 
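Aside, not part of the patch: the guard / check / rescue shape of the new check_personal action above, as a runnable sketch. DataProviderTestConnectionError is the real exception class added by this change set, but it is redeclared here as a bare RuntimeError subclass, and FakeProvider plus the free-standing check_personal method are made-up stand-ins so the snippet runs on its own.

class DataProviderTestConnectionError < RuntimeError; end  # stand-in declaration

FakeProvider = Struct.new(:name, :online, :reachable) do
  def check_connection!
    raise DataProviderTestConnectionError, "SSH connection test failed" unless reachable
  end
end

def check_personal(provider)
  provider.check_connection!   # raises on any failed test
  provider.online = true
  "The configuration was tested and seems to be operational."
rescue DataProviderTestConnectionError => ex
  provider.online = false      # storage stays offline until the test passes
  "#{ex.message}\nThis storage is marked as 'offline' until this test passes."
end

puts check_personal(FakeProvider.new("good_dp", false, true))
puts check_personal(FakeProvider.new("bad_dp",  true,  false))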
+ elsif for_data_provider.is_a?(UserkeyFlatDirSshDataProvider) params.require_as_params(:data_provider).permit( - :name, :description, :group_id, :time_zone, - :alternate_host, + :name, :description, :group_id, + :remote_user, :remote_host, :remote_port, :remote_dir, :online, :read_only, :not_syncable, - :datalad_repository_url, :datalad_relative_path, - :license_agreements, - :containerized_path + ) + else # place for future expansion; be careful to not introduce security bugs + params.require_as_params(:data_provider).permit( + :description, ) end end @@ -855,6 +940,8 @@ def get_type_list #:nodoc: data_provider_list = [ "FlatDirSshDataProvider" ] if check_role(:site_manager) || check_role(:admin_user) data_provider_list = DataProvider.descendants.map(&:name) + data_provider_list.delete(UserkeyFlatDirSshDataProvider.name) # this type is for regular users + # not for admins end grouped_options = data_provider_list.to_a.hashed_partitions { |name| name.constantize.pretty_category_name } grouped_options.delete(nil) # data providers that can not be on this list return a category name of nil, so we remove them diff --git a/BrainPortal/app/controllers/disk_quotas_controller.rb b/BrainPortal/app/controllers/disk_quotas_controller.rb index 9eb9117b1..c111f425d 100644 --- a/BrainPortal/app/controllers/disk_quotas_controller.rb +++ b/BrainPortal/app/controllers/disk_quotas_controller.rb @@ -124,6 +124,56 @@ def destroy #:nodoc: redirect_to disk_quotas_path end + # Returns a list of users with exceeded quotas + def report #:nodoc: + quota_to_user_ids = {} # quota_obj => [uid, uid...] + + # Scan DP-wide quota objects + DiskQuota.where(:user_id => 0).all.each do |quota| + exceed_size_user_ids = Userfile + .where(:data_provider_id => quota.data_provider_id) + .group(:user_id) + .sum(:size) + .select { |user_id,size| size >= quota.max_bytes } + .keys + exceed_numfiles_user_ids = Userfile + .where(:data_provider_id => quota.data_provider_id) + .group(:user_id) + .sum(:num_files) + .select { |user_id,num_files| num_files >= quota.max_files } + .keys + union_ids = exceed_size_user_ids | exceed_numfiles_user_ids + union_ids -= DiskQuota + .where(:data_provider_id => quota.data_provider_id, :user_id => union_ids) + .pluck(:user_id) # remove user IDs that have their own quota records + quota_to_user_ids[quota] = union_ids if union_ids.size > 0 + end + + # Scan user-specific quota objects + DiskQuota.where('user_id > 0').all.each do |quota| + quota_to_user_ids[quota] = [ quota.user_id ] if quota.exceeded? + end + + # Inverse relation: user_id => [ quota, quota ] + user_id_to_quotas = {} + quota_to_user_ids.each do |quota,user_ids| + user_ids.each do |user_id| + user_id_to_quotas[user_id] ||= [] + user_id_to_quotas[user_id] << quota + end + end + + # Table content: [ [ user_id, quota ], [user_id, quota] ... ] + # Note: the rows are grouped by user_id, but not sorted in any way... + @user_id_and_quota = [] + user_id_to_quotas.each do |user_id, quotas| + quotas.each do |quota| + @user_id_and_quota << [ user_id, quota ] + end + end + + end + private def disk_quota_params #:nodoc: @@ -145,10 +195,11 @@ def base_scope #:nodoc: scope end - # Tries to turn strings like '3 mb' into 3_000_000 etc - # Supported suffixes are T, G, M, K, TB, GB, MB, KB, B (case insensitive) + # Tries to turn strings like '3 mb' into 3_000_000 etc. + # Supported suffixes are T, G, M, K, TB, GB, MB, KB, B (case insensitive). 
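Aside, not part of the patch: a worked example of the hash inversion at the end of the new DiskQuotasController#report action above. Symbols stand in for DiskQuota records, and the flat_map at the end is simply an equivalent of the nested each that builds @user_id_and_quota in the patch.

# quota => [user_id, ...], as produced by the two scans in #report
quota_to_user_ids = {
  :dp_wide_quota_1 => [10, 11],
  :user_quota_2    => [11],
}

# Inverse relation: user_id => [quota, quota, ...]
user_id_to_quotas = {}
quota_to_user_ids.each do |quota, user_ids|
  user_ids.each do |user_id|
    user_id_to_quotas[user_id] ||= []
    user_id_to_quotas[user_id] << quota
  end
end

# Table rows, one [user_id, quota] pair per row (grouped by user, otherwise unsorted)
user_id_and_quota = user_id_to_quotas.flat_map do |user_id, quotas|
  quotas.map { |quota| [user_id, quota] }
end

p user_id_and_quota
# => [[10, :dp_wide_quota_1], [11, :dp_wide_quota_1], [11, :user_quota_2]]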
+ # Negative values are parsed, but the DiskQuota model only accepts the special -1 def guess_size_units(sizestring) - match = sizestring.match /\A\s*(\d*\.?\d+)\s*([tgmk]?)\s*b?\s*\z/i + match = sizestring.match(/\A\s*(-?\d*\.?\d+)\s*([tgmk]?)\s*b?\s*\z/i) return "" unless match # parsing error number = match[1] suffix = match[2].presence&.downcase || 'u' diff --git a/BrainPortal/app/controllers/groups_controller.rb b/BrainPortal/app/controllers/groups_controller.rb index 7ee6f2c33..57fc7c093 100644 --- a/BrainPortal/app/controllers/groups_controller.rb +++ b/BrainPortal/app/controllers/groups_controller.rb @@ -48,21 +48,22 @@ def index #:nodoc: @scope.custom[:button] = true if current_user.has_role?(:normal_user) && @scope.custom[:button].nil? + view_mode = (@scope.custom[:button].present?) ? :button : :list + @base_scope = current_user.listable_groups.includes(:site) @view_scope = @scope.apply(@base_scope) - @scope.pagination ||= Scope::Pagination.from_hash({ :per_page => 50 }) - @groups = @scope.pagination.apply(@view_scope) - @groups = (@groups.to_a << 'ALL') if @scope.custom[:button] + if view_mode == :list + @scope.pagination ||= Scope::Pagination.from_hash({ :per_page => 50 }) + @groups = @scope.pagination.apply(@view_scope) + else + @groups = @view_scope.to_a + end # For regular groups @group_id_2_userfile_counts = Userfile.find_all_accessible_by_user(current_user, :access_requested => :read).group("group_id").count @group_id_2_task_counts = CbrainTask.find_all_accessible_by_user(current_user).group("group_id").count @group_id_2_user_counts = User.joins(:groups).group("group_id").count.convert_keys!(&:to_i) # .joins make keys as string - @group_id_2_tool_counts = Tool.find_all_accessible_by_user(current_user).group("group_id").count - @group_id_2_data_provider_counts = DataProvider.find_all_accessible_by_user(current_user).group("group_id").count - @group_id_2_bourreau_counts = Bourreau.find_all_accessible_by_user(current_user).group("group_id").count - @group_id_2_brain_portal_counts = BrainPortal.find_all_accessible_by_user(current_user).group("group_id").count # For `ALL` group @group_id_2_userfile_counts[nil] = Userfile.find_all_accessible_by_user(current_user, :access_requested => :read).count @@ -275,12 +276,12 @@ def group_params #:nodoc: params.require_as_params(:group).permit( :name, :description, :not_assignable, :site_id, :creator_id, :invisible, :track_usage, - :user_ids => [] + :public, :user_ids => [] ) else # non admin users params.require_as_params(:group).permit( :name, :description, :not_assignable, - :user_ids => [] + :public, :user_ids => [] ) end end diff --git a/BrainPortal/app/controllers/nh_loris_hooks_controller.rb b/BrainPortal/app/controllers/nh_loris_hooks_controller.rb index 66d4a9867..9679b9990 100644 --- a/BrainPortal/app/controllers/nh_loris_hooks_controller.rb +++ b/BrainPortal/app/controllers/nh_loris_hooks_controller.rb @@ -64,6 +64,11 @@ def file_list_maker s_group = Group.where_id_or_name(source_group_id).first if source_group_id base = Userfile.where(nil) # As seen in userfiles_controller base = Userfile.restrict_access_on_query(current_user, base, :access_requested => :read) + + extended_values = ExtendedCbrainFileList.roots_to_fullpaths(source_basenames) + source_basenames = extended_values.keys + is_extended = extended_values.any?{|parent_dir,pathlist| pathlist.present? 
} + userfiles = base.where(:name => source_basenames) userfiles = userfiles.where(:data_provider_id => s_dp.id) if s_dp userfiles = userfiles.where(:group_id => s_group.id) if s_group @@ -71,17 +76,22 @@ def file_list_maker # It is an error not to find exactly the same number of files as in # the params' basenames array found_names = userfiles.pluck(:name) - file_count = found_names.size - exp_count = source_basenames.size + file_count = found_names.size + exp_count = source_basenames.size if (file_count == 0) || (strict_match && (file_count != exp_count)) cb_error "Could not find an exact match for the files. Found #{file_count} of #{exp_count} files" end - # Create CbrainFileList content and save it to DP - cblist_content = CbrainFileList.create_csv_file_from_userfiles(userfiles) - - # Save result file - result = create_file_for_request(CbrainFileList, "Loris-DQT-List.cbcsv", cblist_content) + result = nil + if is_extended + new_extended = extended_values.transform_values {|relpaths| { :all_to_keep => relpaths }.to_json } + userfiles = ExtendedCbrainFileList.extended_userfiles_by_name(userfiles, new_extended) + cblist_content = ExtendedCbrainFileList.create_csv_file_from_userfiles(userfiles) + result = create_file_for_request(ExtendedCbrainFileList, "Extended-Loris-DQT-List.cbcsv", cblist_content) + else + cblist_content = CbrainFileList.create_csv_file_from_userfiles(userfiles) + result = create_file_for_request(CbrainFileList, "Loris-DQT-List.cbcsv", cblist_content) + end # Info message and unmatched entries extra_response = { diff --git a/BrainPortal/app/controllers/nh_storages_controller.rb b/BrainPortal/app/controllers/nh_storages_controller.rb index ecda963b8..be5281ec1 100644 --- a/BrainPortal/app/controllers/nh_storages_controller.rb +++ b/BrainPortal/app/controllers/nh_storages_controller.rb @@ -29,9 +29,6 @@ class NhStoragesController < NeurohubApplicationController before_action :login_required - # A private exception class when testing connectivity - class UserKeyTestConnectionError < RuntimeError ; end - def new #:nodoc: @nh_dp = UserkeyFlatDirSshDataProvider.new @nh_projects = find_nh_projects(current_user) @@ -204,70 +201,19 @@ def check @nh_dp.update_column(:online, true) - master = @nh_dp.master # This is a handler for the connection, not persistent. - tmpfile = "/tmp/dp_check.#{Process.pid}.#{rand(1000000)}" - - # Check #1: the SSH connection can be established - if ! master.is_alive? - test_error "Cannot establish the SSH connection. Check the configuration: username, hostname, port are valid, and SSH key is installed." - end - - # Check #2: we can run "true" on the remote site and get no output - status = master.remote_shell_command_reader("true", - :stdin => "/dev/null", - :stdout => "#{tmpfile}.out", - :stderr => "#{tmpfile}.err", - ) - stdout = File.read("#{tmpfile}.out") rescue "Error capturing stdout" - stderr = File.read("#{tmpfile}.err") rescue "Error capturing stderr" - if stdout.size != 0 - stdout.strip! if stdout.present? # just to make it pretty while still reporting whitespace-only strings - test_error "Remote shell is not clean: got some bytes on stdout: '#{stdout}'" - end - if stderr.size != 0 - stderr.strip! if stdout.present? - test_error "Remote shell is not clean: got some bytes on stderr: '#{stderr}'" - end - if ! status - test_error "Got non-zero return code when trying to run 'true' on remote side." 
- end - - # Check #3: the remote directory exists - master.remote_shell_command_reader "test -d #{@nh_dp.remote_dir.bash_escape} && echo DIR-OK", :stdout => tmpfile - out = File.read(tmpfile) - if out != "DIR-OK\n" - test_error "The remote directory doesn't seem to exist." - end - - # Check #4: the remote directory is readable - master.remote_shell_command_reader "test -r #{@nh_dp.remote_dir.bash_escape} && test -x #{@nh_dp.remote_dir.bash_escape} && echo DIR-READ", :stdout => tmpfile - out = File.read(tmpfile) - if out != "DIR-READ\n" - test_error "The remote directory doesn't seem to be readable" - end + # Performs an active check of the connection; will + # raise DataProviderTestConnectionError if something is wrong. + @nh_dp.check_connection! # Ok, all is well. flash[:notice] = "The configuration was tested and seems to be operational." redirect_to :action => :show - rescue UserKeyTestConnectionError => ex + rescue DataProviderTestConnectionError => ex flash[:error] = ex.message flash[:error] += "\nThis storage is marked as 'offline' until this test pass." @nh_dp.update_column(:online, false) redirect_to :action => :show - - ensure - File.unlink "#{tmpfile}.out" rescue true - File.unlink "#{tmpfile}.err" rescue true - - end - - private - - # Utility method to raise an exception - # when testing for a DP's configuration. - def test_error(message) #:nodoc: - raise UserKeyTestConnectionError.new(message) end end diff --git a/BrainPortal/app/controllers/noc_controller.rb b/BrainPortal/app/controllers/noc_controller.rb index 4c84835a3..6414c4dd9 100644 --- a/BrainPortal/app/controllers/noc_controller.rb +++ b/BrainPortal/app/controllers/noc_controller.rb @@ -255,7 +255,7 @@ def gather_info(since_when) #:nodoc: # Count of active statuses info[:status_counts] = b.is_a?(BrainPortal) ? [] : - b.cbrain_tasks.where(["updated_at > ?", since_when ]) + b.cbrain_tasks.where(["updated_at > ? or status in (?)", since_when, CbrainTask::RUNNING_STATUS ]) .group(:status) .count .to_a # [ [ status, count ], [ status, count ] ... ] diff --git a/BrainPortal/app/controllers/portal_controller.rb b/BrainPortal/app/controllers/portal_controller.rb index 86b410585..fdb65b600 100644 --- a/BrainPortal/app/controllers/portal_controller.rb +++ b/BrainPortal/app/controllers/portal_controller.rb @@ -27,9 +27,9 @@ class PortalController < ApplicationController include DateRangeRestriction - api_available :only => [ :swagger ] # GET /swagger returns the .json specification + api_available :only => [ :swagger, :stats ] # GET /swagger returns the .json specification - before_action :login_required, :except => [ :credits, :about_us, :welcome, :swagger, :available ] # welcome is here so that the redirect to the login page doesn't show the error message + before_action :login_required, :except => [ :credits, :about_us, :welcome, :swagger, :available, :stats ] # welcome is here so that the redirect to the login page doesn't show the error message before_action :admin_role_required, :only => :portal_log # Display a user's home page with information about their account. @@ -253,7 +253,6 @@ def available #:nodoc: .select { |t| t.tool_configs.to_a.any? { |tc| tc.bourreau_id.present? && tc.bourreau_id > 0 && - tc.bourreau.try(:online) && # comment out to show them all tc.version_name.present? } } @@ -444,6 +443,22 @@ def swagger end end + # Return information about the usage of the platform. 
+ def stats + @stats = RemoteResource.current_resource.meta[:stats] || {} + @stats_by_client = @stats[:UserAgents] || {} + @stats_by_contr_action = compile_total_stats(@stats) + + @last_reset = (RemoteResource.current_resource.meta.md_for_key(:stats).created_at || Time.at(0)).utc.iso8601 + @stats[:lastReset] = @last_reset + + respond_to do |format| + format.html + format.xml { render :xml => @stats } + format.json { render :json => @stats } + end + end + private def merge_vals_as_array(*sub_reports) #:nodoc: @@ -486,4 +501,26 @@ def colorize_logs(data) #:nodoc: data end + # From the raw stats accumulated for all clients, + # controllers and actions, compile two other + # secondary stats: the sums by clients, and + # the sums by pair "controller,service". + def compile_total_stats(stats={}) #:nodoc: + stats_by_contr_action = {} + + # stats['AllAgents'] is { 'controller' => { 'action' => [1,2] , ... }, ... } + all_agents = stats['AllAgents'] || stats[:AllAgents] || {} + all_agents.each do |controller, by_action| + by_action.each do |action, counts| + # By controller and action + contr_action = "#{controller},#{action}" + stats_by_contr_action[contr_action] ||= [0,0] + stats_by_contr_action[contr_action][0] += counts[0] + stats_by_contr_action[contr_action][1] += counts[1] + end + end + + return stats_by_contr_action + end + end diff --git a/BrainPortal/app/controllers/service_controller.rb b/BrainPortal/app/controllers/service_controller.rb deleted file mode 100644 index 0269d3fa6..000000000 --- a/BrainPortal/app/controllers/service_controller.rb +++ /dev/null @@ -1,222 +0,0 @@ - -# -# CBRAIN Project -# -# Copyright (C) 2008-2012 -# The Royal Institution for the Advancement of Learning -# McGill University -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -# Controller for the Service. -# Implement actions as defined by the CANARIE Web Service Monitoring API. -# -# By default, all these actions are commented out in the route.rb file, -# so for most installations this controller is NOT USED at all. -class ServiceController < ApplicationController - - Revision_info=CbrainFileRevision[__FILE__] #:nodoc: - - api_available - - # Return basic identification and provenance - # information about the platform - def info - @info = { :name => "CBRAIN Data Platform", - :synopsis => <<-SYNOPSYS.strip.gsub(/\s\s+/, " "), # the ugly gsub is because of CANARIE - - The CBRAIN platform is a web-based, collaborative research platform - designed to address major issues in "Big Data" research through a - single consistent framework by providing researchers the ability - to easily execute large-scale pipelines for computational research. - The platform's primary objective is to provide a user-friendly, - extensible, integrated, and robust yet lightweight collaborative - neuroimaging research platform providing transparent access to the - heterogeneous computing and data resources available across Canada - and around the world. 
- - SYNOPSYS - :version => CbrainFileRevision.cbrain_head_tag, - :institution => "McGill University", - :releaseTime => Time.parse(CbrainFileRevision.cbrain_head_revinfo.datetime).utc.iso8601, - :researchSubject => "Multi-discipline", - :supportEmail => RemoteResource.current_resource.support_email, - :tags => [ "neurology", "CBRAIN", "data transfer", "cluster", - "supercomputer", "task", "data modeling", "visualization", - ], - } - - respond_to do |format| - format.html - format.xml { render :xml => @info } - format.json { render :json => @info } - end - end - - # Return information about the usage of the platform. - def stats - stats = RemoteResource.current_resource.meta[:stats] || {} - stats_by_contr_action = compile_total_stats(stats) - - @summary_stats = stats['UserAgents'].dup - @last_reset = (RemoteResource.current_resource.meta.md_for_key(:stats).try(:created_at) || Time.now).utc.iso8601 - authenticated_actions = count_authenticated_actions(stats_by_contr_action) - @summary_stats["TotalActions"] = authenticated_actions - @summary_stats["lastReset"] = @last_reset - - # CANARIE only wants TWO fields. :-( - @json_stats = { - "Total Actions" => authenticated_actions, - "lastReset" => @last_reset, - } - - respond_to do |format| - format.html - format.xml { render :xml => @summary_stats } - format.json { render :json => @json_stats } - end - end - - # Return the online documentation. - def doc - redirect_to 'https://github.com/aces/cbrain/wiki' - end - - # Return release note describing the current version - # of the platform APIs. - def releasenotes - redirect_to 'https://github.com/aces/cbrain/blob/master/Release-Notes.md' - end - - # Provides information on how to get support - # for the platform. - def support - about_us_url = url_for(:controller => :portal, :action => :about_us) - @support = { :supportEmail => RemoteResource.current_resource.support_email, - :aboutUs => about_us_url, - :helpUrl => RemoteResource.current_resource.help_url, - } - - respond_to do |format| - format.html - format.xml { render :xml => @support } - format.json { render :json => @support } - end - end - - # Return link to the source code of the platform - def source - redirect_to 'https://github.com/aces/cbrain' - end - - # Redirects to the main login page. - def tryme - redirect_to '/login' - end - - # Allows users to view platform's - # licencing/usage term. - def licence - respond_to do |format| - format.html { redirect_to :controller => :portal, :action => :about_us } - format.xml { head :not_acceptable } - format.json { head :not_acceptable } - end - end - - # Allows user to view the software provenance - def provenance - respond_to do |format| - format.html { render 'portal/provenance' } - format.xml { head :not_acceptable } - format.json { head :not_acceptable } - end - end - - # Base information - def factsheet - respond_to do |format| - format.html { redirect_to :controller => :portal, :action => :about_us } - format.xml { head :not_acceptable } - format.json { head :not_acceptable } - end - end - - # Return information about the usage of the platform. 
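Aside, not part of the patch: a worked example of compile_total_stats, the helper that moves from the deleted ServiceController into PortalController in this change set. The method body below is the one from the patch; the sample stats hash is made up.

def compile_total_stats(stats = {})
  stats_by_contr_action = {}
  all_agents = stats['AllAgents'] || stats[:AllAgents] || {}
  all_agents.each do |controller, by_action|
    by_action.each do |action, counts|
      contr_action = "#{controller},#{action}"
      stats_by_contr_action[contr_action] ||= [0, 0]
      stats_by_contr_action[contr_action][0] += counts[0] # successful calls
      stats_by_contr_action[contr_action][1] += counts[1] # failed calls
    end
  end
  stats_by_contr_action
end

sample = {
  'AllAgents' => {
    'userfiles' => { 'index' => [40, 2], 'show' => [10, 0] },
    'tasks'     => { 'index' => [25, 1] },
  },
}
p compile_total_stats(sample)
# => {"userfiles,index"=>[40, 2], "userfiles,show"=>[10, 0], "tasks,index"=>[25, 1]}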
- def detailed_stats - @stats = RemoteResource.current_resource.meta[:stats] || {} - @stats_by_client = @stats[:UserAgents] || {} - @stats_by_contr_action = compile_total_stats(@stats) - - @last_reset = (RemoteResource.current_resource.meta.md_for_key(:stats).created_at || Time.at(0)).utc.iso8601 - @stats[:lastReset] = @last_reset - - respond_to do |format| - format.html - format.xml { render :xml => @stats } - format.json { render :json => @stats } - end - end - - private - - # From the raw stats accumulated for all clients, - # controllers and actions, compile two other - # secondary stats: the sums by clients, and - # the sums by pair "controller,service". - def compile_total_stats(stats={}) #:nodoc: - stats_by_contr_action = {} - - # stats['AllAgents'] is { 'controller' => { 'action' => [1,2] , ... }, ... } - all_agents = stats['AllAgents'] || stats[:AllAgents] || {} - all_agents.each do |controller, by_action| - by_action.each do |action, counts| - # By controller and action - contr_action = "#{controller},#{action}" - stats_by_contr_action[contr_action] ||= [0,0] - stats_by_contr_action[contr_action][0] += counts[0] - stats_by_contr_action[contr_action][1] += counts[1] - end - end - - return stats_by_contr_action - end - - # Returns a count of all actions that require - # being authenticated; there is a built-in - # exception list to ignore actions that can - # be invoked externally without authentication - # (for instance, /service/* or /portal/welcome) - # Returns the sum of successful and unsuccessful - # actions. - def count_authenticated_actions(stats_by_contr_action = {}) #:nodoc: - tot = 0; - stats_by_contr_action.keys.sort.each do |contr_action| - next if contr_action == 'portal,welcome' - next if contr_action == 'portal,credits' - next if contr_action == 'portal,about_us' - controller = contr_action.split(",").first - next if controller == 'service' # all of them - next if controller == 'controls' # show - next if controller == 'sessions' # new, show, destroy, create - next if controller == 'nh_sessions' - next if controller == 'noc' - counts = stats_by_contr_action[contr_action] || [0,0] - tot += counts[0] + counts[1] # OK + FAIL - end - tot - end - -end diff --git a/BrainPortal/app/controllers/sessions_controller.rb b/BrainPortal/app/controllers/sessions_controller.rb index 0b254994d..8410ca194 100644 --- a/BrainPortal/app/controllers/sessions_controller.rb +++ b/BrainPortal/app/controllers/sessions_controller.rb @@ -331,7 +331,7 @@ def user_tracking(portal,origin='CBRAIN') #:nodoc: # the same information afterwards. Thus the weird style alignment. user.addlog( "Logged in on #{portal.name}/#{origin} with #{authentication_mechanism} from #{pretty_host} using #{pretty_brow}") portal.addlog("User #{user.login} logged in on #{origin} with #{authentication_mechanism} from #{pretty_host} using #{pretty_brow}") - user.update_attribute(:last_connected_at, Time.now) + user.update_column(:last_connected_at, Time.now) # Admin users start with some differences in behavior if user.has_role?(:admin_user) diff --git a/BrainPortal/app/controllers/tasks_controller.rb b/BrainPortal/app/controllers/tasks_controller.rb index 5a0fabb11..c6d6a2319 100644 --- a/BrainPortal/app/controllers/tasks_controller.rb +++ b/BrainPortal/app/controllers/tasks_controller.rb @@ -187,7 +187,7 @@ def new #:nodoc: if tool_config_id # the prefered method @tool_config = ToolConfig.find(tool_config_id) if ! 
@tool_config.can_be_accessed_by?(current_user) - raise ActiveRecord::RecordNotFound("Cannot access ToolConfig ##{tool_config_id}") + raise ActiveRecord::RecordNotFound.new("Cannot access ToolConfig ##{tool_config_id}") end else # Try to propose a version; usually that's when we get just a tool_id if tool_id.blank? @@ -197,7 +197,7 @@ def new #:nodoc: end tool = Tool.find(tool_id) if ! tool.can_be_accessed_by?(current_user) - raise ActiveRecord::RecordNotFound("Cannot access Tool ##{tool_id}") + raise ActiveRecord::RecordNotFound.new("Cannot access Tool ##{tool_id}") end bourreau_id = Bourreau.find_all_accessible_by_user(current_user).where(:online => true).pluck(:id) if bourreau_id.nil? # try them all toolconfigs = ToolConfig.where( @@ -1502,6 +1502,8 @@ def handle_preset_actions #:nodoc: if (! preset_id.blank?) && preset = CbrainTask.where(:id => preset_id, :status => [ 'Preset', 'SitePreset' ]).first old_params = @task.params.clone @task.params = preset.params + @task.description = @task.description || "" + @task.description += "\n\nDescription from preset configuration:\n\n#{preset.description}" if preset.description.present? @task.restore_untouchable_attributes(old_params, :include_unpresetable => true) if preset.group && preset.group.can_be_accessed_by?(current_user) @task.group = preset.group diff --git a/BrainPortal/app/controllers/tool_configs_controller.rb b/BrainPortal/app/controllers/tool_configs_controller.rb index d287c23ec..d71b2f6da 100644 --- a/BrainPortal/app/controllers/tool_configs_controller.rb +++ b/BrainPortal/app/controllers/tool_configs_controller.rb @@ -238,9 +238,16 @@ def update #:nodoc: respond_to do |format| new_record = @tool_config.new_record? - if @tool_config.save_with_logging(current_user, %w( env_array script_prologue script_epilogue ncpus extra_qsub_args - container_image_userfile_id containerhub_image_name - container_engine container_index_location )) + if @tool_config.save_with_logging(current_user, + %w( version_name env_array script_prologue script_epilogue ncpus extra_qsub_args + container_image_userfile_id containerhub_image_name + container_engine container_index_location container_exec_args + inputs_readonly + singularity_overlays_specs singularity_use_short_workdir + boutiques_descriptor_path + ) + ) + if new_record flash[:notice] = "Tool configuration is successfully created." else @@ -298,6 +305,7 @@ def tool_config_params #:nodoc: :group_id, :ncpus, :container_image_userfile_id, :containerhub_image_name, :container_index_location, :inputs_readonly, :container_engine, :extra_qsub_args, :singularity_overlays_specs, :container_exec_args, + :singularity_use_short_workdir, :boutiques_descriptor_path, # The configuration of a tool in a VM managed by a # ScirCloud Bourreau is defined by the following diff --git a/BrainPortal/app/controllers/userfiles_controller.rb b/BrainPortal/app/controllers/userfiles_controller.rb index f9869e118..3204b36d5 100644 --- a/BrainPortal/app/controllers/userfiles_controller.rb +++ b/BrainPortal/app/controllers/userfiles_controller.rb @@ -239,7 +239,7 @@ def stream # Find and validate target userfile @userfile = Userfile.find_accessible_by_user(userfile_id, current_user, :access_requested => :read) if @userfile.nil? 
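Aside, not part of the patch: why the raise statements in tasks_controller.rb above and userfiles_controller.rb below gain a .new in this change set. SomeError("msg") is parsed as a call to a method named SomeError, which does not exist, so the old form raised NoMethodError instead of the intended exception; SomeError.new("msg") (or raise SomeError, "msg") builds the exception properly. NotFoundDemo is a made-up local class so the sketch runs on its own.

class NotFoundDemo < StandardError; end

begin
  raise NotFoundDemo("missing")      # old form: really a call to a method named NotFoundDemo()
rescue NoMethodError => ex
  puts "Old form fails with #{ex.class}"            # NoMethodError, not NotFoundDemo
end

begin
  raise NotFoundDemo.new("missing")  # corrected form: instantiates the exception
rescue NotFoundDemo => ex
  puts "New form raises #{ex.class}: #{ex.message}"
end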
- raise ActiveRecord::RecordNotFound("Could not retrieve a userfile with ID: #{userfile_id}") + raise ActiveRecord::RecordNotFound.new("Could not retrieve a userfile with ID: #{userfile_id}") end # If it's a SingleFile @@ -433,8 +433,16 @@ def sync_multiple #:nodoc: SyncStatus.where(:userfile_id => @userfiles.map(&:id), :status => [ "InSync" ]).all.each do |ss| updated += 1 if ss.status_transition(ss.status,"ProvNewer") end - flash[:notice] = "Marked #{updated} files as newer on their Data Provider." - redirect_to :action => :index + respond_to do |format| + format.html do + flash[:notice] = "Marked #{updated} files as newer on their Data Provider." + redirect_to :action => :index + end + format.json do + render :json => { :notice => "Marked #{updated} files as newer on provider" }, + :status => :ok + end + end return end @@ -1091,7 +1099,7 @@ def change_provider #:nodoc: next if orig_provider.id == data_provider_id # no support for copy to same provider in the interface, yet. res = nil if task == :move - raise "not owner" unless u.has_owner_access?(current_user) + raise RuntimeError.new("Not owner") unless u.has_owner_access?(current_user) res = u.provider_move_to_otherprovider(new_provider, :crush_destination => crush_destination) else # task is :copy my_group_id = current_assignable_group.id @@ -1145,10 +1153,15 @@ def delete_files #:nodoc: failed_list = {} CBRAIN.spawn_with_active_records_if(! api_request?, current_user, "Delete files") do idlist = to_delete.raw_first_column(:id).shuffle + reset_dpids = {} idlist.each_with_index do |userfile_id,count| userfile = Userfile.find(userfile_id) rescue nil # that way we instantiate one record at a time next unless userfile # in case it was destroyed externally Process.setproctitle "Delete ID=#{userfile.id} #{count+1}/#{idlist.size}" + if ! reset_dpids[userfile.data_provider_id] + userfile.data_provider.reset_connection if userfile.data_provider.respond_to?(:reset_connection) + reset_dpids[userfile.data_provider_id] = true + end begin userfile.destroy deleted_success_list << userfile @@ -1703,13 +1716,13 @@ def extract_from_archive(archive_file_name, file_type = nil, attributes = {}) #: :variable_text => report ) end - rescue => e - Message.send_message(current_user, - :message_type => 'error', - :header => "File extraction failed", - :description => "Some errors occurred while extracting files from archive '#{archive_file_name}'", - :variable_text => e.message - ) + rescue => e + Message.send_message(current_user, + :message_type => 'error', + :header => "File extraction failed", + :description => "Some errors occurred while extracting files from archive '#{archive_file_name}'", + :variable_text => e.message + ) end # This method creates a tar file of the userfiles listed @@ -2103,7 +2116,7 @@ def userfile_for_viewer # Otherwise we want to view a file inside a FileCollection. 
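Aside, not part of the patch: the reset_dpids hash added to delete_files above is a plain once-per-provider memo. A minimal sketch of the same pattern, with (file_id, data_provider_id) pairs standing in for Userfile records:

files = [[1, 7], [2, 7], [3, 9], [4, 7]]

reset_dpids = {}   # data_provider_id => true once its connection has been reset
files.each do |file_id, dp_id|
  unless reset_dpids[dp_id]
    puts "resetting connection of provider ##{dp_id}"   # happens once per provider
    reset_dpids[dp_id] = true
  end
  puts "deleting file ##{file_id}"
end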
# Create a fake Userfile to pass information to the viewer sub_file_info = @top_userfile.provider_collection_index.detect { |u| u.name == sub_file_name } - raise ActiveRecord::RecordNotFound("Could not retrieve a file with the name #{sub_file_name} inside the FileCollection") if !sub_file_info + raise ActiveRecord::RecordNotFound.new("Could not retrieve a file with the name #{sub_file_name} inside the FileCollection") if !sub_file_info # Find the class for the new userfile object that will be used for viewing viewer_userfile_class = viewer_class_name.try(:constantize) || @top_userfile.class diff --git a/BrainPortal/app/helpers/basic_helper.rb b/BrainPortal/app/helpers/basic_helper.rb index a17570719..fa1f68526 100644 --- a/BrainPortal/app/helpers/basic_helper.rb +++ b/BrainPortal/app/helpers/basic_helper.rb @@ -80,5 +80,12 @@ def tree_view_icon(level = 0) (' ' * 4 * (level.presence || 0) + '↳').html_safe end + # Renders 1234567 as 1,234,567 + def number_with_commas(number) + s = number.to_s + return s if s !~ /\A\d+\z/ # anything not a series of digits is just returned as is + s.reverse.gsub(/(\d\d\d)(?=\d)/, '\1,').reverse + end + end diff --git a/BrainPortal/app/helpers/disk_quotas_helper.rb b/BrainPortal/app/helpers/disk_quotas_helper.rb new file mode 100644 index 000000000..60810c2d2 --- /dev/null +++ b/BrainPortal/app/helpers/disk_quotas_helper.rb @@ -0,0 +1,38 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# Helper methods for Disk Quota views. +module DiskQuotasHelper + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # Returns a DiskQuota max_bytes in pretty form: 'None allowed' in red, or '126 MB' etc (colored) + def pretty_quota_max_bytes(quota) + quota.none_allowed? ? red_if(true, 'None allowed') : colored_pretty_size(quota.max_bytes) + end + + # Returns a DiskQuota max_files in pretty form: 'None allowed' in red, or just a number + def pretty_quota_max_files(quota) + quota.none_allowed? ? red_if(true, 'None allowed') : number_with_commas(quota.max_files) + end + +end diff --git a/BrainPortal/app/helpers/dynamic_form_helper.rb b/BrainPortal/app/helpers/dynamic_form_helper.rb index 3aa1da1ce..238580ebe 100644 --- a/BrainPortal/app/helpers/dynamic_form_helper.rb +++ b/BrainPortal/app/helpers/dynamic_form_helper.rb @@ -49,15 +49,18 @@ def submit_button(value, options = {}) # Create a checkbox that will select or deselect all checkboxes on the page # of class +checkbox_class+. # Most +options+ are just treated as HTML attributes. - # Except +options[:persistant_name]+; if provided, - # an additional hidden input will be added to track - # the state of the select_all checkbox. + # + # Except: + # - +options[:persistant_name]+: if provided, an additional hidden + # input will be added to track the state of the select_all checkbox. 
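Aside, not part of the patch: usage examples for the new number_with_commas helper in basic_helper.rb above. The reverse/gsub trick inserts a comma before every complete group of three digits counted from the right; the method body is the one from the patch.

def number_with_commas(number)
  s = number.to_s
  return s if s !~ /\A\d+\z/ # anything not a series of digits is just returned as is
  s.reverse.gsub(/(\d\d\d)(?=\d)/, '\1,').reverse
end

puts number_with_commas(1234567)      # => 1,234,567
puts number_with_commas(999)          # => 999
puts number_with_commas("12345678")   # => 12,345,678
puts number_with_commas("12.5 GB")    # => 12.5 GB  (left untouched)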
def select_all_checkbox(checkbox_class, options = {}) options[:class] ||= "" options[:class] += " select_all" - options["data-checkbox-class"] = checkbox_class - atts = options.reject{|x| x.to_s === "persistant_name"}.to_html_attributes + options["data-checkbox-class"] = checkbox_class + options["data-persistant-name"] = options[:persistant_name].present? + + atts = options.reject { |x| x.to_s == "persistant_name" }.to_html_attributes # Most common case just the select_all input input = "".html_safe @@ -66,11 +69,11 @@ def select_all_checkbox(checkbox_class, options = {}) # Add the hidden input; javascript code will update # its value as needed. hidden_options = { - :name => options[:persistant_name], + :name => options[:persistant_name], "data-checkbox-class" => checkbox_class, :class => "select_all_hidden" - } - + } + hidden_atts = hidden_options.to_html_attributes hidden_input = "".html_safe inputs = "#{input} #{hidden_input}".html_safe diff --git a/BrainPortal/app/helpers/groups_helper.rb b/BrainPortal/app/helpers/groups_helper.rb index a53aefdd2..edaff110c 100644 --- a/BrainPortal/app/helpers/groups_helper.rb +++ b/BrainPortal/app/helpers/groups_helper.rb @@ -33,24 +33,15 @@ def css_group_type(group, group_user_count = nil) return group.to_s.downcase unless group.is_a?(Group) # SystemGroup subclasses; UserGroup => "user", EveryoneGroup => "everyone" - return group.class.name.demodulize.match(/\A([A-Z][a-z]+)/).to_s.downcase if group.is_a?(SystemGroup) + return "user" if group.is_a?(UserGroup) + return "everyone" if group.is_a?(EveryoneGroup) + return "site" if group.is_a?(SiteGroup) group_user_count ||= group.users.count - return "invisible" if group.invisible? return "public" if group.public? - return "empty" if group_user_count == 0 return "shared" if group_user_count > 1 - return "personal" + return "private" end - # Produces a centered legend for every distinct group type in +groups+ - def group_legend(groups) - return if groups.blank? - - center_legend(nil, groups.map { |g| css_group_type(g) }.uniq.map { |g| - # 9675: UTF8 white circle, 9679: UTF8 black circle - ["&##{g == "all" ? "x25ef" : "x2b24"};", "#{g.titleize} Project"] - }) - end end diff --git a/BrainPortal/app/helpers/select_box_helper.rb b/BrainPortal/app/helpers/select_box_helper.rb index 63ca15f7b..bea85d59e 100644 --- a/BrainPortal/app/helpers/select_box_helper.rb +++ b/BrainPortal/app/helpers/select_box_helper.rb @@ -152,7 +152,7 @@ def group_select(parameter_name = "group_id", options = {}, select_tag_options = end # Step 3: Other project categories, in that order - [ "Shared Work Projects", "Empty Work Projects", "Site Projects", "User Projects", "System Projects", "Invisible Projects", "Everyone Projects" ].each do |proj| + [ "Shared Work Projects", "Empty Work Projects", "Site Projects", "User Projects", "System Projects", "Invisible Projects", "Everyone Projects", "Public Projects" ].each do |proj| ordered_category_grouped << [ proj, category_grouped_pairs.delete(proj) ] if category_grouped_pairs[proj] end diff --git a/BrainPortal/app/helpers/userfiles_helper.rb b/BrainPortal/app/helpers/userfiles_helper.rb index 493d7eaa8..ad0eb5089 100644 --- a/BrainPortal/app/helpers/userfiles_helper.rb +++ b/BrainPortal/app/helpers/userfiles_helper.rb @@ -79,7 +79,7 @@ def file_link_table(previous_userfile, next_userfile, sort_index, options = {}) # Generates links to pretty file content for files inside FileCollections. # Generates a download link if no viewer code can be found for the files. 
- def data_link(file_name, userfile) + def data_link(file_name, userfile, replace_div_id="sub_viewer_filecollection_cbrain") full_path_name = Pathname.new(userfile.cache_full_path.dirname + file_name) display_name = full_path_name.basename.to_s @@ -87,9 +87,12 @@ def data_link(file_name, userfile) file_lstat = full_path_name.lstat # lstat doesn't follow symlinks, so we can tell if it is one - return h(display_name) unless file_lstat.file? + # return if userfile class is a FileCollection and file is not a file (i.e. a directory) + return h(display_name) if userfile.is_a?(FileCollection) && !file_lstat.file? + + matched_class = SingleFile.descendants.unshift(SingleFile).find { |c| file_name =~ c.file_name_pattern } + matched_class ||= userfile.class if userfile.is_a?(SingleFile) - matched_class = SingleFile.descendants.unshift(SingleFile).find { |c| file_name =~ c.file_name_pattern } viewer = matched_class.class_viewers.first.partial rescue nil if matched_class && viewer @@ -100,7 +103,7 @@ def data_link(file_name, userfile) :viewer => viewer, :viewer_userfile_class => matched_class ), - :replace => "sub_viewer_filecollection_cbrain", + :replace => replace_div_id, } ) do (""+display_name+"").html_safe diff --git a/BrainPortal/app/models/application_record.rb b/BrainPortal/app/models/application_record.rb index 6bf174128..7689f069e 100644 --- a/BrainPortal/app/models/application_record.rb +++ b/BrainPortal/app/models/application_record.rb @@ -96,6 +96,8 @@ def self.default_api_limit #:nodoc: end # Useful generic scopes for console users. + scope :uhour, -> { where [ "#{self.quoted_table_name}.updated_at >= ?", 1.hour.ago ] } + scope :chour, -> { where [ "#{self.quoted_table_name}.created_at >= ?", 1.hour.ago ] } scope :utoday, -> { where [ "#{self.quoted_table_name}.updated_at >= ?", Time.now.at_beginning_of_day ] } scope :ctoday, -> { where [ "#{self.quoted_table_name}.created_at >= ?", Time.now.at_beginning_of_day ] } # Note: the following two scopes imply that the week starts on Monday morning diff --git a/BrainPortal/app/models/bourreau.rb b/BrainPortal/app/models/bourreau.rb index 31622acbb..2124e9e65 100644 --- a/BrainPortal/app/models/bourreau.rb +++ b/BrainPortal/app/models/bourreau.rb @@ -454,7 +454,11 @@ def self.process_command_alter_tasks(command) CBRAIN.spawn_with_active_records(:admin, "AlterTask #{newstatus}") do + signaled_finish = false # set to true when receiving TERM + Signal.trap("TERM") { signaled_finish = true } + taskids.shuffle.each_with_index do |task_id,count| + break if signaled_finish # ends the entire task ID list Process.setproctitle "AlterTask #{newstatus} ID=#{task_id} #{count+1}/#{taskids.size}" task = CbrainTask.where(:id => task_id, :bourreau_id => myself.id).first next unless task # doesn't even exist? 
just ignore it diff --git a/BrainPortal/app/models/boutiques_cluster_task.rb b/BrainPortal/app/models/boutiques_cluster_task.rb index b8a0a9a25..be695e44e 100644 --- a/BrainPortal/app/models/boutiques_cluster_task.rb +++ b/BrainPortal/app/models/boutiques_cluster_task.rb @@ -137,7 +137,6 @@ def cluster_commands #:nodoc: end # Write down the file with the boutiques descriptor itself - boutiques_json_basename = "boutiques.#{self.run_id}.json" File.open(boutiques_json_basename, "w") do |fh| cleaned_desc = descriptor.dup cleaned_desc.delete("groups") if cleaned_desc.groups.size == 0 # bosh is picky @@ -152,7 +151,11 @@ def cluster_commands #:nodoc: #{boutiques_json_basename.bash_escape} SIMULATE simulate_com.gsub!("\n"," ") - simulout = IO.popen(simulate_com) { |fh| fh.read } + begin + simulout = IO.popen(simulate_com) { |fh| fh.read } + rescue => ex + cb_error "The 'bosh exec simulate' command failed: #{ex.class} #{ex.message}" + end simul_status = $? # a Process::Status object if ! simul_status.success? cb_error "The 'bosh exec simulate' command failed with return code #{simul_status.exitstatus}" @@ -161,7 +164,9 @@ def cluster_commands #:nodoc: commands = <<-COMMANDS # Main tool command, generated with bosh exec simulate #{simulout.strip} - echo $? > #{exit_status_filename.bash_escape} + status=$? + echo $status > #{exit_status_filename.bash_escape} + bash -c "exit $status" # clumsy but I can't think of any better way COMMANDS else # exec launch mode # The bosh launch command. This is all a single line, but broken up @@ -171,7 +176,9 @@ def cluster_commands #:nodoc: bosh exec launch \\ #{boutiques_json_basename.bash_escape} \\ #{self.invoke_json_basename.bash_escape} - echo $? > #{exit_status_filename.bash_escape} + status=$? + echo $status > #{exit_status_filename.bash_escape} + bash -c "exit $status" # clumsy but I can't think of any better way COMMANDS end commands.gsub!(/(\S) +(\S)/,'\1 \2') # make pretty @@ -245,8 +252,13 @@ def save_results #:nodoc: self.addlog("Attempting to save result file #{path}") name, userfile_class = name_and_type_for_output_file(output, path) - # Save the file (possible overwrite if race condition) + # Select an alternative and safe output type when guessing it produces a mismatch + userfile_class = SingleFile if File.file?(path) && !(userfile_class <= SingleFile) + userfile_class = FileCollection if File.directory?(path) && !(userfile_class <= FileCollection) + + # Save the file outfile = safe_userfile_find_or_new(userfile_class, :name => name) + new_out = outfile.new_record? unless outfile.save messages = outfile.errors.full_messages.join("; ") @@ -257,17 +269,22 @@ def save_results #:nodoc: end # Transfer content to DataProvider + self.addlog("Created result file '#{name}' (ID #{outfile.id})") if new_out + self.addlog("Reused result file '#{name}' (ID #{outfile.id})") if ! 
new_out + self.addlog("Uploading content to #{outfile.data_provider.type} '#{outfile.data_provider.name}' (ID #{outfile.data_provider_id})") outfile.cache_copy_from_local_file(path) + self.addlog("Content uploaded") + + # Record ID of output file in task's params params["_cbrain_output_#{output.id}"] ||= [] params["_cbrain_output_#{output.id}"] |= [ outfile.id ] - self.addlog("Saved result file #{name}") # Add provenance logs all_file_input_ids = descriptor.file_inputs.map do |input| invoke_params[input.id] end.compact.uniq parent_userfiles = Userfile.where(:id => all_file_input_ids).to_a - self.addlog_to_userfiles_these_created_these(parent_userfiles, [outfile]) if parent_userfiles.present? + self.addlog_to_userfiles_these_created_these(parent_userfiles, [outfile], "", 2) if parent_userfiles.present? # If there is only one input file, we move the output under it if parent_userfiles.size == 1 @@ -295,7 +312,7 @@ def save_results #:nodoc: self.addlog "Attempting to update input '#{userfile.name}' on DataProvider '#{userfile.data_provider.name}'" userfile.cache_is_newer userfile.sync_to_provider - self.addlog_to_userfiles_processed(userfile, "(content modified in place)") + self.addlog_to_userfiles_processed(userfile, "(content modified in place)", 1) end end @@ -323,7 +340,8 @@ def name_and_type_for_output_file(output, pathname) desc = descriptor_for_save_results custom = desc.custom || {} # 'custom' is not packaged as an object, just a hash idlist = custom['cbrain:no-run-id-for-outputs'].presence # list of IDs where no run id inserted - no_run_id = true if idlist && idlist.include?(output.id) + # We allow no_run_id only if the dest DP is MultiLevel, presumably the output goes to "a/b/c/basename_without_id" + no_run_id = true if idlist && idlist.include?(output.id) && self.results_data_provider.has_browse_path_capabilities? # Get basename, use it to guess the class name = File.basename(pathname) @@ -393,7 +411,13 @@ def boutiques_bosh_exec_mode # Returns the basename of the JSON file # that holds the 'invoke' structure for bosh. def invoke_json_basename - "invoke.#{self.run_id}.json" + ".invoke.#{self.run_id}.json" + end + + # Returns the basename of the JSON file + # that holds the boutiques descriptor for bosh. + def boutiques_json_basename + ".boutiques.#{self.run_id}.json" end # Return true or false depending on if diff --git a/BrainPortal/app/models/boutiques_portal_task.rb b/BrainPortal/app/models/boutiques_portal_task.rb index 4b525e653..d2bc2f900 100644 --- a/BrainPortal/app/models/boutiques_portal_task.rb +++ b/BrainPortal/app/models/boutiques_portal_task.rb @@ -27,7 +27,9 @@ class BoutiquesPortalTask < PortalTask # This method returns the BoutiquesDescriptor # directly associated with the ToolConfig for the task def boutiques_descriptor - self.tool_config.boutiques_descriptor + self.tool_config.boutiques_descriptor || + self.find_compatible_placeholder_descriptor || # Workaround #1 for misconfigured portal + self.generate_placeholder_descriptor # Workaround #2 for misconfigured portal end # This method returns the same descriptor as @@ -218,7 +220,7 @@ def after_form # Check the content of all CbrainFileLists (cbcsv) # ------------------------------------------------ # Get all the input cbcsv files - cbcsvs = self.cbcsv_files + cbcsvs = self.cbcsv_files # [ [input, cbcsv_userfile], [input, cbcsv_userfile], ... 
] numRows = nil # Keep track of number of files per cbcsv # Validate each cbcsv (all columns match per row, user has access to the file) for input, cbcsv in cbcsvs @@ -230,8 +232,8 @@ def after_form # If the number of rows does not match, error # We need only check this for inputs that are not "list". if ! input.list - currNumRows = (cbcsv.ordered_raw_ids || []).length - numRows = numRows.nil? ? currNumRows : numRows + currNumRows = (cbcsv.ordered_raw_ids || []).length + numRows ||= currNumRows if currNumRows != numRows params_errors.add(invokename, " does not have the same number of files (#{currNumRows}) as in other present cbcsvs (#{numRows})") next @@ -263,6 +265,18 @@ def self.add_pretty_params_names(inputs) def final_task_list #:nodoc: descriptor = self.descriptor_for_final_task_list self.addlog(descriptor.file_revision_info.format("%f rev. %s %a %d")) + valid_input_keys = descriptor.inputs.map(&:id) + + # Add author(s) information + authors = Array(descriptor.custom['cbrain:author']) + authors = authors.empty? ? "No CBRAIN author information" : + authors.join(", ") + self.addlog("CBRAIN Author(s): #{authors}") + + # Add information about Boutiques module + boutiques_module_information().each do |log_info| + self.addlog(log_info) + end # -------------------------------------- # Special case where there is a single file input @@ -277,27 +291,36 @@ def final_task_list #:nodoc: if descriptor.file_inputs.size == 1 input = descriptor.file_inputs.first - fillTask = lambda do |userfile,tsk| - tsk.invoke_params[input.id] = userfile.id + fillTask = lambda do |userfile,tsk,extra_params=nil| + tsk.params[:interface_userfile_ids] |= [ userfile.id.to_s ] + tsk.invoke_params[input.id] = userfile.id tsk.sanitize_param(input) - tsk.description ||= '' - tsk.description += " #{input.id}: #{userfile.name}" + tsk.description = "#{input.id}: #{userfile.name}\n#{tsk.description}".strip + tsk.invoke_params.merge!(extra_params.slice(*valid_input_keys)) if extra_params tsk.description.strip! tsk end - tasklist = self.params[:interface_userfile_ids].map do |userfile_id| + original_userfiles_ids = self.params[:interface_userfile_ids].dup + self.params[:interface_userfile_ids] = [] # zap it; we'll re-introduce each userfile.id as needed + tasklist = original_userfiles_ids.map do |userfile_id| f = Userfile.find_accessible_by_user( userfile_id, self.user, :access_requested => file_access_symbol() ) # One task for that file if (! f.is_a?( CbrainFileList ) || input.list) # in case of a list input, we *do* assign it the CbFileList task = self.dup fillTask.( f, task ) - else # One task per userfile in the CbrainFileList - ufiles = f.userfiles_accessible_by_user!( self.user, nil, nil, file_access_symbol() ) - # Skip files that are purposefully nil (e.g. given id 0 by the user) - subtasks = ufiles.select { |u| ! u.nil? }.map { |u| fillTask.( u, self.dup ) } + ufiles = f.userfiles_accessible_by_user!( self.user, nil, nil, file_access_symbol() ) + ordered_extra_params = f.is_a?(ExtendedCbrainFileList) ? f.ordered_params : [] + + # Fill subtasks array + subtasks = [] + ufiles.each_with_index do |u, index| + next if u.nil? + subtasks << fillTask.( u, self.dup, ordered_extra_params[index]) + end + subtasks # an array of tasks end end @@ -311,7 +334,7 @@ def final_task_list #:nodoc: # -------------------------------------- # Grab all the cbcsv input files - cbcsvs = self.cbcsv_files(descriptor) + cbcsvs = self.cbcsv_files(descriptor) # [ [input, cbcsv_userfile], [input, cbcsv_userfile], ... ] cbcsvs.reject! 
{ |pair| pair[0].list } # ignore file inputs with list=true; they just get the CBCSV directly # Default case: just return self as a single task @@ -322,9 +345,17 @@ def final_task_list #:nodoc: # Array with the actual userfiles corresponding to the cbcsv mapCbcsvToUserfiles = cbcsvs.map { |f| f[1].ordered_raw_ids.map { |i| (i==0) ? nil : i } } + # Array with the actual extra json_params corresponding to the cbcsv + mapCbcsvToParams = cbcsvs.map do |f| + cbcsv = f[1] + cbcsv.is_a?(ExtendedCbrainFileList) ? + cbcsv.ordered_params : [] + end + # Task list to fill and total number of tasks to output - tasklist = [] - nTasks = mapCbcsvToUserfiles[0].length + tasklist = [] + nTasks = mapCbcsvToUserfiles[0].length + # Iterate over each task that needs to be generated for i in 0..(nTasks - 1) # Clone this task @@ -335,6 +366,8 @@ def final_task_list #:nodoc: #currTask.params[:interface_userfile_ids] << mapCbcsvToUserfiles unless currId.nil? currTask.invoke_params[cinput.id] = currId # If id = 0 or nil, currId = nil currTask.invoke_params.delete(cinput.id) if currId.nil? + extra_params_from_Cbcsv = mapCbcsvToParams[j][i] || {} + currTask.invoke_params.merge!(extra_params_from_Cbcsv.slice(*valid_input_keys)) end # Add the new task to our tasklist tasklist << currTask @@ -385,7 +418,7 @@ def isInactive(input) ( val.nil? || # most of the time, the interface sends NO value at all, which is what we prefer (type == 'Flag' && val == "0") || # checkboxes send their values as strings 0 and 1, - (type == 'Flag' && val == false) # but normally they are transformed into bool in sanitize_params + (type == 'Flag' && val == false) # but normally they are transformed into bool in sanitize_param() ) end @@ -408,6 +441,7 @@ def ascertainCbcsvUserAccess(f,id) msg2 = lambda { |e| " cbcsv accessibility error in #{f.name}! Possibly due to cbcsv malformation. (Received error: #{e.inspect})" } errFlag = true # Whether the error checking found a problem begin # Check that the user has access to all of the files in the cbcsv + f.sync_to_cache # We need the content of the cbcsv f.userfiles_accessible_by_user!(self.user, nil, nil, file_access_symbol()) # side effect: cache entries within f for i in f.ordered_raw_ids.select{ |r| (! r.nil?) && (r.to_s != '0') } accessible = Userfile.find_accessible_by_user( i, self.user, :access_requested => file_access_symbol() ) rescue nil @@ -448,6 +482,11 @@ def sanitize_param(input) name = input.id type = input.type.downcase.to_sym # old code convention from previous integrator + # For strings, we support a special list of parameters + # that can be empty strings. + descriptor = self.descriptor_for_after_form + empty_string_allowed = Array(descriptor.custom['cbrain:allow_empty_strings']).include?(name) + # Taken userfile names. An error will be raised if two input files have the # same name. @taken_files ||= Set.new @@ -476,18 +515,17 @@ def sanitize_param(input) when :string value = value.to_s if value.is_a?(Symbol) params_errors.add(invokename, " not a string (#{value})") unless value.is_a?(String) - params_errors.add(invokename, " is blank") if value.blank? + params_errors.add(invokename, " is blank") if value.blank? && !empty_string_allowed # The following two checks are to prevent cases when # a string param is used as a path params_errors.add(invokename, " cannot contain newlines") if value.to_s =~ /[\n\r]/ params_errors.add(invokename, " cannot start with this character") if value.to_s =~ /^[\.\/]+/ + params_errors.add(invokename, " cannot move up dirs") if value.to_s.include? 
"/../" # Try to match against various common representation of true and false when :flag - if value.is_a?(String) - value = true if value =~ /\A(true|t|yes|y|on|1)\z/i - value = false if value =~ /\A(false|f|no|n|off|0|)\z/i - end + value = true if value.to_s =~ /\A(true|t|yes|y|on|1)\z/i + value = false if value.to_s =~ /\A(false|f|no|n|off|0|)\z/i if ! [ true, false ].include?(value) params_errors.add(invokename, ": not true or false (#{value})") @@ -622,4 +660,118 @@ def file_access_symbol @_file_access ||= (self.class.properties[:readonly_input_files].present? || self.tool_config.try(:inputs_readonly) ? :read : :write) end + # In the case of a misconfiguration of the portal, or if the file for + # the Boutiques descriptor has disappeared, this method will look at + # other ToolConfigs associated with the tool of the task and try find + # a replacement descriptor. It's not garanteed that this descriptor + # is compatible with the params of the task but it's probably good + # enough to show the task to the user. + # + # The task object itself will get a permanent ActiveRecord + # error added to its base to prevent any saving/editing/launching. + # + # See also the method generate_placeholder_descriptor which, in case + # we can't find a descriptor here, is called to create a fake + # descriptor out of thin air. + # + # Known limitations: if the original task was integrated with + # special modules that have custom entries that are different + # than the ones in the found descriptor, the callback methods + # setup(), before_form() etc might crash. + def find_compatible_placeholder_descriptor + + # Build a list of tool configs to scan + tool_configs = self.tool.tool_configs.order("created_at desc").to_a + + # Find the most recently created one (hopefully, backwards compatible) + compat_tool_config = tool_configs.detect { |tc| tc.boutiques_descriptor } # first one is the one we use + return nil if ! compat_tool_config + + # Add persistent errors, to make sure the task cannot be touched. + if self.errors.blank? + cur_version = self.tool_config.version_name + alt_version = compat_tool_config.version_name + self.errors.add(:base, "The Boutiques Descriptor for version '#{cur_version}' of this task has disappeared. This is a configuration error, contact the administrators.") + self.errors.add(:base, "A descriptor from another version ('#{alt_version}') is currently in use to allow you to view the parameters.") + self.errors.add(:unsavable, ": The parameters for this task cannot be modified.") # this special error prevents the save button from working + end + + compat_tool_config.boutiques_descriptor + end + + # In the case of a misconfiguration of the portal, or if the file for + # the Boutiques descriptor has disappeared, this method will look at the + # current params of the task and create out of thin air a new fake + # descriptor for it. This allows the user to (at least) view the task + # in the interface. + # + # The task object itself will get a permanent ActiveRecord + # error added to its base to prevent any saving/editing/launching. + # + # Known limitations: if the original task was integrated with + # special modules that require custom entries in the descriptor, + # the callback methods setup(), before_form() etc might crash. + # That's because the descriptor generated here contains an empty + # "custom" entry. + def generate_placeholder_descriptor + + # Add persistent errors, to make sure the task cannot be touched. + if self.errors.blank? 
+ cur_version = self.tool_config.version_name + self.errors.add(:base, "The Boutiques Descriptor for version '#{cur_version}' of this task has disappeared. This is a configuration error, contact the administrators.") + self.errors.add(:base, "A replacement descriptor is currently in use to allow you to view the parameters.") + self.errors.add(:base, "Because the type information for the parameters is missing, they are all shown as strings.") + self.errors.add(:unsavable, ": The parameters for this task cannot be modified.") # this special error prevents the save button from working + end + + # Main descriptor + fake = BoutiquesSupport::BoutiquesDescriptor.new( + :name => self.pretty_type, + :description => 'Missing Boutiques Descriptor Placeholder', + "tool-version" => self.tool_config.version_name, + "schema-version" => "0.5", + "command-line" => "false", # as in, the unix command 'false' + :custom => { 'cbrain:integrator_modules' => {} }, # we can't do better than that + ) + + # Create fake inputs for files + (self.params[:interface_userfile_ids] || []).each do |userfile_id| + fake.inputs << BoutiquesSupport::Input.new( + :id => "inputfile-#{userfile_id}", + :name => "inputfile-#{userfile_id}", + :type => 'File', + :description => "Fake input file", + ) + end + + # Create fake inputs for all other params (including those that are files) + (self.invoke_params || {}).keys.each do |input_id| + fake.inputs << BoutiquesSupport::Input.new( + :id => input_id, + :name => input_id, + :type => 'String', + :description => "Fake string input for key '#{input_id}'", + ) + end + + fake + end + + private + + # Prepare an array with revision information of + # all the Boutiques integrator modules used by the + # tools. + def boutiques_module_information #:nodoc: + descriptor = self.descriptor_for_final_task_list + + integrator_modules = descriptor.custom['cbrain:integrator_modules'] || {} + + integrator_modules.map do |module_name, _| + module_name = module_name.constantize + rev_info = module_name::Revision_info + rev_info.format("%f rev. %s %a %d") + end + end + end diff --git a/BrainPortal/app/models/cbrain_task.rb b/BrainPortal/app/models/cbrain_task.rb index acf752f91..56578bc9d 100644 --- a/BrainPortal/app/models/cbrain_task.rb +++ b/BrainPortal/app/models/cbrain_task.rb @@ -98,10 +98,15 @@ class CbrainTask < ApplicationRecord else value = s end - where(:status => value) + where("cbrain_tasks.status" => value) } - scope :active, lambda { status( :active ) } + scope :active, -> { status( :active ) } + scope :failed, -> { status( :failed ) } + scope :failed_setup, -> { where( "cbrain_tasks.status" => 'Failed To Setup' ) } + scope :failed_cluster, -> { where( "cbrain_tasks.status" => 'Failed On Cluster' ) } + scope :failed_post, -> { where( "cbrain_tasks.status" => 'Failed To PostProcess' ) } + scope :completed, -> { where( "cbrain_tasks.status" => 'Completed' ) } scope :real_tasks, -> { where( "cbrain_tasks.status <> 'Preset' AND cbrain_tasks.status <> 'SitePreset'" ) } diff --git a/BrainPortal/app/models/cluster_task.rb b/BrainPortal/app/models/cluster_task.rb index 5ffad9202..e2623f0c2 100644 --- a/BrainPortal/app/models/cluster_task.rb +++ b/BrainPortal/app/models/cluster_task.rb @@ -20,12 +20,6 @@ # along with this program. If not, see . # -require 'stringio' -require 'base64' -require 'fileutils' -require 'json' -require 'json-schema' - #Abstract model representing a job running on a cluster. This is the core class for #launching GridEngine/PBS/MOAB/UNIX jobs (etc) using Scir. 
# @@ -521,14 +515,14 @@ def path_is_in_workdir?(path) #:nodoc: # and add a log entry to each userfile identifying that # it was processed by the current task. An optional # comment can be appended to the message. - def addlog_to_userfiles_processed(userfiles,comment = "") + def addlog_to_userfiles_processed(userfiles, comment = "", caller_level=0) userfiles = [ userfiles ] unless userfiles.is_a?(Array) myname = self.fullname mylink = "/tasks/#{self.id}" # can't use show_task_path() on Bourreau side mymarkup = "[[#{myname}][#{mylink}]]" userfiles.each do |u| next unless u.is_a?(Userfile) && u.id - u.addlog_context(self,"Processed by task #{mymarkup} #{comment}",3) + u.addlog_context(self,"Processed by task #{mymarkup} #{comment}",3+caller_level) end end @@ -536,14 +530,14 @@ def addlog_to_userfiles_processed(userfiles,comment = "") # and add a log entry to each userfile identifying that # it was created by the current task. An optional # comment can be appended to the message. - def addlog_to_userfiles_created(userfiles,comment = "") + def addlog_to_userfiles_created(userfiles, comment = "", caller_level=0) userfiles = [ userfiles ] unless userfiles.is_a?(Array) myname = self.fullname mylink = "/tasks/#{self.id}" # can't use show_task_path() on Bourreau side mymarkup = "[[#{myname}][#{mylink}]]" userfiles.each do |u| next unless u.is_a?(Userfile) && u.id - u.addlog_context(self,"Created/updated by #{mymarkup} #{comment}",3) + u.addlog_context(self,"Created/updated by #{mymarkup} #{comment}",3+caller_level) end end @@ -552,7 +546,7 @@ def addlog_to_userfiles_created(userfiles,comment = "") # and records for each created file what were the creators, and for # each creator file what files were created, along with a link # to the task itself. An optional comment can be appended to the header message. - def addlog_to_userfiles_these_created_these(creatorlist, createdlist, comment = "") + def addlog_to_userfiles_these_created_these(creatorlist, createdlist, comment="", caller_level=0) # Two lists of userfiles. Make sure their contents are OK. creatorlist = Array(creatorlist).select { |u| u.is_a?(Userfile) && u.id } @@ -570,9 +564,9 @@ def addlog_to_userfiles_these_created_these(creatorlist, createdlist, comment = # Add an entry to each creator files, listing created files creatorlist.each do |creator| if createdlist.size == 1 # a common case; create shorter log entry then. - creator.addlog_context(self, "Used by task #{mymarkup} to create #{createdMarkups[0]}. #{comment}", 4) + creator.addlog_context(self, "Used by task #{mymarkup} to create #{createdMarkups[0]}. #{comment}", 4+caller_level) else - creator.addlog_context(self, "Used by task #{mymarkup}, list of #{createdlist.size} created files follow. #{comment}", 4) + creator.addlog_context(self, "Used by task #{mymarkup}, list of #{createdlist.size} created files follow. #{comment}", 4+caller_level) createdMarkups.each_slice(5).each do |files_4| creator.addlog(files_4.join(", ")) end @@ -582,9 +576,9 @@ def addlog_to_userfiles_these_created_these(creatorlist, createdlist, comment = # Add an entry to each created files, listing creators createdlist.each do |created| if creatorlist.size == 1 # a common case; create shorter log entry then. - created.addlog_context(self, "Created/updated by task #{mymarkup} from file #{creatorMarkups[0]}. #{comment}", 4) + created.addlog_context(self, "Created/updated by task #{mymarkup} from file #{creatorMarkups[0]}. 
#{comment}", 4+caller_level) else - created.addlog_context(self, "Created/updated by task #{mymarkup}, list of #{creatorlist.size} used files follow. #{comment}", 4) + created.addlog_context(self, "Created/updated by task #{mymarkup}, list of #{creatorlist.size} used files follow. #{comment}", 4+caller_level) creatorMarkups.each_slice(5).each do |files_4| created.addlog(files_4.join(", ")) end @@ -624,10 +618,14 @@ def tool_config_system(command) # Build script script = "" + # flag to guaranty propagation of env variables to the singularity/apptainer + # as far I know only needed to reverse effect of --cleanenv option, and otherwise all vars are copied to the container + # yet potentially more cases may be identified + propagate = self.use_singularity? # Add prologues in specialization order - script += bourreau_glob_config.to_bash_prologue if bourreau_glob_config - script += tool_glob_config.to_bash_prologue if tool_glob_config - script += tool_config.to_bash_prologue if tool_config + script += bourreau_glob_config.to_bash_prologue propagate if bourreau_glob_config + script += tool_glob_config.to_bash_prologue propagate if tool_glob_config + script += tool_config.to_bash_prologue propagate if tool_config # Add CBRAIN special inits script += self.supplemental_cbrain_tool_config_init # Add the command @@ -775,11 +773,11 @@ def post_process saveok = saveok && self.save_results self.meta[:no_end_keyword_check] = nil end - self.update_size_of_cluster_workdir if ! saveok self.status_transition(self.status, "Failed On Cluster") self.addlog("Data processing failed on the cluster.") else + self.update_size_of_cluster_workdir # callbacks and modules might have cleaned up the files by now self.addlog("Post processing completed.") self.status_transition(self.status, "Completed") end @@ -1780,27 +1778,16 @@ def submit_cluster_job # Joined version of all the lines in the scientific script command_script = commands.join("\n") - # Add HOME switching back and forth - command_script = <<-HOME_SWITCHING -# Preserve system HOME, then switch it to the task's workdir -_cbrain_home_="$HOME" -export HOME=#{self.full_cluster_workdir.bash_escape} - -#{command_script} - -# Restore system HOME (while preserving the latest exit code) -_cbrain_status_="$?" -export HOME="$_cbrain_home_" -bash -c "exit $_cbrain_status_" - HOME_SWITCHING - # In case of Docker or Singularity, we rewrite the scientific script inside # yet another wrapper script. if self.use_docker? + command_script = wrap_new_HOME(command_script, self.full_cluster_workdir) command_script = self.docker_commands(command_script) elsif self.use_singularity? load_singularity_image - command_script = self.singularity_commands(command_script) + command_script = self.singularity_commands(command_script) # note: invokes wrap_new_HOME itself + else + command_script = wrap_new_HOME(command_script, self.full_cluster_workdir) end # Create a bash science script out of the text @@ -1813,9 +1800,9 @@ def submit_cluster_job # by ClusterTask # #{ClusterTask.revision_info.to_s} -#{bourreau_glob_config ? bourreau_glob_config.to_bash_prologue : ""} -#{tool_glob_config ? tool_glob_config.to_bash_prologue : ""} -#{tool_config ? tool_config.to_bash_prologue : ""} +#{bourreau_glob_config ? bourreau_glob_config.to_bash_prologue(self.use_singularity?) : ""} +#{tool_glob_config ? tool_glob_config.to_bash_prologue(self.use_singularity?) : ""} +#{tool_config ? tool_config.to_bash_prologue(self.use_singularity?) 
: ""} #{self.supplemental_cbrain_tool_config_init} # CbrainTask '#{self.name}' commands section @@ -1862,13 +1849,38 @@ def submit_cluster_job # Record runtime environment bash #{Rails.root.to_s.bash_escape}/vendor/cbrain/bin/runtime_info.sh > #{runtime_info_basename} -# stdout and stderr captured below will be re-substituted in -# the output and error of this script. -bash '#{sciencefile}' > #{science_stdout_basename} 2> #{science_stderr_basename} &2 # where stderr captured below will be substituted +# With apptainer/singularity jobs, we sometimes get an error booting the container, +# so we try up to five times. +for singularity_attempts in 1 2 3 4 5 ; do # note: the number 5 is used a bit below in an 'if' + SECONDS=0 # this is a special bash variable, see the doc + + # stdout and stderr captured below will be re-substituted in + # the output and error of this script here (this one!) + bash '#{sciencefile}' >> #{science_stdout_basename} 2>> #{science_stderr_basename} /dev/null ; then + break # move on, for any other error or even non zero successes + fi + + # Detect that final attempt to boot failed + if test $singularity_attempts -eq 5 ; then + echo "Apptainer container boot attempts all failed, giving up." + status=99 # why not + break + fi + + # Cleanup and try again + echo "Apptainer boot attempt number $singularity_attempts failed, trying again." + grep -v -i 'FATAL.*container.*creation.*failed' < #{science_stderr_basename} > #{science_stderr_basename}.clean + mv -f #{science_stderr_basename}.clean #{science_stderr_basename} +done + +echo '__CBRAIN_CAPTURE_PLACEHOLDER__' # where stdout captured above will be substituted +echo '__CBRAIN_CAPTURE_PLACEHOLDER__' 1>&2 # where stderr captured above will be substituted date "+CBRAIN Task Ending With Status $status After $SECONDS seconds, at %s : %F %T" date "+CBRAIN Task Ending With Status $status After $SECONDS seconds, at %s : %F %T" 1>&2 @@ -2305,9 +2317,7 @@ def load_docker_image_cmd #:nodoc: # Returns true if the task's ToolConfig is configured to point to a singularity image # for the task's processing. def use_singularity? - return self.tool_config.container_engine == "Singularity" && - ( self.tool_config.containerhub_image_name.present? || - self.tool_config.container_image_userfile_id.present? ) + return self.tool_config.use_singularity? end # Return the 'singularity' command to be used for the task; this is fetched @@ -2316,6 +2326,12 @@ def singularity_executable_name return self.bourreau.singularity_executable_name.presence || "singularity" end + # Returns true if the admin has configured this option in the + # task's ToolConfig attributes. + def use_singularity_short_workdir? + self.tool_config.singularity_use_short_workdir + end + # Returns the command line(s) associated with the task, wrapped in # a Singularity call if a Singularity image has to be used. +command_script+ # is the raw scientific bash script. @@ -2328,17 +2344,34 @@ def singularity_commands(command_script) # Numbers in (paren) correspond to the comment # block in the script, well below. + # (7) The path to the task's work directory + task_workdir = self.full_cluster_workdir # a string + short_workdir = "/T#{self.id}" # only used in short workdir mode + effect_workdir = use_singularity_short_workdir? ? 
short_workdir : task_workdir + # (1) additional singularity execution command options defined in ToolConfig container_exec_args = self.tool_config.container_exec_args.presence - # (2) The root of the shared area for all CBRAIN tasks - gridshare_dir = self.bourreau.cms_shared_dir + # (2) The root of the DataProvider cache + cache_dir = self.bourreau.dp_cache_dir - # (3) The root of the DataProvider cache - cache_dir = self.bourreau.dp_cache_dir + # (3) The root of the GridShare area (all tasks workdirs) + gridshare_dir = self.bourreau.cms_shared_dir # not mounted explicitely - # (6) The path to the task's work directory - task_workdir = self.full_cluster_workdir + # (6) Ext3 capture mounts, if any. + # These will look like "-B .capt_abcd.ext3:/path/workdir/abcd:image-src=/" + # While we are building these options, we're also creating + # the ext3 filesystems at the same time, if needed. + esc_capture_mounts = ext3capture_basenames().inject("") do |sing_opts,(basename,size)| + fs_name = ".capt_#{basename}.ext3" # e.g. .capt_work.ext3 + mountpoint = "#{effect_workdir}/#{basename}" # e.g. /path/to/workdir/work or /T123/work + install_ext3fs_filesystem(fs_name,size) + safe_mkdir(basename) + "#{sing_opts} -B #{fs_name.bash_escape}:#{mountpoint.bash_escape}:image-src=/" + end + # This list will be used to make a device number check: all components + # must be on a device different from the one for the work directory. + capture_basenames = ext3capture_basenames.map { |basename,_| basename } # (4) More -B (bind mounts) for all the local data providers. # This will be a string "-B path1 -B path2 -B path3" etc. @@ -2359,9 +2392,28 @@ def singularity_commands(command_script) "#{sing_opts} --overlay=#{path.bash_escape}:ro" end + # Wrap new HOME environment + command_script = wrap_new_HOME(command_script, effect_workdir) + # Set singularity command singularity_commands = <<-SINGULARITY_COMMANDS +# Note to developers: +# During a standard CBRAIN task, this script is invoked with no arguments +# at all. For debugging situations, an admin can invoke it with the single +# argument "shell" to bypass the tool's execution and launch a convenient +# interactive shell inside the container. + +# These two variables control the mode switching at the end of the script. +mode="exec" +sing_basename=./#{singularity_wrapper_basename.bash_escape} # note: the ./ is necessary + +# In 'shell' mode we replace them with other things. +if test $# -eq 1 -a "X$1" = "Xshell" ; then + mode="shell" + sing_basename="" +fi + # Build a local wrapper script to run in a singularity container cat << \"SINGULARITYJOB\" > #{singularity_wrapper_basename.bash_escape} #!/bin/bash @@ -2390,13 +2442,40 @@ def singularity_commands(command_script) exit 2 fi - # CBRAIN internal consistency test 4: must have the gridshare_dir mounted inside the container if test ! -d #{gridshare_dir.bash_escape} ; then echo "Container missing mount point for gridshare directory:" #{gridshare_dir.bash_escape} exit 2 fi +# CBRAIN internal consistency test 5: short task workdir (optional). +# It's possible the path below will be the same as the task workdir if no shortening +# is configured, so the test becomes trivially like test 2. +if test ! -d #{effect_workdir.bash_escape} ; then + echo "Container missing shortened task work directory:" #{effect_workdir.bash_escape} + exit 2 +fi + +# Make sure we are in the task's workdir now. 
+cd #{effect_workdir.bash_escape} || exit 2 + +# CBRAIN internal consistency test 6: all mounted ext3 filesystems should be +# on a device different from the task's workdir. Otherwise something went +# wrong with the mounts. Singularity or Apptainer can sometimes do that +# if the command is improperly built (order of mounts args etc). +workdir_devid=$(stat -c %d .) # dev number of task workdir +for mount in #{capture_basenames.map(&:bash_escape).join(" ")} ; do + mnt_devid=$(stat -c %d $mount 2>/dev/null) + if test -z "$mnt_devid" ; then + echo "Container missing mount point for '$mount'." + exit 2 + fi + if test "$workdir_devid" -eq "$mnt_devid" ; then + echo "Container has mount point for '$mount' but it is not mounted to an external filesystem." + exit 2 + fi +done + # Scientific commands start here #{command_script} @@ -2404,28 +2483,31 @@ def singularity_commands(command_script) # Make sure it is executable chmod 755 #{singularity_wrapper_basename.bash_escape} -# Other should have executable right on all components -# of the path in order to be mounted by singularity. -chmod o+x . .. ../.. ../../.. # Invoke Singularity with our wrapper script above. # Tricks used here: # 1) we supply (if any) additional options for the exec command -# 2) we mount the gridshare root directory -# 3) we mount the local data provider cache root directory -# 4) we mount each (if any) of the root directory for local data providers -# 5) we mount (if any) file system overlays -# 6) with -H we set the task's work directory as the singularity $HOME directory +# 2) we mount the local data provider cache root directory +# a) at its original cluster full path +# b) at /DP_Cache (used only when shortening workdir) +# 3) we mount the root of the gridshare area (for all tasks) +# 4) we mount each (if any) of the root directories for local data providers +# 5) we mount (if any) other fixed file system overlays +# 6) we mount (if any) capture ext3 filesystems +# 7) with -H we set the task's work directory as the singularity $HOME directory #{singularity_executable_name} \\ - exec \\ + $mode \\ #{container_exec_args} \\ - -B #{gridshare_dir.bash_escape} \\ -B #{cache_dir.bash_escape} \\ + -B #{cache_dir.bash_escape}:/DP_Cache \\ + -B #{gridshare_dir.bash_escape} \\ #{esc_local_dp_mountpoints} \\ #{overlay_mounts} \\ - -H #{task_workdir.bash_escape} \\ + -B #{task_workdir.bash_escape}:#{effect_workdir.bash_escape} \\ + #{esc_capture_mounts} \\ + -H #{effect_workdir.bash_escape} \\ #{container_image_name.bash_escape} \\ - ./#{singularity_wrapper_basename.bash_escape} + $sing_basename SINGULARITY_COMMANDS @@ -2440,6 +2522,24 @@ def singularity_commands(command_script) private + # Add HOME switching back and forth to a bash script; + # preserve the status returned by the script too. + def wrap_new_HOME(script, new_home) + new_home_script = <<-HOME_SWITCHING +# Preserve system HOME, then switch it +_cbrain_home_="$HOME" +export HOME=#{new_home.bash_escape} + +#{script} + +# Restore system HOME (while preserving the latest exit code) +_cbrain_status_="$?" +export HOME="$_cbrain_home_" +bash -c "exit $_cbrain_status_" + HOME_SWITCHING + new_home_script + end + # Returns an array of directory paths for all # online data providers that are local to the current # system (including smart ones). This is often needed @@ -2455,6 +2555,43 @@ def local_dp_storage_paths #:nodoc: dirs end + # Just invokes the same method on the task's ToolConfig. 
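  # Rough illustration of what wrap_new_HOME() above produces (sketch; the
  # command and path are hypothetical, shell escaping elided):
  #
  #   wrap_new_HOME("my_tool --opt foo", "/work/T42")
  #
  # yields a bash fragment equivalent to
  #
  #   _cbrain_home_="$HOME"
  #   export HOME=/work/T42
  #   my_tool --opt foo
  #   _cbrain_status_="$?"
  #   export HOME="$_cbrain_home_"
  #   bash -c "exit $_cbrain_status_"
  #
  # so the wrapped commands run with HOME set to the task workdir, the
  # caller's HOME is restored afterwards, and the status of the last command
  # reproduces the wrapped script's own exit status.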
+ def ext3capture_basenames + self.tool_config.ext3capture_basenames + end + + # This method creates an empty +filename+ with +size+ bytes + # (where size is specified like what the unix 'truncate' command accepts) + # and then formats it with a ext3 filesystem. If the filename already exists, + # nothing is done. + def install_ext3fs_filesystem(filename,size) #:nodoc: + if File.file?(filename) # already exists, all ok + self.addlog("EXT3 filesystem file '#{filename}' already exists") + return true + end + + self.addlog("Creating EXT3 filesystem in '#{filename}' with size=#{size}") + + # Create an empty file of the proper size + system("truncate -s #{size.bash_escape} #{filename.bash_escape}") + status = $? # A Process::Status object + if ! status.success? + cb_error "Cannot create EXT3 filesystem file '#{filename}': #{status.to_s}" + end + + # Format it. Only works on linux obviously + system("echo y | mkfs.ext3 -t ext3 -m 0 -q -E root_owner #{filename.bash_escape}") + status = $? # A Process::Status object + if ! status.success? + cb_error "Cannot format EXT3 filesystem file '#{filename}': #{status.to_s}" + end + + true + rescue => ex + File.unlink(filename) rescue nil # keep directory clean of broken ext3 file + raise ex + end + ################################################################## diff --git a/BrainPortal/app/models/data_provider.rb b/BrainPortal/app/models/data_provider.rb index 1fc072b47..96e0f22e6 100644 --- a/BrainPortal/app/models/data_provider.rb +++ b/BrainPortal/app/models/data_provider.rb @@ -238,6 +238,8 @@ class DataProvider < ApplicationRecord :message => 'is invalid as only paths with simple characters are valid: a-z, A-Z, 0-9, _, +, =, . and of course /', :allow_blank => true + validate :owner_is_appropriate + belongs_to :user belongs_to :group has_many :userfiles, :dependent => :restrict_with_exception @@ -731,6 +733,9 @@ def provider_move_to_otherprovider(userfile, otherprovider, options = {}) return false unless Userfile.is_legal_filename?(new_name) return false unless userfile.id # must be a fully saved file + # Check quota at destination + DiskQuota.exceeded!(new_user_id, otherprovider.id) + # Find existing destination, if any target_exists = Userfile.where( :name => new_name, @@ -828,6 +833,9 @@ def provider_copy_to_otherprovider(userfile, otherprovider, options = {}) return false unless Userfile.is_legal_filename?(new_name) return false unless userfile.id # must be a fully saved file + # Check quota at destination + DiskQuota.exceeded!(new_user_id, otherprovider.id) + # Find existing destination, if any target_exists = Userfile.where( :name => new_name, @@ -1059,6 +1067,21 @@ def for_api + ################################################################# + # Model Callbacks + ################################################################# + + # This verifies that the user_id matches an Admin user. + # For security reason, no data providers should by default + # be owned by normal users. + # + # This method can be overrided in subclasses. 
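  # Example of such an override (sketch; the subclass is hypothetical): a
  # provider type explicitly meant to be owned by regular users could simply
  # relax the check.
  #
  #   class UserOwnedScratchProvider < SshDataProvider
  #     def owner_is_appropriate #:nodoc:
  #       true # any user may own this provider type
  #     end
  #   end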
+ def owner_is_appropriate #:nodoc: + return true if User.where(:id => self.user_id).first.is_a?(AdminUser) + self.errors.add(:user_id, 'must be an administrator') + return false + end + ################################################################# # Class-level cache-handling methods ################################################################# diff --git a/BrainPortal/app/models/disk_quota.rb b/BrainPortal/app/models/disk_quota.rb index a29696cb8..2e31c9ace 100644 --- a/BrainPortal/app/models/disk_quota.rb +++ b/BrainPortal/app/models/disk_quota.rb @@ -20,7 +20,23 @@ # along with this program. If not, see . # -# Model representing disk quotas +# Model representing disk quotas. +# +# We have two type of quota records: +# 1) User-specific quotas on a specific DP (user_id > 0) +# 2) DP-wide quotas applying for all users (user_id == 0) +# +# Quotas are verified by callbacks in the Userfile model. +# The methods here will try to fetch and process and +# user's specific quota (user_id and data_provider_id both set) +# before it falls back to the DP-wise quota (user_id == 0) +# +# The two main attributes are :max_size and :max_files +# which puts limit to the sum(size) and sum(num_files) +# of all the userfiles owned by a user on a specific DP. +# +# A quota record can be configuered with -1,-1, which prevents +# a user from creating any file at all on a DP. class DiskQuota < ApplicationRecord Revision_info=CbrainFileRevision[__FILE__] #:nodoc: @@ -29,6 +45,7 @@ class DiskQuota < ApplicationRecord validates_presence_of :data_provider_id validates_presence_of :max_bytes validates_presence_of :max_files + validate :limits_are_reasonable belongs_to :user, :optional => true # the value can be 0 but not nil belongs_to :data_provider, :optional => false @@ -39,11 +56,23 @@ class DiskQuota < ApplicationRecord attr_reader :cursize, :curfiles # values are filled when performing a check + def is_for_dp? #:nodoc: + self.user_id == 0 + end + + def is_for_user? #:nodoc: + self.user_id != 0 + end + + def none_allowed? #:nodoc: + self.max_files == -1 + end + # Returns true if currently, the user specified by +user_id+ # uses more disk space or more total files on +data_provider_id+ than # the quota limit configured by the admin. # - # The quota record for the limites is first looked up specifically for the pair + # The quota record for the limits is first looked up specifically for the pair # (user, data_provider); if no quota record is found, the pair (0, data_provider) # will be fetched instead (meaning a default quota for all users on that DP) # @@ -70,7 +99,7 @@ def self.exceeded!(user_id, data_provider_id) # Returns true if currently, the user specified by +user+ (specified by id) # uses more disk space or more total files on than configured in the limits # of this quota object. Since a quota object can contain '0' for the user attribute - # (meaning it's a default for all users), a user_id musy be given explicitely + # (meaning it's a default for all users), a user_id must be given explicitely # in argument in that case. def exceeded?(user_id = self.user_id) @@ -108,4 +137,35 @@ def exceeded!(user_id = self.user_id) raise CbrainDiskQuotaExceeded.new(user_id, self.data_provider_id) end + ##################################################### + # Validations callbacks + ##################################################### + + # Checks that both limits have proper values. 
+ # 1) Both values are >= 0 : all OK + # 2) max_bytes == -1 and max_files == -1 : locked quota (no other negative numbers are allowed) + # + # Note that a value of 0 will still allow a user to create ONE userfile entry, + # because quota failures happen only after the quota is exceeded. That's + # why a value of -1 exists, it prevents any files from being created. + # + # A DP-wode quota of (-1,-1) will prevent ALL users from creating files on a DP + # (similar than having the DP set to read-only) but you can give special privileges + # to individual users by creating user-specific quota records. + def limits_are_reasonable + + # Already checked by other validate_presence callbacks + return false if self.max_bytes.blank? || self.max_files.blank? + + # All quotas are OK with this rule + return true if (self.max_bytes >= 0 && self.max_files >= 0) + # Only -1 in both fields is allowed if using negative numbers + return true if (self.max_bytes == -1 && self.max_files == -1) + + # Log errors + self.errors.add(:max_bytes, "must be -1, 0 or > 0") if self.max_bytes < -1 + self.errors.add(:max_files, "must be -1, 0 or > 0") if self.max_files < -1 + self.errors.add(:base, "when using -1, both limits must be set to -1") if self.max_bytes == -1 || self.max_files == -1 + end + end diff --git a/BrainPortal/app/models/remote_resource.rb b/BrainPortal/app/models/remote_resource.rb index 0ee6efdb3..750eb45e6 100644 --- a/BrainPortal/app/models/remote_resource.rb +++ b/BrainPortal/app/models/remote_resource.rb @@ -58,7 +58,6 @@ class RemoteResource < ApplicationRecord # can only be set by the admin using the Rails console; leaving them blank # means the mailers use whatever is configured in the Rails environment config. serialize :email_delivery_options - serialize :nh_email_delivery_options validates :name, :uniqueness => true, @@ -525,8 +524,8 @@ def get_ssh_public_key #:nodoc: cb_error "SSH public key only accessible for the current resource." unless self.id == self.class.current_resource.id return @ssh_public_key if @ssh_public_key home = CBRAIN::Rails_UserHome - if File.exists?("#{home}/.ssh/id_cbrain_portal.pub") - @ssh_public_key = File.read("#{home}/.ssh/id_cbrain_portal.pub") rescue "" + if File.exists?("#{home}/.ssh/id_cbrain_ed25519.pub") + @ssh_public_key = File.read("#{home}/.ssh/id_cbrain_ed25519.pub") rescue "" else @ssh_public_key = "" end diff --git a/BrainPortal/app/models/s3_flat_data_provider.rb b/BrainPortal/app/models/s3_flat_data_provider.rb index d0a297b51..dc4b42a35 100644 --- a/BrainPortal/app/models/s3_flat_data_provider.rb +++ b/BrainPortal/app/models/s3_flat_data_provider.rb @@ -77,6 +77,11 @@ def s3_connection ) end + def reset_connection #:nodoc: + Aws.empty_connection_pools! rescue nil + @s3_connection = nil + end + def impl_is_alive? #:nodoc: return true if s3_connection.connected? # Try to create the bucket once @@ -302,9 +307,11 @@ def cache_recursive_fileinfos(userfile) #:nodoc: cache_parent = cache_fullpath.parent parent_length = "#{cache_parent}/".length # used in substr below glob_pattern = userfile.is_a?(FileCollection) ? "/**/*" : "" - Dir.glob("#{userfile.cache_full_path}#{glob_pattern}").map do |fullpath| # /path/to/userfilebase/d1/d2/f1.txt - stats = File.lstat(fullpath) # not stat() ! - relpath = fullpath[parent_length,999999] # userfilebase/d1/d2/f1.txt + Dir.glob("#{userfile.cache_full_path}#{glob_pattern}", File::FNM_DOTMATCH).map do |fullpath| # /path/to/userfilebase/d1/d2/f1.txt + next if fullpath.ends_with? "/." 
# skip spurious entries for self-referencing sub directories + next if fullpath.ends_with? "/.." # skip spurious entries for referencing parent directories (never happens?) + stats = File.lstat(fullpath) # not stat() ! + relpath = fullpath[parent_length,999999] # userfilebase/d1/d2/f1.txt # This struct is defined in DataProvider FileInfo.new( :name => relpath, @@ -321,7 +328,7 @@ def cache_recursive_fileinfos(userfile) #:nodoc: :ctime => stats.ctime, :mtime => stats.mtime, ) - end.compact # the compact is in case we ever insert a 'next' in the map() above + end.compact end # Scan the Amazon bucket and returns a list of FileInfo objects @@ -390,8 +397,25 @@ def rsync_emulation(src_fileinfos,dst_fileinfos) #:nodoc: src_idx = src_fileinfos.index_by { |fi| fi.name } dst_idx = dst_fileinfos.index_by { |fi| fi.name } + # Hash of all possible directory prefixes at source + all_src_prefixes = src_idx + .keys # names of all source files and dirs + .map { |path| File.dirname(path) } # parents of all of them + .uniq + .map do |dirpath| + prefixes = [ dirpath ] + while (parent=File.dirname(dirpath)) != '.' + raise "Woh, got an absolute path back to root filesystem?!?" if parent == '/' + prefixes << parent + dirpath = parent + end + prefixes + end + .flatten + .index_by(&:itself) # will also do uniq + # Build two lists - delete_dest = dst_fileinfos.select { |fi| ! src_idx[fi.name] } + delete_dest = dst_fileinfos.select { |fi| ! src_idx[fi.name] && ! all_src_prefixes[fi.name] } delete_dest += dst_fileinfos.select { |fi| src_idx[fi.name] && src_idx[fi.name].symbolic_type != fi.symbolic_type } add_dest = src_fileinfos.select do |src_fi| diff --git a/BrainPortal/app/models/sing_bindmount_data_provider.rb b/BrainPortal/app/models/sing_bindmount_data_provider.rb index 8799a7f8b..516c70df4 100644 --- a/BrainPortal/app/models/sing_bindmount_data_provider.rb +++ b/BrainPortal/app/models/sing_bindmount_data_provider.rb @@ -160,10 +160,9 @@ def impl_sync_to_cache(userfile) #:nodoc: # As of rsync 3.1.2, rsync does the escaping of the remote path properly itself source_escaped = remotefull.to_s.bash_escape if self.class.local_rsync_protects_args? text = bash_this("#{rsync} -a -l --no-g --chmod=u=rwX,g=rX,Dg+s,o=r --delete #{self.rsync_excludes} #{source_colon}#{source_escaped}#{sourceslash} #{shell_escape(localfull)} 2>&1") - text.sub!(/Warning: Permanently added[^\n]+known hosts.\s*/i,"") # a common annoying warning - cb_error "Error syncing userfile to local cache, rsync returned:\n#{text}" unless text.blank? + cb_error "Error syncing userfile ##{userfile.id} to local cache, rsync returned:\n#{text}" unless text.blank? unless File.exist?(localfull) - cb_error "Error syncing userfile to local cache: no destination file found after rsync?\n" + + cb_error "Error syncing userfile ##{userfile.id} to local cache: no destination file found after rsync?\n" + "Make sure you are running rsync 3.0.6 or greater!" end true diff --git a/BrainPortal/app/models/sing_squashfs_data_provider.rb b/BrainPortal/app/models/sing_squashfs_data_provider.rb index d41e9dce4..f2c352321 100644 --- a/BrainPortal/app/models/sing_squashfs_data_provider.rb +++ b/BrainPortal/app/models/sing_squashfs_data_provider.rb @@ -128,11 +128,13 @@ def impl_sync_to_cache(userfile) #:nodoc: source_escaped = provider_is_remote ? remote_shell_escape(remotefull) : remotefull.to_s.bash_escape # As of rsync 3.1.2, rsync does the escaping of the remote path properly itself source_escaped = remotefull.to_s.bash_escape if self.class.local_rsync_protects_args? 
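  # Worked example of the all_src_prefixes construction in rsync_emulation()
  # of s3_flat_data_provider.rb above (hypothetical relative paths): with
  # source entries
  #
  #   "base/d1/d2/f1.txt"  and  "base/d1/f2.txt"
  #
  # File.dirname() gives "base/d1/d2" and "base/d1"; walking each of those up
  # until "." is reached adds "base/d1" and "base", so the prefix set becomes
  # { "base/d1/d2", "base/d1", "base" }. Destination entries that are merely
  # parent directories of kept files are thus no longer scheduled for deletion.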
+ # We need the SSH agent even when doing local transfers + CBRAIN.with_unlocked_agent + text = bash_this("#{rsync} -a -l --no-g --chmod=u=rwX,g=rX,Dg+s,o=r --delete #{self.rsync_excludes} #{source_colon}#{source_escaped}#{sourceslash} #{shell_escape(localfull)} 2>&1") - text.sub!(/Warning: Permanently added[^\n]+known hosts.\s*/i,"") # a common annoying warning - cb_error "Error syncing userfile to local cache, rsync returned:\n#{text}" unless text.blank? + cb_error "Error syncing userfile ##{userfile.id} to local cache, rsync returned:\n#{text}" unless text.blank? unless File.exist?(localfull) - cb_error "Error syncing userfile to local cache: no destination file found after rsync?\n" + + cb_error "Error syncing userfile ##{userfile.id} to local cache: no destination file found after rsync?\n" + "Make sure you are running rsync 3.0.6 or greater!" end true @@ -279,7 +281,7 @@ def get_squashfs_basenames(force = false) #:nodoc: remote_cmd = "cd #{self.remote_dir.bash_escape} && ls -1" text = self.remote_bash_this(remote_cmd) lines = text.split("\n") - @sq_files = lines.select { |l| l =~ /\A\S+\.(squashfs|sqs)\z/ }.sort + @sq_files = lines.select { |l| l =~ /\A\S+\.(squashfs|sqs|sqfs)\z/ }.sort self.meta[:squashfs_basenames] = @sq_files end diff --git a/BrainPortal/app/models/squashifier_en_cbrain_ssh_data_provider.rb b/BrainPortal/app/models/squashifier_en_cbrain_ssh_data_provider.rb new file mode 100644 index 000000000..26135a1c4 --- /dev/null +++ b/BrainPortal/app/models/squashifier_en_cbrain_ssh_data_provider.rb @@ -0,0 +1,170 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This DataProvider class implements a remote SSH-accessible data provider +# using the EnCbrainSshDataProvider file structure, but with an added +# functionality: +# +# 1) the content of any FileCollection will be locally +# squashed with the command 'mksquashfs' before being sent to the +# data provider side (aka synchronizing to the provider). +# +# 2) Conversely a FileCollection's content will be unsquashed with +# the command 'unsquashfs' when synchronizing to the cache. +# +# This DP requires the Rails application to have access to these +# two commands, of course. +# +# Note that this DP cannot be made into a 'Smart' version, since the +# content on the DP side is always different from the content on th +# cache side, even when working with both under the same host. So +# there will always be a SSH upload and download operation whenever +# syncing to or from the cache. +# +# TODO refactor to avoid code duplication. 
That would require creating a new +# base-class abstract method cache_full_path_for_upload() and +# cache_full_path_for_download() (suggested names) that DP subclasses +# would use to implement their data transfer methods, distinct from +# cache_full_pathname() +class SquashifierEnCbrainSshDataProvider < EnCbrainSshDataProvider + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # Name of the squashfs file that will contain a filecollection's squashified content + # This is a constant name independant of what the collection's own name is. + SQ_BASENAME = "CBRAIN_SquashedContent.squashfs" + + # This returns the category of the data provider + def self.pretty_category_name #:nodoc: + "Squashifier Enhanced SSH CBRAIN" + end + + def impl_sync_to_cache(userfile) #:nodoc: + return super if userfile.is_a?(SingleFile) + + # Normal code will fetch (remote) basename/basename.squashfs into (local) basename/basename.squashfs + super + + # Then we unsquash it all + fullpath = cache_full_path(userfile) # path to dir; in it is the .squashfs file + cacheparent = fullpath.parent + basename = userfile.name + tmpdirbase = ".tmp.unsq.#{Process.pid}" + unsqu_out = bash_this( + "cd #{cacheparent.to_s.bash_escape} && " + + "mv #{basename.bash_escape} #{tmpdirbase} && " + + "unsquashfs -f -n -p 1 -no-xattrs -d #{basename.bash_escape} #{tmpdirbase.bash_escape}/#{SQ_BASENAME.bash_escape} 2>&1 1>/dev/null && " + + "rm -rf #{tmpdirbase}" + ) + # Perform cleanup of expected messages (stupid unsquashfs program is too verbose) + #[ + # # In the following regexps, the trailing .* match anything to NL (but not including it) + # /^Parallel unsquashfs: Using \d+ processor.*/i, + # /^\d+ inodes.*to write.*/i, + # /^created \d+.*/i, + #].each { |regex| unsqu_out.sub!(regex,"") } + unsqu_out.strip! # remove all blanks on each side, whatever's left is the culprit + cb_error "Error syncing userfile ##{userfile.id} to local cache, unsquashfs commands returned:\n#{unsqu_out}" unless unsqu_out.blank? + true + end + + def impl_sync_to_provider(userfile) #:nodoc: + return super if userfile.is_a?(SingleFile) + + fullpath = cache_full_path(userfile) # without SQEXT + cacheparent = fullpath.parent + basename = userfile.name + tmpdirbase = ".tmp.mksq.#{Process.pid}" + # Note about the mksquashfs command: by supplying a single source argument ('basename'), the + # *content* of that directory is put directly at the top of the created squashfs filesystem. + mem_opt = self.class.mksquashfs_has_mem_option? ? "-mem 64m" : "" + mksqu_out = bash_this( + "cd #{cacheparent.to_s.bash_escape} && " + + "mkdir -p #{tmpdirbase.bash_escape} && " + + "mksquashfs #{basename.bash_escape} #{tmpdirbase.bash_escape}/#{SQ_BASENAME.bash_escape} -processors 1 -no-progress -noappend -no-xattrs -noD -noI -noF #{mem_opt} 2>&1 1>/dev/null || echo mksquashfs command failed" + ) + # Perform cleanup of expected messages (stupid mksquashfs program is too verbose) + #[ + # # In the following regexps, the trailing .* match anything to NL (but not including it) + # /^created \d+.*/i, + #].each { |regex| unsqu_out.sub!(regex,"") } + mksqu_out.strip! + cb_error "Error syncing userfile ##{userfile.id} to provider, mksquashfs commands returned:\n#{mksqu_out}" unless mksqu_out.blank? 
+ + # Invoke the normal code; duplicated from superclasses unfortunately + + # ------- + # Prep code from EnCbrainSshDataProvider (simplified a little) + # ------- + threelevels = cache_subdirs_from_id(userfile.id) + remcachedir = Pathname.new(remote_dir) + threelevels[0] + threelevels[1] + threelevels[2] + mkdir_command = "mkdir -p #{remcachedir.to_s.bash_escape} >/dev/null 2>&1" + remote_bash_this(mkdir_command) + # ------- + # End of EnCbrainSshDataProvider code + # ------- + + # ------- + # rsync upload code from SshDataProvider, adjusted + # ------- + localfull = cacheparent + tmpdirbase + remotefull = provider_full_path(userfile) + cb_error "Error: directory #{localfull} does not exist in local cache!" unless File.exist?(localfull) + + sourceslash = "/" # constant this time + rsync = rsync_over_ssh_prefix(userfile.user, userfile) + + # Double escaping for old rsyncs + dest_escaped = remote_shell_escape(remotefull) + # As of rsync 3.1.2, rsync does the escaping of the remote path properly itself + dest_escaped = remotefull.to_s.bash_escape if self.class.local_rsync_protects_args? + + # It's IMPORTANT that the destination be specified with a bare ':' in front. + text = bash_this("#{rsync} -a -l --no-g --chmod=u=rwX,g=rX,Dg+s,o=r --delete #{self.rsync_excludes} #{shell_escape(localfull)}#{sourceslash} :#{dest_escaped} 2>&1") + cb_error "Error syncing userfile ##{userfile.id} to data provider, rsync returned:\n#{text}" unless text.blank? + unless self.provider_file_exists?(userfile).to_s =~ /file|dir/ + cb_error "Error syncing userfile ##{userfile.id} to data provider: no destination file found after rsync?\n" + + "Make sure you are running rsync 3.0.6 or greater!\n" + end + # ------- + # End of SshDataProvider code + # ------- + + true + ensure + # Cleanup local squashfs file no matter what + if cacheparent.to_s.present? && tmpdirbase.present? && File.directory?("#{cacheparent.to_s.bash_escape}/#{tmpdirbase.bash_escape}") + system "rm -rf #{cacheparent.to_s.bash_escape}/#{tmpdirbase.bash_escape}" + end + end + + # Check the capabilities of the local mksquashfs program. + # Returns true if it has -mem . Value cached in class variable. + def self.mksquashfs_has_mem_option? + return @_mksquashfs_mem_ if defined?(@_mksquashfs_mem_) + system "mksquashfs 2>&1 | grep -e -mem >/dev/null" + @_mksquashfs_mem_ = $?.success? + @_mksquashfs_mem_ + end + +end + diff --git a/BrainPortal/app/models/ssh_data_provider.rb b/BrainPortal/app/models/ssh_data_provider.rb index 60d6b0863..5bcd23252 100644 --- a/BrainPortal/app/models/ssh_data_provider.rb +++ b/BrainPortal/app/models/ssh_data_provider.rb @@ -87,10 +87,9 @@ def impl_sync_to_cache(userfile) #:nodoc: # It's IMPORTANT that the source be specified with a bare ':' in front. text = bash_this("#{rsync} -a -l --no-g --chmod=u=rwX,g=rX,Dg+s,o=r --delete #{self.rsync_excludes} :#{source_escaped}#{sourceslash} #{shell_escape(localfull)} 2>&1") - text.sub!(/Warning: Permanently added[^\n]+known hosts.\s*/i,"") # a common annoying warning - cb_error "Error syncing userfile to local cache, rsync returned:\n#{text}" unless text.blank? + cb_error "Error syncing userfile ##{userfile.id} to local cache, rsync returned:\n#{text}" unless text.blank? unless File.exist?(localfull) - cb_error "Error syncing userfile to local cache: no destination file found after rsync?\n" + + cb_error "Error syncing userfile ##{userfile.id} to local cache: no destination file found after rsync?\n" + "Make sure you are running rsync 3.0.6 or greater!" 
end true @@ -112,9 +111,9 @@ def impl_sync_to_provider(userfile) #:nodoc: # It's IMPORTANT that the destination be specified with a bare ':' in front. text = bash_this("#{rsync} -a -l --no-g --chmod=u=rwX,g=rX,Dg+s,o=r --delete #{self.rsync_excludes} #{shell_escape(localfull)}#{sourceslash} :#{dest_escaped} 2>&1") text.sub!(/Warning: Permanently added[^\n]+known hosts.\s*/i,"") # a common annoying warning - cb_error "Error syncing userfile to data provider, rsync returned:\n#{text}" unless text.blank? + cb_error "Error syncing userfile ##{userfile.id} to data provider, rsync returned:\n#{text}" unless text.blank? unless self.provider_file_exists?(userfile).to_s =~ /file|dir/ - cb_error "Error syncing userfile to data provider: no destination file found after rsync?\n" + + cb_error "Error syncing userfile ##{userfile.id} to data provider: no destination file found after rsync?\n" + "Make sure you are running rsync 3.0.6 or greater!\n" end true @@ -298,8 +297,68 @@ def impl_provider_repair(issue) #:nodoc: super(issue) end + # Checks connection and other common problems. + # Raises exception DataProviderTestConnectionError if connection is down or + # common config issues detected. Returns true if everything is OK. + def check_connection! + err_message = self.find_connection_issues + raise DataProviderTestConnectionError.new(err_message) if err_message.present? + true + end + protected + # Verifies the configuration and returns a string with a descriptive + # error message if something is wrong. + def find_connection_issues + master = self.master # This is a handler for the connection, not persistent. + tmpfile = "/tmp/dp_check.#{Process.pid}.#{rand(1000000)}" # prefix for .out and .err capture files + + # Check #1: the SSH connection can be established + if ! master.is_alive? + return "Cannot establish the SSH connection. Check the configuration: username, hostname, port are valid, and SSH key is installed." + end + + # Check #2: we can run "true" on the remote site and get no output + status = master.remote_shell_command_reader("true", + :stdin => "/dev/null", + :stdout => "#{tmpfile}.out", + :stderr => "#{tmpfile}.err" + ) + stdout = File.read("#{tmpfile}.out") rescue "Error capturing stdout" + stderr = File.read("#{tmpfile}.err") rescue "Error capturing stderr" + if stdout.size != 0 + stdout.strip! if stdout.present? # just to make it pretty while still reporting whitespace-only strings + return "Remote shell is not clean: got some bytes on stdout: '#{stdout}'" + end + if stderr.size != 0 + stderr.strip! if stdout.present? + return "Remote shell is not clean: got some bytes on stderr: '#{stderr}'" + end + if !status + return "Got non-zero return code when trying to run 'true' on remote side." + end + + # Check #3: the remote directory exists + master.remote_shell_command_reader "test -d #{self.remote_dir.bash_escape} && echo DIR-OK", :stdout => "#{tmpfile}.out" + out = File.read("#{tmpfile}.out") + if out != "DIR-OK\n" + return "The remote directory doesn't seem to exist." 
+ end + + # Check #4: the remote directory is readable + master.remote_shell_command_reader "test -r #{self.remote_dir.bash_escape} && test -x #{self.remote_dir.bash_escape} && echo DIR-READ", :stdout => "#{tmpfile}.out" + out = File.read("#{tmpfile}.out") + if out != "DIR-READ\n" + return "The remote directory doesn't seem to be readable" + end + + return nil # No error messages means all is OK + ensure + File.unlink("#{tmpfile}.out") rescue nil + File.unlink("#{tmpfile}.err") rescue nil + end + # Returns a list of all files in remote directory +dirname+, with all their # associated metadata; size, permissions, access times, owner, group, etc. def remote_dir_entries(dirname, user = nil, userfile = nil) diff --git a/BrainPortal/app/models/ssh_data_provider_base.rb b/BrainPortal/app/models/ssh_data_provider_base.rb index d0dc188c9..d2b002e16 100644 --- a/BrainPortal/app/models/ssh_data_provider_base.rb +++ b/BrainPortal/app/models/ssh_data_provider_base.rb @@ -107,6 +107,29 @@ def remote_bash_this(command, user = nil, userfile = nil) self.master(user, userfile).remote_shell_command_reader(command, :stdin => '/dev/null') do |fh| text = fh.read end + + filter_out_ssh_stderr_messages(text) + end + + # Same as superclass, but because we sometimes issue local commands + # that indirectly connect through ssh, we also filter out the same error + # messages as in remote_bash_this(). + def bash_this(command) #:nodoc: + text = super + filter_out_ssh_stderr_messages(text) + end + + private + + # Remove common warning messages generally printed on stderr... + def filter_out_ssh_stderr_messages(text) + # 1) From ssh + text.sub!(/^Warning: Permanently added[^\n]+known hosts.\s*/i,"") # a common annoying warning + + # 1) From ssh-keysign when ssh to localhost from a different GID + text.sub!(/^setresgid \d+: Operation not permitted\s*/i,"") + text.sub!(/^ssh_keysign: no reply\s*/i,"") + text.sub!(/^sign using hostkey.*failed\s*/i,"") text end diff --git a/BrainPortal/app/models/sync_status.rb b/BrainPortal/app/models/sync_status.rb index 643f723e6..11bedf11c 100644 --- a/BrainPortal/app/models/sync_status.rb +++ b/BrainPortal/app/models/sync_status.rb @@ -126,42 +126,51 @@ def self.ready_to_copy_to_cache(userfile) unless userfile_id return yield end + prettyfile = "'#{userfile.name}' (##{userfile_id})" # for messages state = self.get_or_create_status(userfile_id) puts "SYNC: ToCache: #{state.pretty} Enter" if DebugMessages - # Wait until no other local client is copying the file's content - # in one direction or the other. - allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do - state.reload - state.invalidate_old_status - puts "SYNC: ToCache: #{state.pretty} Check" if DebugMessages - state.status !~ /^To/ # no ToProvider or ToCache - end - puts "SYNC: ToCache: #{state.pretty} Proceed" if DebugMessages + # This loops attempts to wait for and then lock out other + # processes on the same server. + 2.times do + + # Wait until no other local client is copying the file's content + # in one direction or the other. + allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do + state.reload + state.invalidate_old_status + puts "SYNC: ToCache: #{state.pretty} Check" if DebugMessages + state.status !~ /^To/ # no ToProvider or ToCache + end + puts "SYNC: ToCache: #{state.pretty} Proceed" if DebugMessages - if ! allok # means timeout occurred - oldstate = state.status - #state.status_transition(oldstate, "ProvNewer") # do our best; not needed? 
- raise "Sync error: timeout waiting for file '#{userfile_id}' " + - "in '#{oldstate}' for operation 'ToCache'." - end + if ! allok # means timeout occurred + oldstate = state.status + raise "Sync error: timeout waiting for #{prettyfile} in '#{oldstate}' for operation 'ToCache'." + end - # No need to do anything if the data is already in sync! - if state.status == "InSync" - state.update_attributes( :accessed_at => Time.now ) - return true - end + # No need to do anything if the data is already in sync! + if state.status == "InSync" + state.update_attributes( :accessed_at => Time.now ) + return true + end - if state.status == "Corrupted" - raise "Sync error: file '#{userfile_id}' marked 'Corrupted' " + - "for operation 'ToCache'." - end + # This can be set by invalidate_old_status above + if state.status == "Corrupted" + raise "Sync error: #{prettyfile} marked 'Corrupted' for operation 'ToCache'." + end + + # Adjust state to let all other processes know what + # WE want to do now. This will lock out other clients. + break if state.status_transition(state.status, "ToCache") # if we fail here, race condition + + end # loop 2 times - # Adjust state to let all other processes know what - # WE want to do now. This will lock out other clients. - state.status_transition!(state.status, "ToCache") # if we fail here, race condition puts "SYNC: ToCache: #{state.pretty} Update" if DebugMessages + if state.status != 'ToCache' + raise "Sync error: #{prettyfile} cannot be fetched after two attempts. Status=#{state.status}" + end # Wait until all other clients out there are done # transferring content to the DP side. We don't care @@ -175,8 +184,7 @@ def self.ready_to_copy_to_cache(userfile) if ! allok # means timeout occurred state.status_transition("ToCache", "ProvNewer") # checked OK - raise "Sync error: timeout waiting for other clients for " + - "file '#{userfile_id}' for operation 'ToCache'." + raise "Sync error: timeout waiting for other clients for #{prettyfile} for operation 'ToCache'." end # Now, perform the sync_to_cache operation. @@ -220,37 +228,46 @@ def self.ready_to_copy_to_dp(userfile) unless userfile_id return yield end + prettyfile = "'#{userfile.name}' (##{userfile_id})" # for messages state = self.get_or_create_status(userfile_id) puts "SYNC: ToProv: #{state.pretty} Enter" if DebugMessages - # Wait until no other local client is copying the file's content - # in one direction or the other. - allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do - state.reload - state.invalidate_old_status - puts "SYNC: ToProv: #{state.pretty} Check" if DebugMessages - state.status !~ /^To/ # no ToProvider or ToCache - end - puts "SYNC: ToProv: #{state.pretty} Proceed" if DebugMessages + # This loops attempts to wait for and then lock out other + # processes on the same server. + 2.times do + + # Wait until no other local client is copying the file's content + # in one direction or the other. + allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do + state.reload + state.invalidate_old_status + puts "SYNC: ToProv: #{state.pretty} Check" if DebugMessages + state.status !~ /^To/ # no ToProvider or ToCache + end + puts "SYNC: ToProv: #{state.pretty} Proceed" if DebugMessages - if ! allok # means timeout occurred - oldstate = state.status - #state.status_transition(oldstate, "CacheNewer") # do our best; not needed? - raise "Sync error: timeout waiting for file '#{userfile_id}' " + - "in '#{oldstate}' for operation 'ToProvider'." - end + if ! 
allok # means timeout occurred + oldstate = state.status + raise "Sync error: timeout waiting for #{prettyfile} in '#{oldstate}' for operation 'ToProvider'." + end - # No need to do anything if the data is already in sync! - if state.status == "InSync" - state.update_attributes( :accessed_at => Time.now ) - return true - end + # No need to do anything if the data is already in sync! + if state.status == "InSync" + state.update_attributes( :accessed_at => Time.now ) + return true + end + + # Adjust state to let all other processes know what + # WE want to do now. This will lock out other clients. + break if state.status_transition(state.status, "ToProvider") # if we fail, race condition + + end # loop 2 times - # Adjust state to let all other processes know what - # WE want to do now. This will lock out other clients. - state.status_transition!(state.status, "ToProvider") # if we fail, race condition puts "SYNC: ToProv: #{state.pretty} Update" if DebugMessages + if state.status != 'ToProvider' + raise "Sync error: #{prettyfile} cannot be uploaded after two attempts. Status=#{state.status}" + end # Wait until all other clients out there are done # transferring content to/from the provider, one way or the other. @@ -263,8 +280,7 @@ def self.ready_to_copy_to_dp(userfile) if ! allok # means timeout occurred state.status_transition("ToProvider", "CacheNewer") # checked OK - raise "Sync error: timeout waiting for other clients for " + - "file '#{userfile_id}' for operation 'ToProvider'." + raise "Sync error: timeout waiting for other clients for #{prettyfile} for operation 'ToProvider'." end # Now, perform the ToProvider operation. @@ -318,33 +334,43 @@ def self.ready_to_modify_cache(userfile, final_status = 'CacheNewer') unless userfile_id return yield end + prettyfile = "'#{userfile.name}' (##{userfile_id})" # for messages state = self.get_or_create_status(userfile_id) puts "SYNC: ModCache: #{state.pretty} Enter" if DebugMessages - # Wait until no other local client is copying the file's content - # in one direction or the other. - allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do - state.reload - state.invalidate_old_status - puts "SYNC: ModCache: #{state.pretty} Check" if DebugMessages - state.status !~ /^To/ # no ToProvider or ToCache - end - puts "SYNC: ModCache: #{state.pretty} Proceed" if DebugMessages + # This loops attempts to wait for and then lock out other + # processes on the same server. + 2.times do + + # Wait until no other local client is copying the file's content + # in one direction or the other. + allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do + state.reload + state.invalidate_old_status + puts "SYNC: ModCache: #{state.pretty} Check" if DebugMessages + state.status !~ /^To/ # no ToProvider or ToCache + end + puts "SYNC: ModCache: #{state.pretty} Proceed" if DebugMessages - if ! allok # means timeout occurred - oldstate = state.status - raise "Sync error: timeout waiting for file '#{userfile_id}' " + - "in '#{oldstate}' for operation 'ModifyCache'." - end + if ! allok # means timeout occurred + oldstate = state.status + raise "Sync error: timeout waiting for #{prettyfile} in '#{oldstate}' for operation 'ModifyCache'." + end + + # Adjust state to let all other processes know that + # we want to modify the cache. "ToCache" is not exactly + # true, as we are not copying from the DP, but it will + # still lock out other processes trying to start data + # operations, which is what we want. 
+ break if state.status_transition(state.status, "ToCache") # if we fail, race condition + + end # loop 2 times - # Adjust state to let all other processes know that - # we want to modify the cache. "ToCache" is not exactly - # true, as we are not copying from the DP, but it will - # still lock out other processes trying to start data - # operations, which is what we want. - state.status_transition!(state.status, "ToCache") # if we fail, race condition puts "SYNC: ModCache: #{state.pretty} Update" if DebugMessages + if state.status != 'ToCache' + raise "Sync error: cache for #{prettyfile} cannot be updated after two attempts. Status=#{state.status}" + end # Now, perform the ModifyCache operation self.wrap_block( @@ -390,34 +416,43 @@ def self.ready_to_modify_dp(userfile) unless userfile_id return yield end + prettyfile = "'#{userfile.name}' (##{userfile_id})" # for messages state = self.get_or_create_status(userfile_id) puts "SYNC: ModProv: #{state.pretty} Entering" if DebugMessages - # Wait until no other local client is copying the file's content - # in one direction or the other. - allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do - state.reload - state.invalidate_old_status - puts "SYNC: ModProv: #{state.pretty} Check" if DebugMessages - state.status !~ /^To/ # no ToProvider or ToCache - end - puts "SYNC: ModProv: #{state.pretty} Proceed" if DebugMessages + # This loops attempts to wait for and then lock out other + # processes on the same server. + 2.times do + + # Wait until no other local client is copying the file's content + # in one direction or the other. + allok = repeat_every_formax_untilblock(CheckInterval,CheckMaxWait) do + state.reload + state.invalidate_old_status + puts "SYNC: ModProv: #{state.pretty} Check" if DebugMessages + state.status !~ /^To/ # no ToProvider or ToCache + end + puts "SYNC: ModProv: #{state.pretty} Proceed" if DebugMessages - if ! allok # means timeout occurred - oldstate = state.status - #state.status_transition(oldstate, "CacheNewer") # do our best; not needed? - raise "Sync error: timeout waiting for file '#{userfile_id}' " + - "in '#{oldstate}' for operation 'ModifyProvider'." - end + if ! allok # means timeout occurred + oldstate = state.status + raise "Sync error: timeout waiting for #{prettyfile} in '#{oldstate}' for operation 'ModifyProvider'." + end + + # Adjust state to let all other processes know that + # we want to modify the provider side. "ToProvider" is not + # exactly true, as we are not copying to the DP, but it will + # still lock out other processes trying to start data + # operations, which is what we want. + break if state.status_transition(state.status, "ToProvider") # if we fail, race condition + + end # loop 2 times - # Adjust state to let all other processes know that - # we want to modify the provider side. "ToProvider" is not - # exactly true, as we are not copying to the DP, but it will - # still lock out other processes trying to start data - # operations, which is what we want. - state.status_transition!(state.status, "ToProvider") # if we fail, race condition puts "SYNC: ModProv: #{state.pretty} Update" if DebugMessages + if state.status != 'ToProvider' + raise "Sync error: provider content for #{prettyfile} cannot be modified after two attempts. Status=#{state.status}" + end # Wait until all other clients out there are done # transferring content to/from the provider, one way or the other. @@ -429,8 +464,7 @@ def self.ready_to_modify_dp(userfile) end if ! 
allok # means timeout occurred - raise "Sync error: timeout waiting for other clients for " + - "file '#{userfile_id}' for operation 'ModifyProvider'." + raise "Sync error: timeout waiting for other clients for #{prettyfile} for operation 'ModifyProvider'." end # Now, perform the ModifyProvider operation. @@ -479,11 +513,13 @@ def self.ready_to_modify_dp(userfile) def invalidate_old_status # "InSync" state is too old for current RemoteResource - myself = RemoteResource.current_resource - expire = myself.cache_trust_expire # in seconds before now - expire = nil if expire && expire < 3600 # we don't accept thresholds less than one hour - expire = 2.years.to_i if expire && expire > 2.years.to_i - if expire and self.status == "InSync" && self.synced_at < Time.now - expire + if @expire.nil? # this value is global for the current APP (Bourreau or Portal) + myself = RemoteResource.current_resource + @expire = myself.cache_trust_expire || 0 # in seconds before now + @expire = 0 if @expire < 3600 # we don't accept thresholds less than one hour + @expire = 2.years.to_i if @expire > 2.years.to_i + end + if @expire > 0 and self.status == "InSync" && self.synced_at < Time.now - @expire puts "SYNC: Invalid: #{self.pretty} InSync Is Too Old" if DebugMessages self.status_transition(self.status, "ProvNewer") return @@ -504,7 +540,7 @@ def pretty #:nodoc: end # This method changes the status attribute - # in the current task object to +to_state+ but + # in the current sync_status object to +to_state+ but # also makes sure the current value is +from_state+ . # The change is performed in a transaction where # the record is locked, to ensure the transition is diff --git a/BrainPortal/app/models/tool_config.rb b/BrainPortal/app/models/tool_config.rb index 8d4229f16..8f85347cc 100644 --- a/BrainPortal/app/models/tool_config.rb +++ b/BrainPortal/app/models/tool_config.rb @@ -67,6 +67,21 @@ class ToolConfig < ApplicationRecord api_attr_visible :version_name, :description, :tool_id, :bourreau_id, :group_id, :ncpus + # given array of pairs variable/value builds export script, a prefix is added to variables options + def vars_to_export_script(varprefix="") + env = self.env_array || [] + commands = "" + env.each do |name_val| + name = name_val[0] + val = name_val[1] + name.strip! + #val.gsub!(/'/,"'\''") + commands += "export #{varprefix}#{name}=\"#{val}\"\n" + end + commands += "\n" if env.size > 0 + commands + end + # To make it somewhat compatible with the ResourceAccess module, # here's this model's own method for checking if it's visible to a user. def can_be_accessed_by?(user) @@ -154,8 +169,10 @@ def extended_environment # Generates a partial BASH script that initializes environment # variables and is followed a the script prologue stored in the - # object. - def to_bash_prologue + # object. For singularity prologues, special prefixes are added to + # variable names to ensure they will be propagated to the container + # even in presence of --cleanenv parameteres and such + def to_bash_prologue(singularity=false) tool = self.tool bourreau = self.bourreau group = self.group @@ -194,17 +211,25 @@ def to_bash_prologue #--------------------------------------------------- ENV_HEADER - env.each do |name_val| - name = name_val[0] - val = name_val[1] - name.strip! 
- #val.gsub!(/'/,"'\''") - script += "export #{name}=\"#{val}\"\n" + script += vars_to_export_script + + if singularity + script += <<-ENV_HEADER +#--------------------------------------------------- +# Ensuring that environment variables are propagated:#{env.size == 0 ? " (NONE DEFINED)" : ""} +#--------------------------------------------------- + + ENV_HEADER + script += vars_to_export_script("SINGULARITYENV_") + script += vars_to_export_script("APPTAINERENV_") # SINGULARITYENV is to be depricated + end script += "\n" if env.size > 0 prologue = self.script_prologue || "" - script += <<-SCRIPT_HEADER + script += <<-SCRIPT_HEADER + + #--------------------------------------------------- # Script Prologue:#{prologue.blank? ? " (NONE SUPPLIED)" : ""} #--------------------------------------------------- @@ -291,6 +316,13 @@ def is_at_least_version(version) end end + # true if singularity image is defined + def use_singularity? + return self.container_engine == "Singularity" && + ( self.containerhub_image_name.present? || + self.container_image_userfile_id.present? ) + end + # This method calls any custom compare_versions() method defined # in the CbrainTask subclass for the tool of the current tool_config. # Returns true if the version_name of the current tool_config @@ -333,6 +365,8 @@ def cbrain_task_class # dp:1234 # # CBRAIN db registered file # userfile:1234 + # # A ext3 capture filesystem, will NOT be returned here as an overlay + # ext3capture:basename=12G def singularity_overlays_full_paths specs = parsed_overlay_specs specs.map do |knd, id_or_name| @@ -356,6 +390,8 @@ def singularity_overlays_full_paths cb_error "Userfile with id '#{id_or_name}' for overlay fetching not found." if ! userfile userfile.sync_to_cache() rescue cb_error "Userfile with id '#{id_or_name}' for fetching overlay failed to synchronize." userfile.cache_full_path() + when 'ext3capture' + [] # flatten will remove all that else cb_error "Invalid '#{knd}:#{id_or_name}' overlay." end @@ -374,6 +410,16 @@ def data_providers_with_overlays end.compact end + # Returns pairs [ [ basename, size], ...] as in [ [ 'work', '28g' ] + def ext3capture_basenames + specs = parsed_overlay_specs + return [] if specs.empty? + specs + .map { |pair| pair[1] if pair[0] == 'ext3capture' } + .compact + .map { |basename_and_size| basename_and_size.split("=") } + end + ################################################################# # Validation methods ################################################################# @@ -429,6 +475,9 @@ def parsed_overlay_specs # userfile:333 # dp:123 # dp:dp_name + # ext3capture:basename=SIZE + # ext3capture:work=30G + # ext3capture:tool_1.1.2=15M # def validate_overlays_specs #:nodoc: specs = parsed_overlay_specs @@ -444,9 +493,9 @@ def validate_overlays_specs #:nodoc: case kind # different validations rules for file, userfile and dp specs when 'file', 'old style file' # full path specification for a local file, e.g. "file:/myfiles/c.sqs" - if id_or_name !~ /^\/\S+\.(sqs|squashfs)$/i + if id_or_name !~ /^\/\S+\.(sqs|sqfs|squashfs)$/ self.errors.add(:singularity_overlays_specs, - " contains invalid #{kind} named '#{id_or_name}'. It should be a full path that ends in .squashfs or .sqs") + " contains invalid #{kind} named '#{id_or_name}'. It should be a full path that ends in .squashfs, .sqs or .sqfs") end when 'userfile' # db-registered file spec, e.g. 
"userfile:42" @@ -461,9 +510,9 @@ def validate_overlays_specs #:nodoc: self.errors.add(:singularity_overlays_specs, %{" contains invalid userfile id '#{id_or_name}'. The file with id '#{id_or_name}' is not found."} ) - elsif ! userfile.name.end_with?('.sqs') && ! userfile.name.end_with?('.squashfs') + elsif userfile.name.to_s !~ /\.(sqs|sqfs|squashfs)$/ self.errors.add(:singularity_overlays_specs, - " contains invalid userfile with id '#{id_or_name}' and name '#{userfile.name}'. File name should end in .squashfs or .sqs") + " contains invalid userfile with id '#{id_or_name}' and name '#{userfile.name}'. File name should end in .squashfs, .sqs or .sqfs") # todo maybe or/and check file type? end @@ -475,6 +524,12 @@ def validate_overlays_specs #:nodoc: self.errors.add(:singularity_overlays_specs, "DataProvider '#{id_or_name}' is not a SingSquashfsDataProvider") end + when 'ext3capture' # ext3 filesystem as a basename with an initial size + # The basename is limited to letters, digits, numbers and dashes; the =SIZE suffix must end with G or M + if id_or_name !~ /\A\w[\w\.-]+=([1-9]\d*)[mg]\z/i + self.errors.add(:singularity_overlays_specs, "contains invalid ext3capture specification (must be like ext3capture:basename=1g or 2m etc)") + end + else # Other errors self.errors.add(:singularity_overlays_specs, "contains invalid specification '#{kind}:#{id_or_name}'") @@ -573,10 +628,12 @@ def self.create_from_descriptor(bourreau, tool, descriptor, record_path=false) container_info = descriptor.container_image || {} container_engine = container_info['type'].presence.try(:capitalize) - container_engine = "Singularity" if (container_engine == "Docker" && + container_engine = "Singularity" if (container_engine == "Docker" && !bourreau.docker_present? && bourreau.singularity_present? ) + container_index = container_info['index'].presence + container_index = 'docker://' if container_index == 'index.docker.io' # old convention tc = ToolConfig.create!( # Main three keys :tool_id => tool.id, @@ -593,7 +650,7 @@ def self.create_from_descriptor(bourreau, tool, descriptor, record_path=false) :boutiques_descriptor_path => (record_path.presence && descriptor.from_file), # The following three attributes are for containerization; not sure about values :container_engine => container_engine, - :container_index_location => container_info['index'].presence, + :container_index_location => container_index, :containerhub_image_name => container_info['image'].presence, ) diff --git a/BrainPortal/app/models/user.rb b/BrainPortal/app/models/user.rb index c06063e17..9b966bb6e 100644 --- a/BrainPortal/app/models/user.rb +++ b/BrainPortal/app/models/user.rb @@ -382,7 +382,7 @@ def destroy_user_sessions # If option +create_it+ is true, create the key files if necessary. # If option +ok_no_files+ is true, will return the object even if # the key files don't exist yet (default it to raise an exception) - def ssh_key(options = { :create_id => false, :ok_no_files => false }) + def ssh_key(options = { :create_it => false, :ok_no_files => false }) name = "u#{self.id}" # Avoiding username in ssh filenames or in comment. return SshKey.find_or_create(name) if options[:create_it] return SshKey.new(name) if options[:ok_no_files] diff --git a/BrainPortal/app/models/userfile.rb b/BrainPortal/app/models/userfile.rb index a15d5e2ac..0c04fea29 100644 --- a/BrainPortal/app/models/userfile.rb +++ b/BrainPortal/app/models/userfile.rb @@ -649,11 +649,11 @@ def available? # local userfile, and get to the cache quickly if it's already there. 
# # The +attributes+ describes a userfile's attribute for tracking - # the data; normally only the 'name' is required is should be specific + # the data. Normally only the 'name' is required and it should be specific # enough to represent a particular piece of data (e.g. a container # image name with full version in it). A block must be given to the - # method, and it will be invoked if the data is not already cached; - # it will receive a single argument, the path to the caching subsystem + # method, and it will be invoked if the data is not already cached. + # The block will receive a single argument, the path to the caching subsystem # where the userfile's data should be installed (the path is the same # as that returned by 'DataProvider#cache_full_path()'. The block is # expected to fill this path with appropriate files and/or directories. @@ -1155,7 +1155,7 @@ def validate_browse_path # This method is invoked before the creation of any file. # It will raise an CbrainDiskQuotaExceeded exception if the - # user has exceeded a quota (space, or number of files) for the DP. + # user has already exceeded a quota (space, or number of files) for the DP. def check_exceeded_quota! DiskQuota.exceeded!(self.user_id, self.data_provider_id) true diff --git a/BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb b/BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb index 54c56863f..8cf27ec66 100644 --- a/BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb +++ b/BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb @@ -88,5 +88,15 @@ def ssh_shared_options(user = nil, userfile = nil) #:nodoc: self.master(user, userfile).ssh_shared_options end + ################################################################# + # Model Callbacks + ################################################################# + + # Normally, DPs can only be owned by admins. However, this DP class + # is meant to be owned by normal users. + def owner_is_appropriate #:nodoc: + return true + end + end diff --git a/BrainPortal/app/models/work_group.rb b/BrainPortal/app/models/work_group.rb index c7f1851fe..fde5cf535 100644 --- a/BrainPortal/app/models/work_group.rb +++ b/BrainPortal/app/models/work_group.rb @@ -31,7 +31,7 @@ class WorkGroup < Group # This method optimizes the DB lookups necessary to # create the pretty_category_name of a set of WorkGroups def self.prepare_pretty_category_names(groups = [], as_user = nil) - wgs = Array(groups).select { |g| g.is_a?(WorkGroup) && !g.invisible? } + wgs = Array(groups).select { |g| g.is_a?(WorkGroup) && !g.invisible? && !g.public?} wg_ids = wgs.map(&:id) wg_ucnt = WorkGroup.joins("LEFT JOIN groups_users ON groups_users.group_id=groups.id LEFT JOIN users ON users.id=groups_users.user_id").where('groups.id' => wg_ids).group('groups.id').count('users.login') # how many users per workgroup diff --git a/BrainPortal/app/views/access_profiles/show.html.erb b/BrainPortal/app/views/access_profiles/show.html.erb index 7ce15ff9f..52bd6966b 100644 --- a/BrainPortal/app/views/access_profiles/show.html.erb +++ b/BrainPortal/app/views/access_profiles/show.html.erb @@ -65,7 +65,7 @@ <%= show_table(@access_profile, :form_helper => cf, :edit_condition => check_role(:admin_user), :header => @access_profile.new_record? ? 
'Project Membership' : 'Projects In This Profile') do |t| %> <% group_names = (@access_profile.groups.sort_by(&:name).map { |g| link_to_group_if_accessible(g) }.join(", ").html_safe.presence) || "(None)" %> - <% t.edit_cell(:group_ids, :show_width => 2, :no_header => "Projects", :content => group_names) do %> + <% t.edit_cell(:group_ids, :show_width => 2, :no_header => "Projects", :td_options => { :class => "wrap" }, :content => group_names) do %> <%= render :partial => 'shared/group_tables', :locals => { :model => @access_profile } %> diff --git a/BrainPortal/app/views/bourreaux/_notes.html.erb b/BrainPortal/app/views/bourreaux/_notes.html.erb index 023251f1c..7511958df 100644 --- a/BrainPortal/app/views/bourreaux/_notes.html.erb +++ b/BrainPortal/app/views/bourreaux/_notes.html.erb @@ -18,7 +18,7 @@ # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this program. If not, see . +# along with this program. If not, see . # -%> @@ -46,89 +46,5 @@ are several ways that the BrainPortal can be told how to connect to and manage the Execution Server, which explains all the fields in this form. -

-

Hardcoded DB and ActiveResource

-

- - In the most ordinary case, an Execution Server is installed and deployed - manually by a CBRAIN administrator; this requires choosing the - listen port (the port argument to its script/server - -p port command when it's started) and creating its - database.yml file. For such a manual installation, the - only two required field in this form will be those in the box - ActiveResource Configuration. Note that these - two fields are completely ignored if the ActiveResource connection - is being tunnelled (see below). - -

-

Hardcoded, But With Remote Control

-

- - If the fields in the section SSH Remote Control - Configuration are filled and describe the deployment - remote account of the Execution Server, then the BrainPortal will have the - ability to establish a persistent SSH master connection to that - host and allow the administrator to start and stop the Server - right from the WEB interface. Note that the UNIX user under which - the BrainPortal runs must have the proper SSH public keys for that - remote account installed. - -

-

Remote Control And Optional Tunnels

-

- - It's possible the further deviate from the standard hardcoded - Execution Server by having the BrainPortal set up SSH tunnels for - either (or both) of the ActiveResource connection and the - Database Server connection. This can be turned on by entering - port numbers in the two fields in the section Tunnelling - Configuration. For this to work, the SSH Remote - Control Configuration must be operational (see above). The - port numbers specified here must both be public TCP port that are - free on the Execution Server's host; they can be arbitrary numbers between - 1024 and 65530, not used by other services. - -

- - Tunnelling the ActiveResource connection: Entering - a port number here will tell the BrainPortal to select this - port for the Execution Server's Rails listening port (the port - argument to its script/server -p port command when it's - started). When starting the Execution Server using the interface, a SSH - tunnel will be established between a local port (not shown here - anywhere) on the BrainPortal side and the listening HTTP port - of the Server selected here. This means that the two fields in - the form's section ActiveResource Configuration - will be completely ignored. - -

- - Tunnelling the DB server connection: Entering a port - number here will tell the BrainPortal to select this port for the - Execution Server's Rails DB connection; more accurately, by doing this, the - BrainPortal will create from scratch a new database.yml - file, send it over to the Execution Server side, and force it to use - it. This database.yml will tell the Server to connect - to host 127.0.0.1 on the port number specified here. This port - will be configured to tunnel back to the BrainPortal all the way - to the DB server used by the BrainPortal itself. Note that this - can only work if the BrainPortal is connecting to its DB via a IP - domain connection, not a file socket. For MySQL for instance, - it means the BrainPortal's database.yml file must NOT - have the value localhost for the Host - field. Also, it's likely that you'll need to comment out the entry - "bind-address" in the server's my.cnf file. - -

- - A side effect of tunnelling the DB server is that any - database.yml file already present on the Execution Server side will - be backed up, and also that the newly created database.yml - file will be DELETED after the Server is started. This provides a - high security context for the DB server's connection as if there - were no database.yml file at all. And if there were, - and tunnelling is disabled by blanking out the port field, then - the backup database.yml will be restored. - diff --git a/BrainPortal/app/views/data_providers/_data_providers_table.html.erb b/BrainPortal/app/views/data_providers/_data_providers_table.html.erb index b9e168029..aa253354f 100644 --- a/BrainPortal/app/views/data_providers/_data_providers_table.html.erb +++ b/BrainPortal/app/views/data_providers/_data_providers_table.html.erb @@ -3,7 +3,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -30,10 +30,14 @@

<% end %> - + <% t.edit_cell :containerhub_image_name, :content => link_to_userfile_if_accessible(@tool_config.container_image), :header => "ID of the container image" do |f| %> <%= f.text_field :container_image_userfile_id %>
@@ -227,8 +227,9 @@
   A specification can be either a full path (e.g. file:/a/b/data.squashfs),
   a path with a pattern (e.g. file:/a/b/data*.squashfs),
-  a registered file identified by ID (e.g. userfile:123)
-  or a SquashFS Data Provider identified by its ID or name (e.g. dp:123, dp:DpNameHere).
+  a registered file identified by ID (e.g. userfile:123),
+  a SquashFS Data Provider identified by its ID or name (e.g. dp:123, dp:DpNameHere)
+  or an ext3 capture overlay basename (e.g. ext3capture:basename=SIZE, where SIZE is a size such as 12G or 12M).
   In the case of a Data Provider, the overlays will be the files that the provider uses.
   Each overlay specification should be on a separate line.
   You can add comments, indicated with a hash symbol #.
<% end %> + <% t.boolean_edit_cell('tool_config[singularity_use_short_workdir]', + (@tool_config.singularity_use_short_workdir ? "1" : "0"), + "1", "0", + :header => "Use short workdirs inside Singularity") + %> + <% end %> diff --git a/BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb b/BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb index 14af8073a..d7eee7f01 100644 --- a/BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb +++ b/BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb @@ -60,11 +60,6 @@ <% t.pagination - t.column("Execution Server", :bourreau, - :sortable => true, - :filters => default_filters_for(@base_scope, Bourreau) - ) { |tc| tc.bourreau ? link_to_bourreau_if_accessible(tc.bourreau) : html_colorize("(All servers)",'orange')} - t.column("Tool Name", :tool, :sortable => true, :filters => default_filters_for(@base_scope, Tool) @@ -76,6 +71,12 @@ end end + t.column("Execution Server", :bourreau, + :sortable => true, + :filters => default_filters_for(@base_scope, Bourreau) + ) { |tc| tc.bourreau ? link_to_bourreau_if_accessible(tc.bourreau) : html_colorize("(All servers)",'orange')} + + t.column("Tool Project", 'tools.group_id', #:sortable => true, #:filters => default_filters_for(@base_scope, Group) diff --git a/BrainPortal/app/views/tool_configs/show.html.erb b/BrainPortal/app/views/tool_configs/show.html.erb index cd6774997..6b570a903 100644 --- a/BrainPortal/app/views/tool_configs/show.html.erb +++ b/BrainPortal/app/views/tool_configs/show.html.erb @@ -68,9 +68,10 @@

-<%= @bourreau_glob_config.to_bash_prologue if @bourreau_glob_config %>
-<%= @tool_glob_config.to_bash_prologue     if @tool_glob_config     %>
-<%= @tool_local_config.to_bash_prologue    if @tool_local_config    %>
+
+<%= @bourreau_glob_config.to_bash_prologue @tool_local_config&.use_singularity? if @bourreau_glob_config %>
+<%= @tool_glob_config.to_bash_prologue     @tool_local_config&.use_singularity? if @tool_glob_config     %>
+<%= @tool_local_config.to_bash_prologue    @tool_local_config&.use_singularity? if @tool_local_config    %>
 ##########################################
 #### [Wrapped commands would be here] ####
 ##########################################
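
Note on the prologue change just above: to_bash_prologue now takes a flag and, when it is true, re-emits every configured environment variable with the SINGULARITYENV_ and APPTAINERENV_ prefixes through vars_to_export_script, so the variables survive container invocations that use --cleanenv. The following is a minimal sketch, not part of the patch; the FOO/bar variable and the use of ToolConfig.new with :env_array are illustrative assumptions only.

# Hypothetical illustration of the prefixed exports generated for containers.
tc = ToolConfig.new(:env_array => [ [ "FOO", "bar" ] ])    # one made-up configured variable
puts tc.vars_to_export_script                              # export FOO="bar"
puts tc.vars_to_export_script("SINGULARITYENV_")           # export SINGULARITYENV_FOO="bar"
puts tc.vars_to_export_script("APPTAINERENV_")             # export APPTAINERENV_FOO="bar"
# The prefixed forms let Singularity/Apptainer re-inject FOO inside the
# container even when the engine scrubs the host environment.
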
diff --git a/BrainPortal/app/views/userfiles/_default_qc_panel.html.erb b/BrainPortal/app/views/userfiles/_default_qc_panel.html.erb
index cc68d01fd..20af26df6 100644
--- a/BrainPortal/app/views/userfiles/_default_qc_panel.html.erb
+++ b/BrainPortal/app/views/userfiles/_default_qc_panel.html.erb
@@ -23,13 +23,24 @@
 -%>
 
 <% if ! @userfile.is_locally_synced? %>
-  This file must be synced locally to view QC data.
+  
+    This file must be synced locally to view QC data.
+  
 <% else %>
-  <% if @userfile.is_a?(ImageFile) %>
-    <%= image_tag url_for(:action  => :content, :id => @userfile.id) %>
+  <% div_id_to_replace = "sub_viewer_#{@userfile.id}" %>
+  <% link = data_link(@userfile.name, @userfile, div_id_to_replace) %>
+  <% if link =~ /data-url=/ %>
+
+    <%= link %>
+
+  <% else %>
-    This file does not seem to contain any QC data, or no template is available to QC this file type.
+    This file does not seem to contain any QC data, or no template is available to QC this file type.
   <% end %>
 <% end %>
diff --git a/BrainPortal/app/views/userfiles/_quality_control_panel.html.erb b/BrainPortal/app/views/userfiles/_quality_control_panel.html.erb
index f7c436546..dee09e12e 100644
--- a/BrainPortal/app/views/userfiles/_quality_control_panel.html.erb
+++ b/BrainPortal/app/views/userfiles/_quality_control_panel.html.erb
@@ -36,10 +36,12 @@
 Navigation: file <%= @current_index + 1 %> out of <%= @filelist.size %>
 <%= link_to_userfile_if_accessible(@userfile, current_user, :html_options => { :target => '_blank' }) %>

-  <% if @current_index > 0 %>
+  <% not_first_file = @current_index != 0 %>
+  <% not_last_file = @current_index < @filelist.size - 1 %>
+  <% if not_first_file %>
     <%= submit_button "Previous File", :name => :previous %>
   <% end %>
-  <% if @current_index < @filelist.size - 1 %>
+  <% if not_last_file %>
     <%= submit_button "Next File", :name => :next %>
   <% end %>
@@ -66,7 +68,9 @@

-<%= submit_tag "Update (and go to next file)", :name => :update %> +<% update_message = "Update" %> +<% update_message += " (and go to next file)" if not_last_file %> +<%= submit_tag "#{update_message}", :name => :update, :data => {:current_file_id => @userfile.id } %>


diff --git a/BrainPortal/app/views/userfiles/show.html.erb b/BrainPortal/app/views/userfiles/show.html.erb index fe3e43b73..ac315b56e 100644 --- a/BrainPortal/app/views/userfiles/show.html.erb +++ b/BrainPortal/app/views/userfiles/show.html.erb @@ -152,7 +152,7 @@ <% if @userfile.zenodo_doi.starts_with?( ZenodoHelper::ZenodoSandboxDOIPrefix ) %> Published: <%= link_to_deposit(@userfile.zenodo_deposit_id) %> <% else %> - Published: <%= link_to_doi(@userfile.zenodo_doi) %> + Published: <%= link_to_zenodo_doi(@userfile.zenodo_doi) %> <% end %> <% elsif @userfile.zenodo_deposit_id.present? %> In progress: <%= link_to_deposit(@userfile.zenodo_deposit_id) %> @@ -204,26 +204,27 @@ Content <% if @userfile.archived? %> + <%= html_colorize("This #{@userfile.pretty_type} has been archived.", 'red') %>
Content viewers are disabled until the file is unarchived. - <% else %> - <% if ! @userfile.can_be_accessed_by?(current_user, :read) %> + + <% elsif ! @userfile.can_be_accessed_by?(current_user, :read) %> (This file cannot be viewed by you; I wonder how you got here.) - <% elsif @userfile.data_provider.meta[:no_viewers] %> + <% elsif @userfile.data_provider.meta[:no_viewers] %> (This file cannot be viewed as it is stored on Data Provider <%= link_to_data_provider_if_accessible(@userfile.data_provider) %> which is marked as non-viewable) - <% elsif @userfile.data_provider.not_syncable? %> + <% elsif @userfile.data_provider.not_syncable? %> (This file cannot be viewed as it is stored on Data Provider <%= link_to_data_provider_if_accessible(@userfile.data_provider) %> which is configured to not allow synchronization at all) - <% elsif @sync_status == "Corrupted" %> + <% elsif @sync_status == "Corrupted" %> (The content of this file seems to be corrupted. This might be the result @@ -232,26 +233,20 @@ by a task, consider restarting the task's Post Processing stage.) - <% elsif ! @userfile.data_provider.rr_allowed_syncing? %> + <% elsif ! @userfile.data_provider.rr_allowed_syncing? %> (This file cannot be viewed as it is stored on Data Provider <%= link_to_data_provider_if_accessible(@userfile.data_provider) %> which is configured to not allow synchronization to this Portal) - <% elsif (! @userfile.is_locally_synced?) && (! @userfile.data_provider.online?) %> + <% elsif (! @userfile.is_locally_synced?) && (! @userfile.data_provider.online?) %> (This data is not currently synchronized and its Data Provider <%= link_to_data_provider_if_accessible(@userfile.data_provider) %> is offline, so its content is not viewable for the moment) - <% elsif @userfile.is_locally_synced? && @userfile.viewers_with_applied_conditions.blank? %> - - (The contents of this file cannot be viewed: no viewer code available at this moment - for files of type '<%= @userfile.pretty_type %>') - - <% else %> + <% elsif ! @userfile.is_locally_synced? %> - <% if ! @userfile.is_locally_synced? %> <% if @sync_status =~ /^To/ %> (This data file is currently being synchronized. Wait a few seconds for this to complete) <% else %> @@ -260,8 +255,13 @@ to start the synchronization process. This may allow you to view displayable content<% if @userfile.is_a?(FileCollection) %> and extract files from this collection<% end %>). <% end %> -
- <% end %> + + <% elsif @userfile.viewers_with_applied_conditions.blank? %> + + (The contents of this file cannot be viewed: no viewer code available at this moment + for files of type '<%= @userfile.pretty_type %>') + + <% else %> <% if @userfile.viewers_with_applied_conditions.size > 1 %> Change view: @@ -293,8 +293,9 @@ <% end %> - <% end %> + <% end %> + diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json b/BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json index acc8e1269..c2d2ec4ef 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json @@ -133,7 +133,7 @@ "du_report_out": "TextFile" }, "BoutiquesOutputFilenameRenamer": { - "my_output_name": "sinput1" + "du_report_out": [ "sinput1", "my_output_name" ] } } } diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/bourreau/bash_scriptor.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/bourreau/bash_scriptor.rb index 5745079f0..4ae3f5457 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/bourreau/bash_scriptor.rb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/bourreau/bash_scriptor.rb @@ -57,6 +57,7 @@ def setup #:nodoc: def cluster_commands #:nodoc: params = self.params file_ids = params[:interface_userfile_ids] || [] + File.unlink(self.stdout_cluster_filename) rescue nil # needed in case of retries raw_text = params[:bash_script] raw_text.tr!("\r","") # text areas have CRs in line terminators, yuk! diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/portal/boutiques_descriptor_maker_handler.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/portal/boutiques_descriptor_maker_handler.rb index 86fffe623..3ab1847ce 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/portal/boutiques_descriptor_maker_handler.rb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/portal/boutiques_descriptor_maker_handler.rb @@ -56,6 +56,7 @@ def descriptor_for_form self.errors.add(:base, "Your descriptor has syntax errors") desc_user_posted = self.descriptor_when_json_error end + desc_user_posted.delete(:groups) if desc_user_posted.groups.blank? added_input = self.boutiques_descriptor.input_by_id('_bdm_json_descriptor').dup desc_user_posted.inputs.unshift(added_input) desc_user_posted @@ -76,6 +77,7 @@ def before_form # there is nothing to launch. def after_form desc = descriptor_for_form + if self.errors.empty? self.bosh_validation_messages = generate_validation_messages(desc) if self.bosh_validation_messages.to_s.strip != "OK" @@ -84,6 +86,16 @@ def after_form self.bosh_command_preview = generate_command_preview(desc, self.invoke_params) end end + + if self.errors.empty? && (params[:_bdm_reorder] == 'on' || params[:_bdm_pad] == 'on') + btq = descriptor_from_posted_form + btq = btq.pretty_ordered if params[:_bdm_reorder] == 'on' + btq.delete(:groups) if btq.groups.blank? + json = btq.super_pretty_json if params[:_bdm_pad] == 'on' + json ||= JSON.pretty_generate(btq) + self.invoke_params[:_bdm_json_descriptor] = json + end + if self.errors.empty? 
# We must add at least one error to prevent CBRAIN from attempting to launch something. self.errors.add(:base, <<-ALL_OK @@ -93,6 +105,7 @@ def after_form ALL_OK ) end + "" end @@ -116,6 +129,20 @@ def descriptor_from_posted_form #:nodoc: text = descriptor_text_from_posted_form return nil unless text desc = BoutiquesSupport::BoutiquesDescriptor.new_from_string(text) rescue nil + + # Check for something bosh doesn't verify: input IDs mentioned in groups + # that do not exist + zap_it = false + (desc&.groups || []).each do |group| + members = group.members || [] + badid = members.detect { |inputid| (desc.input_by_id(inputid) rescue nil).nil? } + if badid + self.errors.add(:base, "The group '#{group.name}' has a member input id '#{badid}' which doesn't exist") + zap_it = true + end + end + desc = nil if zap_it + desc end @@ -131,7 +158,7 @@ def generate_validation_messages(desc) #:nodoc: rescue => ex return "Bosh validation failed: #{ex.class} #{ex.message}" ensure - File.unlink tmpfile + File.unlink(tmpfile) rescue nil end # Invokes bosh to generate a command preview. diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_boutiques_preview.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_boutiques_preview.html.erb index 3776dff98..92844ed13 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_boutiques_preview.html.erb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_boutiques_preview.html.erb @@ -26,6 +26,8 @@

+ () + ()
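
To make the group-membership validation added in boutiques_descriptor_maker_handler.rb above more concrete, here is a small standalone sketch; the hashes are hand-built for illustration, whereas the real check operates on a BoutiquesSupport::BoutiquesDescriptor and reports through self.errors.

# Hypothetical, self-contained illustration of the group-member check.
inputs = [ { "id" => "infile" }, { "id" => "threshold" } ]
groups = [ { "name" => "Main options", "members" => [ "infile", "no_such_input" ] } ]

known_ids = inputs.map { |i| i["id"] }
groups.each do |group|
  badid = (group["members"] || []).detect { |member_id| ! known_ids.include?(member_id) }
  puts "The group '#{group["name"]}' has a member input id '#{badid}' which doesn't exist" if badid
end
# Prints: The group 'Main options' has a member input id 'no_such_input' which doesn't exist
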

diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb index 0d3ee1746..c2b6649ee 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb @@ -59,6 +59,11 @@

<% end %> +<% if @descriptor.author.present? %> + Tool Author: <%= @descriptor.author %> +
+<% end %> + <% if @descriptor.tool_doi.present? %> Tool DOI: <%= link_to @descriptor.tool_doi, @descriptor.tool_doi, :target => '_blank', :class => 'action_link' %>
@@ -70,7 +75,7 @@ <% end %> <% if @descriptor.descriptor_url.present? %> - Boutiques Descriptor:: <%= link_to @descriptor.descriptor_url, @descriptor.descriptor_url, :target => '_blank', :class => 'action_link' %> + Boutiques Descriptor: <%= link_to @descriptor.descriptor_url, @descriptor.descriptor_url, :target => '_blank', :class => 'action_link' %>
<% end %> diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb index 5b2daa58a..845e9ebc6 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb @@ -63,7 +63,11 @@ <%= param.name %> - <%= format_param.(@task.invoke_params[param.id]) %> + <% if param.type == 'Flag' %> + <%= red_if(@task.invoke_params[param.id].to_s =~ /^1$|^true$/, "No", "Yes", { :color1 => 'red', :color2 => 'green' }) %> + <% else %> + <%= format_param.(@task.invoke_params[param.id]) %> + <% end %> <%- end -%> diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb index b1e7a2289..46cdf29c7 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb @@ -168,11 +168,11 @@ def cluster_commands #:nodoc: echo "" echo "==== Host Info ====" - uname -a - uptime + uname -a 2>/dev/null + uptime 2>/dev/null echo "" - if test -n "$(type lsb_release)" ; then + if test -n "$(type -p lsb_release)" ; then echo "==== LSB Release ====" lsb_release -a echo "" @@ -185,8 +185,8 @@ def cluster_commands #:nodoc: fi if test -e /proc/cpuinfo ; then - echo "==== Last CPU Info ====" - cat /proc/cpuinfo | perl -ne '@x=grep(/./,<>);unshift(@y,pop(@x)) while @x > 0 && $y[0] !~ /^processor/; END { print @y }' + echo "==== Compacted CPU Info ====" + cat /proc/cpuinfo | sort | uniq | grep -v -E 'apicid|^processor|core id' echo "" fi @@ -206,6 +206,10 @@ def cluster_commands #:nodoc: ls -la echo "" + echo "==== Listing Content of Work Directory With Dereferencing ====" + ls -laL + echo "" + _DIAGNOSTIC_COMMANDS_ file_ids.each do |id| diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/apptainer_image.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/apptainer_image.rb new file mode 100644 index 000000000..8e77a7eb6 --- /dev/null +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/apptainer_image.rb @@ -0,0 +1,59 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This model represents a single file containing an Apptainer container image. +class ApptainerImage < FilesystemImage + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + has_viewer :name => 'Image Info', :partial => :info, :if => :is_viewable? 
+ + def self.file_name_pattern #:nodoc: + /\.s?img\z|\.sif\z/i + end + + def is_viewable? #:nodoc: + if ! self.has_apptainer_support? + return [ "The local portal doesn't support inspecting Apptainer images." ] + elsif ! self.is_locally_synced? + return [ "Apptainer image file not yet synchronized" ] + else + true + end + end + + def has_apptainer_support? #:nodoc: + self.class.has_apptainer_support? + end + + # Detects if the system has the 'apptainer' command. + # Caches the result in the class so it won't need to + # be detected again after the first time, for the life + # of the current process. + def self.has_apptainer_support? #:nodoc: + return @_has_apptainer_support if ! @_has_apptainer_support.nil? + out = IO.popen("bash -c 'type -p apptainer'","r") { |f| f.read } + @_has_apptainer_support = out.present? + end + +end + diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/views/_info.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/views/_info.html.erb new file mode 100644 index 000000000..f3c3b07f5 --- /dev/null +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/apptainer_image/views/_info.html.erb @@ -0,0 +1,47 @@ + +<% +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +%> + +<% path = @userfile.cache_full_path.to_s.bash_escape %> +<% cat = lambda { |com| IO.popen(com,"r") { |fh| fh.read } } %> + +
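+<%# The cat helper above runs a shell command and returns its output as a
+    string; path has already gone through bash_escape, so filenames with
+    spaces or quotes are safe to interpolate into the commands below. %>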

Image Inspect
+ +
<%= cat.("apptainer inspect #{path}") %>
+ +
SIF Header
+ +
<%= cat.("apptainer sif header #{path}") %>
+ +
SIF List
+ +
<%= text = cat.("apptainer sif list #{path}") %>
+ +
SIF Items
+ +<% text.split(/\n/).map { |line| line[/^(\d+)/] }.compact.each do |id| %> + +
<%= cat.("apptainer sif info #{id} #{path}") %>
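+<%# This loop iterates over the object IDs parsed from the "apptainer sif list"
+    output above: line[/^(\d+)/] captures the numeric ID at the start of each
+    row, and compact drops header or separator lines that do not match. %>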

+ +<% end %> + diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb index 62139f5d7..aebfca32d 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb @@ -96,7 +96,7 @@ def load_from_content(csv_file_content) # as extracted by cached_csv_array(). IDs will be numeric, or for # missing rows, will contain nils. IDs can be zero. # - # [ 12, 0, 45, nil nil, 433 ] + # [ 12, 0, 45, nil, nil, 433 ] # # Note that this method caches internally its result. To clear the # cache (if the userfile's content has changed for instance) call @@ -241,15 +241,14 @@ def flush_internal_caches # for the array of +userfiles+. nil entries are allowed in +userfiles+ # and will be properly encoded as missing rows with ID set to 0. def self.create_csv_file_from_userfiles(userfiles) - userfile_model_hash = Userfile.columns_hash text_rows = [] assoc_cache = {} userfiles.each do |userfile| row = [] if (userfile.nil?) - row = [0] + Array.new(ATTRIBUTES_LIST.size - 1, "") + row = [0] + Array.new(self::ATTRIBUTES_LIST.size - 1, "") else - ATTRIBUTES_LIST.each do |att| + self::ATTRIBUTES_LIST.each do |att| val = userfile.send(att) # attribute value in mode; can be an id of an assoc if att =~ /_id$/ # try to look up names in other models assoc_cache[[att,val]] ||= ( userfile.send(att.to_s.sub(/_id$/,"")).try(att == :user_id ? :login : :name) || "-") @@ -268,4 +267,33 @@ def self.create_csv_file_from_userfiles(userfiles) csv_file end + # This is like CbrainFileList.create!() but you must + # also provide :userfiles among the attributes; these + # userfiles will be stored as the content of the created + # CbrainFileList. + def self.create_with_userfiles!(attributes) + userfiles = attributes[:userfiles] || cb_error("Need some userfiles for CbrainFileList") + attlist = attributes.reject { |k,v| k.to_s == 'userfiles' } + cbfile = self.create!(attlist) + cbfile.set_userfiles(userfiles) + cbfile + end + + # Replace the content of the CbrainFileList with a new + # CbrainFileList representing +userfiles+. The content + # of the CSV will be immediately uploaded to the provider. + def set_userfiles(userfiles) + flush_internal_caches() + csv = self.class.create_csv_file_from_userfiles(userfiles) + self.cache_writehandle { |fh| fh.write csv } + self + end + + private + + # Can be redefine in sub class + def self.userfile_model_hash + Userfile.columns_hash + end + end diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/views/_cb_file_list.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/views/_cb_file_list.html.erb index fe73edb0b..cdfa2695b 100644 --- a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/views/_cb_file_list.html.erb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/views/_cb_file_list.html.erb @@ -90,7 +90,7 @@ <% val = cvs_row[idx] %> <% if att == :id %> - <% cur_file = Userfile.find_all_accessible_by_user(current_user).where(:id => val).first %> + <% cur_file = Userfile.find_all_accessible_by_user(current_user, :access_requested => :read).where(:id => val).first %> <%= val %> : <%= link_to_userfile_if_accessible(cur_file) %> <% else %> <%= val.nil? ? 
"-" : val %> diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/extended_cbrain_file_list/extended_cbrain_file_list.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/extended_cbrain_file_list/extended_cbrain_file_list.rb new file mode 100644 index 000000000..f23587b18 --- /dev/null +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/extended_cbrain_file_list/extended_cbrain_file_list.rb @@ -0,0 +1,131 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2012 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This class extand the attributes_list array used by the +# CbrainFileList by adding an extra key that correspond +# to the last column of the CSV file. +# +# Example of file content: +# +# 232123,"myfile.txt",425,"TextFile","MainStoreProvider","jsmith","mygroup","{extra_param_1: value_1}" +# 112233,"plan.pdf",3894532,"SingleFile","SomeDP","jsmith","secretproject", "{extra_param_2: value_2}" +# 0,,,,,,, +# 933,"hello.txt",3433434,"TextFile","SomeDP","jsmith","mygroup","{extra_param_3: value_3}" +# +class ExtendedCbrainFileList < CbrainFileList + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # Structure of the CSV file; only the ID is used when this object is used as input to something else. + # When displayed in a web page, the associations to other models are shown by name. + ATTRIBUTES_LIST = superclass::ATTRIBUTES_LIST + [ :json_params ] + + def self.pretty_type #:nodoc: + "Extended CBRAIN List of files" + end + + # Returns an hash extract from the last column of the Extended CBCsv file + # as extracted by cached_csv_array(). Value will be a hash (can be empty) + # + # [ {key_param_1_task_1: value_for_param_1_task_1, key_param_2_task_1: value_for_param_2_task_1}, + # {key_param_1_task_1: value_for_param_1_task_1}, + # {}, + # {}, + # {key_param_1_task_5: value_for_param_1_task_5, key_param_2_task_5: value_for_param_2_task_5} + # ] + # + # Note that this method caches internally its result. To clear the + # cache (if the userfile's content has changed for instance) call + # the method flush_internal_caches(). + # + def ordered_params() + json_params_idx = ATTRIBUTES_LIST.index(:json_params) + @extra_params ||= cached_csv_array.map do |row| + JSON.parse(row[json_params_idx]) + end + + @extra_params + end + + # Many methods of this class cache their result internally + # to avoid reduplicating costly work. If the content of + # the CSV file change, calling flush_internal_caches() will + # clean these caches so they return new, accurate results. + def flush_internal_caches + super + @extra_params = nil + end + + # ["a/b/c", "a/d/e", "x/y/z", "x/w/a"] + # return {"a" => ["a/b/c", "a/d/f"] + # "x" => ["x/y/z", "x/w/a"] + # } + def self.roots_to_fullpaths(relpaths) + # Special situation when a file with a path + # is specified instead of just a basename. 
+ relpaths.inject({}) do |results,relpath| + filenames = Pathname.new(relpath).each_filename.to_a + # E.g: root == sub-123 + parent_dir = filenames.first + res = results[parent_dir] ||= [] + res << relpath if filenames.size != 1 + results + end + end + + # Add json_params reader method to userfile object + def self.extend_userfile_json_params_reader(userfile,json_params_value) + userfile.define_singleton_method(:json_params) { + json_params_value + } + end + + # Extend each userfile with json_params_reader + def self.extended_userfiles_by_name(userfiles,id_to_values) + userfiles.to_a.each do |userfile| + extend_userfile_json_params_reader(userfile,id_to_values[userfile.name]) + end + userfiles + end + + # Extended Userfile.columns_hash with json_params key + def self.userfile_model_hash + extended_userfile_model_hash = Userfile.columns_hash.dup + extended_userfile_model_hash["json_params"].define_singleton_method(:type) { :hash } + extended_userfile_model_hash + end + + # add json_params reader method to userfile object + def self.extend_userfile_json_params_reader(userfile,json_params_value) + userfile.define_singleton_method(:json_params) { + json_params_value + } + end + + # userfile_name => {Id: values} + def self.extended_userfiles_by_name(userfiles,id_to_values) + userfiles.to_a.each do |userfile| + extend_userfile_json_params_reader(userfile,id_to_values[userfile.name]) + end + userfiles + end +end diff --git a/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/squashfs_file.rb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/squashfs_file.rb new file mode 100644 index 000000000..ffb344be2 --- /dev/null +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/squashfs_file.rb @@ -0,0 +1,62 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# Model for filesystem files in SquashFS format. +class SquashfsFile < SingleFile + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + has_viewer :name => 'SquashFS Filesystem', :partial => :squashfs_file, :if => :is_viewable? + + def self.file_name_pattern #:nodoc: + /\.(sqs|squashfs|sqfs|sfs)\z/i + end + + def self.pretty_type #:nodoc: + "SquashFS Filesystem File" + end + + def is_viewable? #:nodoc: + if ! self.has_unsquashfs_support? + return [ "The local portal doesn't support inspecting SquashFS images." ] + elsif ! self.is_locally_synced? + return [ "The SquashFS image file is not yet synchronized" ] + else + true + end + end + + def has_unsquashfs_support? #:nodoc: + self.class.has_unsquashfs_support? + end + + # Detects if the system has the 'unsquashfs' command. + # Caches the result in the class so it won't need to + # be detected again after the first time, for the life + # of the current process. + def self.has_unsquashfs_support? 
#:nodoc: + return @_has_unsquashfs_support if ! @_has_unsquashfs_support.nil? + out = IO.popen("bash -c 'type -p unsquashfs'","r") { |f| f.read } + @_has_unsquashfs_support = out.present? + end + +end diff --git a/BrainPortal/app/views/service/support.html.erb b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/views/_squashfs_file.html.erb similarity index 64% rename from BrainPortal/app/views/service/support.html.erb rename to BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/views/_squashfs_file.html.erb index ded686517..fccb6e610 100644 --- a/BrainPortal/app/views/service/support.html.erb +++ b/BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/squashfs_file/views/_squashfs_file.html.erb @@ -3,7 +3,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -22,15 +22,15 @@ # -%> -<% title 'Service Support' %> +<% path = @userfile.cache_full_path.to_s.bash_escape %> +<% cat = lambda { |com| IO.popen(com,"r") { |fh| fh.read } } %> +<% max_list = 300 %> -

CBRAIN Basic Support

-
- <% if @support[:helpUrl].present? %> -
Help URL:
<%= link_to 'Support', @support[:helpUrl] %> (<%= @support[:helpUrl] %>)
- <% end %> -
Email
<%= mail_to(@support[:supportEmail]) %>
-
Information
<%= link_to 'About Us', @support[:aboutUs] %>
-
+
SquashFS Superblock Information
+
<%= cat.("unsquashfs -p 1 -s #{path}") %>
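+<%# "-p 1" restricts unsquashfs to a single process, "-s" prints the superblock
+    information shown above, and "-lls" below produces an ls -l style listing,
+    truncated to max_list lines with head. %>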
+ +
SquashFS File List (first <%= max_list %> entries only)
+ +
<%= cat.("unsquashfs -p 1 -lls #{path} | head -#{max_list}") %>
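Both ApptainerImage and SquashfsFile above detect their helper command with "type -p" and memoize the answer in a class-level variable. A generic standalone sketch of that pattern (the module and method names below are illustrative, not CBRAIN API):

```ruby
# Detect whether a command exists on the host, caching the answer per
# command name for the life of the current process.
module CommandSupport
  def self.available?(command)
    @available ||= {}
    return @available[command] unless @available[command].nil?
    out = IO.popen(["bash", "-c", "type -p #{command}"], "r") { |f| f.read }
    @available[command] = ! out.strip.empty?
  end
end

puts CommandSupport.available?("ls")         # true on virtually any host
puts CommandSupport.available?("unsquashfs") # false unless squashfs-tools is installed
```

This mirrors the has_apptainer_support? and has_unsquashfs_support? class methods in the diff, which cache the detection result so the shell-out happens at most once per process.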
diff --git a/BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb b/BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb index ada09bed3..d791343b0 100644 --- a/BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb +++ b/BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb @@ -20,6 +20,8 @@ # along with this program. If not, see . # +require 'reline' # Readline.get_screen_size fails me + # We need some sort of constant to refer to the console's # context, which has access to all the pretty helpers etc. ConsoleCtx = self # also in pretty_view.rb in the same directory @@ -56,7 +58,7 @@ def initialize(bourreaux_list = Bourreau.order(:id).all, term_width = nil) @bourreaux = bourreaux_list @width = term_width if term_width.blank? || term_width.to_i < 1 - _,numcols = Readline.get_screen_size rescue [25,120] + _,numcols = Reline.get_screen_size rescue [25,120] @width = numcols end @selected = {} @@ -113,7 +115,7 @@ def interactive_control(initial_command = nil) OPERATIONS userinput = initial_command.presence - userinput ||= Readline.readline("Do something (h for help): ",false) + userinput ||= Reline.readline("Do something (h for help): ",false) userinput = "Q" if userinput.nil? inputkeywords = userinput.downcase.split(/\W+/).map(&:presence).compact @@ -124,7 +126,7 @@ def interactive_control(initial_command = nil) end puts "" if dowait && initial_command.blank? - Readline.readline("Press RETURN to continue: ",false) + Reline.readline("Press RETURN to continue: ",false) puts "" end initial_command = nil @@ -320,7 +322,12 @@ def process_user_letter(letter) #:nodoc: uptime &&= ConsoleCtx.send(:pretty_elapsed, uptime, :num_components => 2) uptime &&= "up for #{uptime}" uptime ||= "DOWN" - printf "%#{max_size}s rev %-9.9s %s, %d/%d workers\n", bou.name, gitrev, uptime, numworkers, expworkers + color_on = color_off = nil + color_on = "\e[31m" if uptime == 'DOWN' # RED for down bourreaux + color_on ||= "\e[33m" if numworkers != expworkers # YELLOW for missing workers + color_on ||= "\e[32m" # GREEN when everything ok + color_off = "\e[0m" if color_on + printf "#{color_on}%#{max_size}s rev %-9.9s %s, %d/%d workers#{color_off}\n", bou.name, gitrev, uptime, numworkers, expworkers end end return true @@ -352,7 +359,7 @@ def process_user_letter(letter) #:nodoc: puts " * @r@ will be substituted by the Bourreau's RAILS root path" puts " * @d@ will be substituted by the Bourreau's DP cache dir path" puts " * @g@ will be substituted by the Bourreau's gridshare dir path" - comm = Readline.readline("Bash command: ") + comm = Reline.readline("Bash command: ") bash_command_on_bourreaux(comm) return true end diff --git a/BrainPortal/config/console_rc/lib/reports.rb b/BrainPortal/config/console_rc/lib/reports.rb index 97dccd0d5..1e674fd12 100644 --- a/BrainPortal/config/console_rc/lib/reports.rb +++ b/BrainPortal/config/console_rc/lib/reports.rb @@ -54,6 +54,11 @@ def acttasks(tasks = CbrainTask.active.all) result end + # Remove column c_types if it's empty everywhere + if list1.all? { |struct| struct[:c_types].blank? 
} + list1.each { |struct| struct.delete(:c_types) } + end + # Remove duplicates from list1 and count them seen={} list2 = list1.select { |r| seen[r] ||= 0 ; seen[r] += 1 ; seen[r] == 1 } diff --git a/BrainPortal/config/initializers/cbrain.rb b/BrainPortal/config/initializers/cbrain.rb index 8a7391f74..478881eaa 100644 --- a/BrainPortal/config/initializers/cbrain.rb +++ b/BrainPortal/config/initializers/cbrain.rb @@ -95,6 +95,9 @@ def self.spawn_with_active_records(destination = nil, taskname = 'Internal Backg reader,writer = IO.pipe # The stream that we use to send the subchild's pid to the parent childpid = Kernel.fork do + # Need to properly tell MySQL/MariaDB that we're disconnecting + ApplicationRecord.connection.disconnect! rescue nil + # Child code starts here reader.close # Not needed in the child! @@ -198,6 +201,9 @@ def self.spawn_fully_independent(taskname = 'Independent Background Task') reader,writer = IO.pipe # The stream that we use to send the subchild's pid to the parent childpid = Kernel.fork do + # Need to properly tell MySQL/MariaDB that we're disconnecting + ApplicationRecord.connection.disconnect! rescue nil + # Child code starts here reader.close # Not needed in the child! diff --git a/BrainPortal/config/initializers/cors.rb b/BrainPortal/config/initializers/cors.rb new file mode 100644 index 000000000..4420121d9 --- /dev/null +++ b/BrainPortal/config/initializers/cors.rb @@ -0,0 +1,12 @@ + +Rails.application.config.middleware.insert_before 0, Rack::Cors do + allow do + # This is a dummy configuration. + # Adjust as needed. + origins 'https://example.com:8888' + resource '/doesnotexist', + :headers => :any, + :methods => [:get] + end +end + diff --git a/BrainPortal/config/initializers/validation_portal.rb b/BrainPortal/config/initializers/validation_portal.rb index 1df6e1d5b..8244286fc 100644 --- a/BrainPortal/config/initializers/validation_portal.rb +++ b/BrainPortal/config/initializers/validation_portal.rb @@ -83,8 +83,8 @@ # # Rake Exceptions By First Argument # - skip_validations_for = [ /^db:/, /^cbrain:plugins/, /^cbrain:test/, /^route/, /^assets/, /^cbrain:nagios/ ] - first_arg = ARGV.detect { |x| x =~ /^[\w:]+$/i } # first thing that looks like abc:def:ghi + skip_validations_for = [ /^db:/, /^cbrain:plugins/, /^cbrain:test/, /^route/, /^assets/, /^cbrain:nagios/, /^cbrain:boutiques:rewrite/ ] + first_arg = ARGV.detect { |x| x =~ /^[\w:]+/i } # first thing that looks like abc:def:ghi first_arg ||= '(none)' if skip_validations_for.any? 
{ |p| first_arg =~ p } #------------------------------------------------------------------------------ diff --git a/BrainPortal/config/routes.rb b/BrainPortal/config/routes.rb index 9b0605940..1a283d135 100644 --- a/BrainPortal/config/routes.rb +++ b/BrainPortal/config/routes.rb @@ -2,7 +2,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -41,7 +41,11 @@ resources :custom_filters, :except => [ :index ] resources :tags, :except => [ :new, :edit ] resources :access_profiles, :except => [ :edit ] - resources :disk_quotas, :only => [ :new, :index, :show, :create, :destroy, :update ] + resources :disk_quotas, :only => [ :new, :index, :show, :create, :destroy, :update ] do + collection do + get 'report' + end + end # Standard CRUD resources, with extra actions @@ -111,10 +115,13 @@ get 'report' post 'report' post 'repair' + post 'check_personal' end collection do get 'dp_access' get 'dp_transfers' + get 'new_personal' + post 'create_personal' end end @@ -195,6 +202,7 @@ get '/about_us' => 'portal#about_us' get '/available' => 'portal#available' get '/search' => 'portal#search' + get '/stats' => 'portal#stats' get '/login' => 'sessions#new' get '/logout' => 'sessions#destroy' get '/session_status' => 'sessions#show' @@ -258,26 +266,6 @@ - #################################################################################### - # Service; most of these actions are only needed - # for the CANARIE monitoring system, and are therefore - # shipped disabled by default, because it's not needed - # anywhere else. - #################################################################################### - #get '/platform/info', :controller => :service, :action => :info - #get '/platform/stats', :controller => :service, :action => :stats - #get '/platform/detailed_stats', :controller => :service, :action => :detailed_stats - #get '/platform/doc', :controller => :service, :action => :doc - #get '/platform/releasenotes', :controller => :service, :action => :releasenotes - #get '/platform/support', :controller => :service, :action => :support - #get '/platform/source', :controller => :service, :action => :source - #get '/platform/tryme', :controller => :service, :action => :tryme - #get '/platform/licence', :controller => :service, :action => :licence - #get '/platform/provenance', :controller => :service, :action => :provenance - #get '/platform/factsheet', :controller => :service, :action => :factsheet - - - #################################################################################### # NeuroHub routes #################################################################################### diff --git a/BrainPortal/data_dumps/README.md b/BrainPortal/data_dumps/README.md index 50bebafd8..09bde1c62 100644 --- a/BrainPortal/data_dumps/README.md +++ b/BrainPortal/data_dumps/README.md @@ -24,5 +24,33 @@ resources that no longer exist. The `reload` task requires a timestamp in argument (e.g. `2021-12-31T120856`). +### Standard regular maintenance + +On a system with a large amount of activity, a regular cleanup +of the ResourceUsage table is necessary. 
The process is performed +in two steps: + +First dump all resource usage objects that refer to objects +no longer existing in the database, and remove them from +the database: + +``` + RAILS_ENV=something rake cbrain:resource_usage:dump[DESTROY_ALL,no] +``` + +Second, re-insert monthly summaries of all removed records so that +total historical usage by users is maintained: + +``` + RAILS_ENV=something rake cbrain:resource_usage:monthly[All] +``` + +Note that this last step will re-create all monthly summaries +cumulatively using the info in all previous YAML dumps. This +rake task can safely be run multiple times, it will not duplicate +summary information. + +### See also + See the file [BrainPortal/lib/tasks/resource_usage_serialization.rake](https://github.com/aces/cbrain/blob/master/BrainPortal/lib/tasks/resource_usage_serialization.rake) for more information about the rake tasks. diff --git a/BrainPortal/db/migrate/20221007094232_remove_old_remote_ressources_attributes.rb b/BrainPortal/db/migrate/20221007094232_remove_old_remote_ressources_attributes.rb new file mode 100644 index 000000000..53c5d7fae --- /dev/null +++ b/BrainPortal/db/migrate/20221007094232_remove_old_remote_ressources_attributes.rb @@ -0,0 +1,7 @@ +class RemoveOldRemoteRessourcesAttributes < ActiveRecord::Migration[5.0] + def change + remove_column :remote_resources, :nh_email_delivery_options, :text, :after => :nh_system_from_email + remove_column :remote_resources, :tunnel_mysql_port, :integer + remove_column :remote_resources, :tunnel_actres_port, :integer + end +end diff --git a/BrainPortal/db/migrate/20230304184206_add_short_task_workdir_to_tool_configs.rb b/BrainPortal/db/migrate/20230304184206_add_short_task_workdir_to_tool_configs.rb new file mode 100644 index 000000000..d8a023571 --- /dev/null +++ b/BrainPortal/db/migrate/20230304184206_add_short_task_workdir_to_tool_configs.rb @@ -0,0 +1,28 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# + +class AddShortTaskWorkdirToToolConfigs < ActiveRecord::Migration[5.0] + def change + add_column :tool_configs, :singularity_use_short_workdir, :boolean, + :default => false, :null => false, :after => :singularity_overlays_specs + end +end diff --git a/BrainPortal/db/migrate/20230418205141_add_unique_sync_status_index.rb b/BrainPortal/db/migrate/20230418205141_add_unique_sync_status_index.rb new file mode 100644 index 000000000..3b4c59691 --- /dev/null +++ b/BrainPortal/db/migrate/20230418205141_add_unique_sync_status_index.rb @@ -0,0 +1,10 @@ +class AddUniqueSyncStatusIndex < ActiveRecord::Migration[5.0] + def up + remove_index :sync_status, [ :userfile_id, :remote_resource_id ] + add_index :sync_status, [ :userfile_id, :remote_resource_id ], :unique => true + end + + def down + remove_index :sync_status, [ :userfile_id, :remote_resource_id ] + end +end diff --git a/BrainPortal/db/schema.rb b/BrainPortal/db/schema.rb index 3ce3e9c33..bdb3f4dc2 100644 --- a/BrainPortal/db/schema.rb +++ b/BrainPortal/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20220913183448) do +ActiveRecord::Schema.define(version: 20230418205141) do create_table "access_profiles", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci" do |t| t.string "name", null: false @@ -263,8 +263,6 @@ t.string "ssh_control_host" t.integer "ssh_control_port" t.string "ssh_control_rails_dir" - t.integer "tunnel_mysql_port" - t.integer "tunnel_actres_port" t.string "cache_md5" t.boolean "portal_locked", default: false, null: false t.integer "cache_trust_expire", default: 0 @@ -290,7 +288,6 @@ t.text "email_delivery_options", limit: 65535 t.string "nh_support_email" t.string "nh_system_from_email" - t.text "nh_email_delivery_options", limit: 65535 t.string "external_status_page_url" t.string "docker_executable_name" t.string "singularity_executable_name" @@ -411,7 +408,7 @@ t.datetime "accessed_at" t.datetime "synced_at" t.index ["remote_resource_id"], name: "index_sync_status_on_remote_resource_id", using: :btree - t.index ["userfile_id", "remote_resource_id"], name: "index_sync_status_on_userfile_id_and_remote_resource_id", using: :btree + t.index ["userfile_id", "remote_resource_id"], name: "index_sync_status_on_userfile_id_and_remote_resource_id", unique: true, using: :btree t.index ["userfile_id"], name: "index_sync_status_on_userfile_id", using: :btree end @@ -441,12 +438,12 @@ create_table "tool_configs", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci" do |t| t.string "version_name" - t.text "description", limit: 65535 + t.text "description", limit: 65535 t.integer "tool_id" t.integer "bourreau_id" - t.text "env_array", limit: 65535 - t.text "script_prologue", limit: 65535 - t.text "script_epilogue", limit: 65535 + t.text "env_array", limit: 65535 + t.text "script_prologue", limit: 65535 + t.text "script_epilogue", limit: 65535 t.datetime "created_at" t.datetime "updated_at" t.integer "group_id" @@ -463,9 +460,10 @@ t.string "containerhub_image_name" t.string "container_engine" t.string "container_index_location" - t.text "singularity_overlays_specs", limit: 65535 + t.text "singularity_overlays_specs", limit: 65535 + t.boolean "singularity_use_short_workdir", default: false, null: false t.string "container_exec_args" - t.boolean "inputs_readonly", default: false + t.boolean "inputs_readonly", default: false t.string "boutiques_descriptor_path" 
t.index ["bourreau_id"], name: "index_tool_configs_on_bourreau_id", using: :btree t.index ["tool_id"], name: "index_tool_configs_on_tool_id", using: :btree diff --git a/BrainPortal/lib/boutiques_boot_integrator.rb b/BrainPortal/lib/boutiques_boot_integrator.rb index fcd3fe1de..6b3dd9b49 100644 --- a/BrainPortal/lib/boutiques_boot_integrator.rb +++ b/BrainPortal/lib/boutiques_boot_integrator.rb @@ -95,7 +95,7 @@ def self.link_from_json_file(path) # This method scans a directory for JSON boutiques descriptors and # loads them all. def self.link_all(dir = CBRAIN::BoutiquesDescriptorsPlugins_Dir) - jsons=Dir.glob(Pathname.new(dir) + "*.json") + jsons=Dir.glob(Pathname.new(dir) + "*.json").sort jsons.each do |json| self.link_from_json_file(json) end diff --git a/BrainPortal/lib/boutiques_collection_basenames_list_maker.rb b/BrainPortal/lib/boutiques_collection_basenames_list_maker.rb new file mode 100644 index 000000000..81e140e79 --- /dev/null +++ b/BrainPortal/lib/boutiques_collection_basenames_list_maker.rb @@ -0,0 +1,98 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This module will extract pattern based on a regex +# specified in the descriptor by: +# +# "cbrain:integrator_modules": { +# "BoutiquesCollectionBasenamesListMaker": [ "input_id", "FileCollection", "A regex to extract prefix from the file name" ] +# } +# +# It is possible to specify a FileCollection type in order to know on which +# input file to apply the regex. +# +# The extracted names will populate the input with value-choices +# according to what was extracted. +# +# Then one task will be created for each choice. +# +# *Note*: The usage of this module is not usable with a Cbcsv file. +# If it will be used with a Cbcsv file, the Cbcsv file will be the input file +# and one task will be created by sample. +# +# Should be usable with ExtendedCbcsv file though. +# +module BoutiquesCollectionBasenamesListMaker + + # Note: to access the revision info of the module, + # you need to access the constant directly, the + # object method revision_info() won't work. + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # If more than one choice is detected in the FileCollection, + # the input_id option will be populated with prefix + # extracted based on pattern specified by the regex in the descriptor. + def descriptor_for_form #:nodoc: + descriptor = super.dup() + input_id, type, regex_string, = descriptor.custom_module_info('BoutiquesCollectionBasenamesListMaker') + + # Get the userfile_ids from the params + # Return immediately if there is no exactly one file. 
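+    # A hypothetical end-to-end example: with a module configuration of
+    #   "BoutiquesCollectionBasenamesListMaker": [ "subject", "FileCollection", "^(sub-\\d+)" ]
+    # and a single selected FileCollection containing "sub-01_T1w.nii.gz" and
+    # "sub-02_T1w.nii.gz", the input "subject" ends up with value-choices
+    # ["sub-01", "sub-02"], and final_task_list() later creates one task per choice.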
+ userfile_ids = params["interface_userfile_ids"] || [] + type_class = type.constantize + userfiles = Userfile.find(userfile_ids).select{|x| x.is_a?(type_class)} + return descriptor if userfiles.count != 1 + + # Fill the input with the list of prefix + regex = Regexp.new(regex_string) + userfile = userfiles.first + file_names = userfile.provider_collection_index(:top, :regular).map(&:name) + file_names.map! {|x| Pathname.new(x).basename.to_s } + input = descriptor.input_by_id(input_id) + + input["value-choices"] = file_names.map do |f_n| + f_n.match(regex) && Regexp.last_match[1] + end.compact.uniq + + descriptor + end + + # One task will be created by value of + # the input specified in the descriptor. + def final_task_list #:nodoc: + descriptor = self.descriptor_for_final_task_list + input_id, _, _ = descriptor.custom_module_info('BoutiquesCollectionBasenamesListMaker') + + params_values = self.invoke_params[input_id] + return super if params_values.blank? || params_values.size == 1 + + # Create one task for each value + params_values.map do |value| + task = self.dup + task.description = task.description || "" + task.description += "\n\nRun with value: #{value}, for input #{input_id}." + task.invoke_params[input_id] = [value] + task + end + + end + end diff --git a/BrainPortal/lib/boutiques_ext3_capturer.rb b/BrainPortal/lib/boutiques_ext3_capturer.rb new file mode 100644 index 000000000..9b9bf15fa --- /dev/null +++ b/BrainPortal/lib/boutiques_ext3_capturer.rb @@ -0,0 +1,65 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This module adds automatic setting up of mounted +# ext3 filesystem as subdirectories of a task, provided +# the tool works in Singularity/Apptainer. +# It is the exact equivalent of adding an ext3 overlay +# configuration entry in the task's tool config. +# +# To include the module automatically at boot time +# in a task integrated by Boutiques, add a new entry +# in the 'custom' section of the descriptor, like this: +# +# "custom": { +# "cbrain:integrator_modules": { +# "BoutiquesExt3Capturer": { +# "work": "50g", +# "tmpdir": "20m" +# } +# } +# } +# +module BoutiquesExt3Capturer + + # Note: to access the revision info of the module, + # you need to access the constant directly, the + # object method revision_info() won't work. + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # Override the default behavior by adding new entries directly + # from the descriptor. + def ext3capture_basenames + # Get standard list as described in tool config + initial_list = super.dup # [ [ basename, size], [basename, size], ... 
] + + # Get values in descriptor, as a hash + descriptor = self.descriptor_for_cluster_commands + ext3_specs = descriptor.custom_module_info('BoutiquesExt3Capturer') + + # Append our own entries; note that duplications of basenames + # will mean only the first entry is used! + initial_list + ext3_specs.to_a # the .to_a transforms the hash into an array of pairs. + end + +end + diff --git a/BrainPortal/lib/boutiques_forced_output_browse_path.rb b/BrainPortal/lib/boutiques_forced_output_browse_path.rb index 383d2b675..e86f7980c 100644 --- a/BrainPortal/lib/boutiques_forced_output_browse_path.rb +++ b/BrainPortal/lib/boutiques_forced_output_browse_path.rb @@ -75,8 +75,25 @@ def name_and_type_for_output_file(output, pathname) config = descriptor.custom_module_info('BoutiquesForcedOutputBrowsePath') browse_path = config[output.id] # "a/b/c" return [ name, type ] if browse_path.blank? # no configured browse_path for this output + browse_path = apply_value_keys(browse_path) # replaces [XYZ] strings with values from params combined = (Pathname.new(browse_path) + name).to_s # "a/b/c/name" [ combined, type ] end + # Returns a modified version of browse_path where the + # substrings [XYZ] are replaced by the value-keys of + # the invoke structure. + def apply_value_keys(browse_path) + descriptor = self.descriptor_for_save_results + + # Prepare the substitution hash + substitutions_by_token = descriptor.build_substitutions_by_tokens_hash( + JSON.parse(File.read(self.invoke_json_basename)) + ) + + new_browse_path = descriptor.apply_substitutions(browse_path, substitutions_by_token) + + new_browse_path + end + end diff --git a/BrainPortal/lib/boutiques_input_value_fixer.rb b/BrainPortal/lib/boutiques_input_value_fixer.rb new file mode 100644 index 000000000..dee765ea2 --- /dev/null +++ b/BrainPortal/lib/boutiques_input_value_fixer.rb @@ -0,0 +1,157 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2024 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This module allows one to fix some of input parameters to specific constand values +# The fixed input(s) would no longer be shown to the user in the form. +# The optional inputs assigned null value will be removed +# (do not use with mandatory input parameters) +# +# In the descriptor, the spec would look like: +# +# "custom": { +# "cbrain:integrator_modules": { +# "BoutiquesInputValueFixer": { +# "n_cpus": 1, +# "mem": "4G", +# "optional_custom_query": null, +# "level": "group" +# } +# } +# } +# +# Our main use case is resource related parameter which seldom participate +# in dependencies and constraints. +# Therefore we remove parameters from the form in a straightforward fashion +# and do not address indirect or transitive dependencies. 
For instance, +# if say i1-requires->i2-requires->i3 while i2 is deleted, dependency +# of i3 on i1 no longer be reflected in web form UI dynamically +module BoutiquesInputValueFixer + + # Note: to access the revision info of the module, + # you need to access the constant directly, the + # object method revision_info() won't work. + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + + # the hash of input parameter values to be fixed or, if value is null, to be omited + def fixed_values + self.boutiques_descriptor.custom_module_info('BoutiquesInputValueFixer') + end + + # deletes fixed inputs listed in the custom 'integrator_modules' + def descriptor_without_fixed_inputs(descriptor) + # input parameters are marked by null values will be excluded from the command line + # other will be given fixed values during execution; neither should appear in web form UI + + fixed_input_ids = fixed_values.keys + descriptor_dup = descriptor.dup + fully_removed = fixed_input_ids.select do |i_id| # this variables are flagged to be removed rather than assigned value + # in the spec, so they will be treated slightly different + input = descriptor_dup.input_by_id(i_id) + value = fixed_values[i_id] + value.nil? || (input.type == 'Flag') && (value.presence.to_s.strip =~ /0|null|false/i || value.blank?) + end + + # generally speaking, boutiques input groups can have three different constraints, + # here we address 1) mutually exclusive constraint, which is the only one present in GUI javascript (the rest are evaluated + # after submission of the form), 2) 'one is required' constraint that affect the initial rendering of the form + # ( though IMHO red stars or other indicators to draw user attention should eventually implemented ) + + descriptor_dup.groups.each do |g| # filter groups, relax restriction to ensure that form can still be submitted + members = g.members - fixed_input_ids + + # some actions at least some group members are actually assigned vals rather than deleted + if (fixed_input_ids & g.members - fully_removed).present? # + # since one input parameter is already selected permanently (fixed), + # we can drop one_is_required constraint + g.one_is_required = false # as result group's checkbox is unselected in form rendering + + # as one of mutually exclusive parameters is selected by setting a fixed value + # the rest of group should be disabled, no remaining + # Whenever deleting all remaining parameters of the group is preferred to disabling + # boutiques author/admin can modify the list of fixed values accordingly + block_inputs(descriptor_dup, members) if g.mutually_exclusive + g.mutually_exclusive = false # will make form's javascript smaller/faster + + # all-or-none constraint is seldom used, does not affect form itself, + # and only validated after the form submission + # and generally presents less pitfalls + # Therefore, at the moment, 'all or none' constraint is not addressed here + + end + g.members = members + end + + # remove empty groups + descriptor_dup.groups = descriptor_dup.groups.select {|g| g.members.present? } + + # delete fixed inputs + descriptor_dup.inputs = descriptor_dup.inputs.select { |i| ! fixed_values.key?(i.id) } # filter out fixed inputs + + # straight-forward delete of fixed inputs from dependencies. + # Indirect and transitive dependencies may be lost for UI + # but will be validated after form submission + descriptor_dup.inputs.each do |i| + i.requires_inputs = i.requires_inputs - fixed_input_ids if i.requires_inputs.present? 
+ i.disables_inputs = i.disables_inputs - fixed_input_ids if i.disables_inputs.present? + i.value_requires.each { |v, a| i.value_requires[v] -= fixed_input_ids } if i.value_requires.present? + i.value_disables.each { |v, a| i.value_disables[v] -= fixed_input_ids } if i.value_disables.present? + end + + descriptor_dup + end + + # this blocks an input parameter by 'self-disabling', rather than explicitly deleting it + # it is a bit unorthodox yet expected to be used seldom + def block_inputs(descriptor, input_ids) + input_ids.each do |input_id| + input = descriptor.input_by_id(input_id) rescue next + input.disables_inputs ||= [] + input.disables_inputs |= [input_id] + input.name += " ( unavailable )" + end + end + + # adjust descriptor to allow check the number of supplied files + def descriptor_for_before_form + descriptor_without_fixed_inputs(super) + end + + # prevent from showing/submitting fixed inputs in the form + def descriptor_for_form + descriptor_without_fixed_inputs(super) + end + + # show all the params + def descriptor_for_show_params + self.invoke_params.merge!(fixed_values) # shows 'fixed' parameters, user would not be able to edit them + super # standard values + end + + # validation step - the original boutiques with combined invocation, for the greatest accuracy + # note, error messages might involve fixed variables + def after_form + self.invoke_params.merge!(fixed_values.compact) # put back fixed values into invocation, if needed + super # Performs standard processing + end + +end diff --git a/BrainPortal/lib/boutiques_output_cache_cleaner.rb b/BrainPortal/lib/boutiques_output_cache_cleaner.rb index 0e1311e82..24f2dacf1 100644 --- a/BrainPortal/lib/boutiques_output_cache_cleaner.rb +++ b/BrainPortal/lib/boutiques_output_cache_cleaner.rb @@ -57,12 +57,16 @@ def descriptor_with_special_input(descriptor) new_input = BoutiquesSupport::Input.new( "name" => "Enable Output Cache Cleaning", "id" => "cbrain_enable_output_cache_cleaner", - "description" => "If set, the cached content of produced outputs are erased when the task completes successfuly.", + "description" => <<-DESC, + If set, the cached content of produced outputs are erased when the task completes successfully. + This does not affect the actual outputs of the task, only their cached content on the execution server. + Turn off this option only if you need a copy of the outputs on the server, e.g. for further processing. + DESC "type" => "Flag", "optional" => false, "default-value" => true, ) - descriptor.inputs <<= new_input + descriptor.inputs << new_input # Add new group with that input groups = descriptor.groups || [] @@ -76,7 +80,7 @@ def descriptor_with_special_input(descriptor) ) groups << cb_mod_group end - cb_mod_group.members <<= new_input.id + cb_mod_group.members << new_input.id descriptor.groups = groups descriptor diff --git a/BrainPortal/lib/boutiques_post_processing_cleaner.rb b/BrainPortal/lib/boutiques_post_processing_cleaner.rb index a0f75272e..fd1f630b2 100644 --- a/BrainPortal/lib/boutiques_post_processing_cleaner.rb +++ b/BrainPortal/lib/boutiques_post_processing_cleaner.rb @@ -37,10 +37,17 @@ # "work", # "*.tmp", # "[OUTFILE_NAME].*.work" -# [ +# ] # } # } # +# This module will also erase EXT3 capture filesystems created by CBRAIN +# if the basename of the filesystem, as configured in the ToolConfig, matches +# one of the entries in this module's configuration. 
So in the code above, +# the content of the file ".capt_work.ext3" would also be erased if a capture +# filesystem was configured for "work". Patterns are not supported for +# this feature. +# module BoutiquesPostProcessingCleaner # Note: to access the revision info of the module, @@ -81,6 +88,15 @@ def save_results end end + # Also erase ext3 catpure files IF they match one of the patterns + ext3capture_basenames.each do |basename, _| + next unless patterns.include?(basename) # must be exact match, e.g. 'work' == 'work' + fs_name = ".capt_#{basename}.ext3" # e.g. .capt_work.ext3, see also in cluster_task.rb + next unless File.file?(fs_name) + self.addlog("Cleaning up EXT3 capture filesystem '#{fs_name}' in work directory") + File.delete(fs_name) rescue nil + end + true end diff --git a/BrainPortal/lib/boutiques_save_std_out_std_err.rb b/BrainPortal/lib/boutiques_save_std_out_std_err.rb new file mode 100644 index 000000000..83933ba68 --- /dev/null +++ b/BrainPortal/lib/boutiques_save_std_out_std_err.rb @@ -0,0 +1,152 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +# This module allow to save the stdout and stderr files of a Boutiques task +# +# To use this module, you need to add the following lines in the descriptor: +# "cbrain:integrator_modules": { +# "BoutiquesSaveStdOutStdErr": { +# "stdout_output_dir": "", +# "stderr_output_dir": "path/to/dir" +# } +# } +# +# In case of a MultilevelSshDataProvider the "path/to/dir" will be use to save the output. +# In case of a no MultilevelSshDataProvider the "path/to/dir" will be ignored. +# +# The value of the key "stdout_output_dir" and "stderr_output_dir" can be set to an empty string, +# in this situation the files will be saved directly in the root folder of the DataProvider. +# +# The output files will be saved as a LogFile with the name: -.std(out|err) +# +module BoutiquesSaveStdOutStdErr + + # Note: to access the revision info of the module, + # you need to access the constant directly, the + # object method revision_info() won't work. + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # This method overrides the one in BoutiquesClusterTask. + # Save the stdout and stderr files of the task as output files. + # The files will be saved as a child of the first input file. 
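+  # Illustrative example (all values made up): for a task whose pretty_type is
+  # "MyTool" and whose bname_tid_dashed is "MyExec-1234", with
+  #   "stdout_output_dir": "logs/stdout"
+  # in the module configuration, the captured stdout becomes a LogFile named
+  # "MyTool-MyExec-1234.stdout" with browse_path "logs/stdout" when the results
+  # data provider supports browse paths (otherwise the path part is dropped).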
+ def save_results + # Get the folder where to save the log files from the descriptor + descriptor = self.descriptor_for_save_results + module_info = descriptor.custom_module_info('BoutiquesSaveStdOutStdErr') + + # Get parent file to set stderr and stdout as children of first input file + main_input_id = descriptor.file_inputs.first.id + file_id = self.invoke_params[main_input_id] + parent_file = Userfile.find(file_id) + + # Save stdout + science_stdout_basename = science_stdout_basename(self.run_number) + save_stdout_basename = (Pathname.new(module_info["stdout_output_dir"]) + + "#{self.pretty_type}-#{self.bname_tid_dashed}.stdout").to_s + stdout_file = save_log_file(science_stdout_basename, save_stdout_basename, parent_file) + self.params["_cbrain_output_cbrain_stdout"] = [stdout_file.id] if stdout_file + + # Save stderr + science_stderr_basename = science_stderr_basename(self.run_number) + save_stderr_basename = (Pathname.new(module_info["stderr_output_dir"]) + + "#{self.pretty_type}-#{self.bname_tid_dashed}.stderr").to_s + stderr_file = save_log_file(science_stderr_basename, save_stderr_basename, parent_file) + self.params["_cbrain_output_cbrain_stderr"] = [stderr_file.id] if stderr_file + + self.save + + super + end + + # Add the stdout and stderr files to the descriptor + # for the show page of the task. + def descriptor_for_show_params #:nodoc: + descriptor = super.dup + + stdout_file = BoutiquesSupport::OutputFile.new({ + "id" => "cbrain_stdout", + "name" => "Standard output", + "description" => "Standard output of the tool", + "optional" => true + }) + + stderr_file = BoutiquesSupport::OutputFile.new({ + "id" => "cbrain_stderr", + "name" => "Standard error", + "description" => "Standard error of the tool", + "optional" => true + }) + + descriptor["output-files"] << stdout_file if !descriptor.output_files.any? { |f| f.id == "cbrain_stdout" } + descriptor["output-files"] << stderr_file if !descriptor.output_files.any? { |f| f.id == "cbrain_stderr" } + + descriptor + end + + private + + # Returns a Userfile object, prepared with a browse_path if necessary. + # To do that it can override the attlist to add a browse_path and then + # call the standard safe_userfile_find_or_new() method. + def safe_logfile_find_or_new(klass, attlist) + name = attlist[:name] + return safe_userfile_find_or_new(klass, attlist) if ! (name.include? "/") # if there is no relative path, just do normal stuff + + # Find all the info we need + attlist = attlist.dup + dp_id = attlist[:data_provider_id] || self.results_data_provider_id + dp = DataProvider.find(dp_id) + pn = Pathname.new(name) # "a/b/c/hello.txt" + + # Make adjustements to name and browse_path + attlist[:name] = pn.basename.to_s # "hello.txt" + if dp.has_browse_path_capabilities? + attlist[:browse_path] = pn.dirname.to_s # "a/b/c" + self.addlog "BoutiquesSaveStdErrOut: result DataProvider browse_path for Stderr and Stdout will be '#{pn.dirname}'" + else + attlist[:browse_path] = nil # ignore the browse_path + self.addlog "BoutiquesSaveStdErrOut: result DataProvider doesn't have multi-level capabilities, ignoring forced browse_path for Stderr and Stdout '#{pn.dirname}'." + end + + # Invoke the standard code + return safe_userfile_find_or_new(klass, attlist) + end + + # Save the log with original_file_path to filename as + # a child of parent_file on the results data provider. 
+ def save_log_file(original_file_path, filename, parent_file) #:nodoc: + self.addlog("Saving log file #{filename}") + file = safe_logfile_find_or_new(LogFile, :name => filename) + + if ! file.save + self.addlog("Could not save back log file #{filename}") + return nil + end + + file.cache_copy_from_local_file(original_file_path) + file.move_to_child_of(parent_file) + self.addlog("Saved log file #{filename}") + + file + end +end + diff --git a/BrainPortal/lib/boutiques_support.rb b/BrainPortal/lib/boutiques_support.rb index 670634031..2b51c5426 100644 --- a/BrainPortal/lib/boutiques_support.rb +++ b/BrainPortal/lib/boutiques_support.rb @@ -60,6 +60,13 @@ def self.validate(json) Group = Class.new(RestrictedHash) { allowed_keys group_prop_names } ContainerImage = Class.new(RestrictedHash) { allowed_keys cont_prop_names } + # Adds a comparison operator to these subobjects so that + # they can be sorted. + # See also Hash.resorted in the CBRAIN core extensions. + [ Input, OutputFile, Group ].each do |klass| + klass.send(:define_method, :'<=>') { |other| self.id <=> other.id } + end + def initialize(hash={}) super(hash) # The following re-assignment transforms hashed into subobjects (like OutputFile etc) @@ -68,7 +75,7 @@ def initialize(hash={}) self.output_files = self.output_files || [] self.groups = self.groups || [] self.custom = self.custom || {} - self.container_image &&= self.container_image # we need to to remain nil if already nil + self.container_image &&= self.container_image # we need it to remain nil if already nil self end @@ -203,6 +210,7 @@ def build_substitutions_by_tokens_hash(invoke_structure) self.inputs.map do |input| next nil if input.value_key.blank? value = invoke_structure[input.id] + value = input.default_value if value.nil? next nil if value.nil? [ input.value_key, value ] end.compact.to_h @@ -227,6 +235,172 @@ def apply_substitutions(string, substitutions_by_tokens, to_strip=[]) newstring end + PRETTY_ORDER_TOP = %w( + name + tool-version + author + description + url + descriptor-url + online-platform-urls + doi + tool-doi + shell + command-line + schema-version + container-image + inputs + groups + output-files + error-codes + suggested-resources + tags + tests + custom + ) + PRETTY_ORDER_INPUT = %w( + id + name + description + type + optional + integer + minimum + exclusive-minimum + maximum + exclusive-maximum + list + list-separator + min-list-entries + max-list-entries + default-value + command-line-flag + command-line-flag-separator + value-key + value-choices + value-disables + disables-inputs + requires-inputs + ) + PRETTY_ORDER_OUTPUT = %w( + id + name + description + optional + list + command-line-flag + value-key + path-template + path-template-stripped-extensions + ) + PRETTY_ORDER_GROUP = %w( + id + name + description + all-or-none + one-is-required + members + ) + + # Returns a dup() of the current descriptor, but with + # the fields re-ordered so as to create a 'pretty' + # layout when printed out (as JSON, YAML etc). + # + # The order puts things like the name, description, command + # version number etc near the top, then then inputs, the + # groups, the outputs, and the custom sections. + def pretty_ordered + ordered = Hash.new # we use a plain hash to hold the newly ordered elems. 
+ selfcopy = self.dup + PRETTY_ORDER_TOP.each { |k| ordered[k] = selfcopy.delete(k).dup if selfcopy.has_key?(k) } + selfcopy.each { |k,v| puts "Top miss: #{k}" ; ordered[k] = v.dup } + final = self.class.new(ordered) + + # Order fields in each input + final.inputs = final.inputs.map do |input| + ordered = Hash.new + selfcopy = input.dup + PRETTY_ORDER_INPUT.each { |k| ordered[k] = selfcopy.delete(k).dup if selfcopy.has_key?(k) } + selfcopy.each { |k,v| puts "Inp miss: #{k}" ; ordered[k] = v.dup } + input.class.new(ordered) + end + + # Order fields in each output-file + final.output_files = final.output_files.map do |output| + ordered = Hash.new + selfcopy = output.dup + PRETTY_ORDER_OUTPUT.each { |k| ordered[k] = selfcopy.delete(k).dup if selfcopy.has_key?(k) } + selfcopy.each { |k,v| puts "Out miss: #{k}" ; ordered[k] = v.dup } + output.class.new(ordered) + end + + # Order fields in each group + final.groups = final.groups.map do |group| + ordered = Hash.new + selfcopy = group.dup + PRETTY_ORDER_GROUP.each { |k| ordered[k] = selfcopy.delete(k).dup if selfcopy.has_key?(k) } + selfcopy.each { |k,v| puts "Group miss: #{k}" ; ordered[k] = v.dup } + group.class.new(ordered) + end + + final + end + + # Returns a JSON text version of the descriptor but with + # the fields aligned with pretty whitespaces, e.g. + # instead of + # + # "name": "megatool", + # "tool-version": "3.14.15926", + # "url": "https://example.com", + # + # we get + # + # "name": "megatool", + # "tool-version": "3.14.15926", + # "url": "https://example.com", + def super_pretty_json + + # Internally, the alignment is made by padding property names with '|' + # and then stripping them out of the normal JSON generated. + pad_keys = ->(hash,length) do + hash.transform_keys! { |k| k.to_s.size >= length ? k : k + ('|' * (length-k.size) ) } + end + maxkeylength = ->(hash) { hash.keys.map(&:to_s).map(&:size).max } + + # Returns a modified hash with keys all padded with '|' + max_pad_keys = ->(hash) do + copy = HashWithIndifferentAccess.new.merge(hash.dup) + max = maxkeylength.(copy) + pad_keys.(copy,max) + copy + end + + final = HashWithIndifferentAccess.new.merge(self.dup) + + final['inputs'].map! { |input| max_pad_keys.(input) } + final['output-files'].map! { |output| max_pad_keys.(output) } if final['output-files'].present? + final['groups'].map! { |group| max_pad_keys.(group) } if final['groups'].present? + final.delete('groups') if final['groups'].blank? + + final['container-image'] &&= max_pad_keys.(final['container-image']) + final['custom'] &&= max_pad_keys.(final['custom']) + + final = max_pad_keys.(final) + + json_with_bars = JSON.pretty_generate(final) + new_json = json_with_bars + .gsub( /\|+": / ) do |bars| + spaces = bars.size - 3; '": ' + (' ' * spaces) + end + + new_json + end + + #------------------------------------------------------ + # Aditional methods for the sub-objects of a descriptor + #------------------------------------------------------ + class Input # This method return the parameter name for the input. 
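The alignment performed by super_pretty_json() above rests on a small trick: property names are padded with '|' characters before JSON generation, and the runs of bars are then replaced by spaces in the generated text. A minimal standalone sketch of the same idea (a simplified illustration, not the CBRAIN code itself; the demo hash is made up):

    require 'json'

    # Pad all keys of a flat hash with '|' so they end up the same length.
    def pad_keys(hash)
      width = hash.keys.map(&:size).max
      hash.map { |k,v| [ k + ('|' * (width - k.size)), v ] }.to_h
    end

    demo = pad_keys({ "name" => "megatool", "tool-version" => "3.14.15926", "url" => "https://example.com" })

    # Generate the JSON, then turn each run of bars (plus the closing quote
    # and colon) back into '": ' followed by the same number of spaces.
    puts JSON.pretty_generate(demo).gsub(/\|+": /) { |bars| '": ' + (' ' * (bars.size - 3)) }

Keys that are already the longest (like "tool-version" here) receive no bars, so after the substitution every value starts in the same column.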
diff --git a/BrainPortal/lib/boutiques_task_logs_copier.rb b/BrainPortal/lib/boutiques_task_logs_copier.rb new file mode 100644 index 000000000..bc2504c35 --- /dev/null +++ b/BrainPortal/lib/boutiques_task_logs_copier.rb @@ -0,0 +1,187 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# + +# This module implements a special step that is executed BEFORE the standard +# post-processing code of CBRAIN is triggered. A Boutiques descriptor can +# include it in the custom section like this: +# +# "custom": { +# "cbrain:integrator_modules": { +# "BoutiquesTaskLogsCopier": { +# "stdout": "local/path/[PARAM1]/ses*/basename[PARAM2]_{taskid}_stdout.log", +# "stderr": "local/path/[PARAM1]/ses*/basename[PARAM2]_{taskid}_stderr.log", +# "runtime": "blah/blah/runtime.kv", +# "descriptor": "blah/blah/descriptor.json", +# "invoke": "blah/blah/params.json", +# "jobscript": "blah/blah/cbrain_script.sh" +# } +# } +# } +# +# The module's behavior is to copy some CBRAIN-specific files (e.g. the STDOUT and STDERR +# capture files of the task) and install them in some subdirectory that (normally) +# will be saved as an output. It can also copy other useful configuration files, +# as shown in the example above. +# +# The copy code will get triggered before CBRAIN runs its normal post-processing +# code, so before it is aware whether the task completed successfully or failed. +# +# Configuration errors in the paths will raise a fatal exception. A missing +# output directory path, however, will only generate a warning within +# the task's processing logs. +# +# The pathname patterns provided can include standard filesystem glob elements +# and Boutiques value-key parameters. The module will try to make sure that +# only one subdirectory path matches the parent location specified by the path, though +# it will attempt to create the last component of the parent if necessary. +# +# Several examples of what is supported: +# +# # Direct path: +# "abc/def/stdout.log" +# +# # Paths with value-keys taken from Boutiques parameters: +# "[OUTPUT_DIR]/[INPUT_FILE].stdout" +# "work/[SUBJECT_ID]/logs/stdout_[SUBJECT_ID].log" +# +# # Path with a value-key AND a glob to find a subdirectory ses-N : +# "work/[SUBJECT_ID]/ses-*/logs/stdout_[SUBJECT_ID].log" +# +# When trying to find the final path for the copied file, the parent dir +# is initially globbed(), and if a single directory is returned, +# it will be used. If none are found, the parent of THAT +# is checked and if it exists, the missing last component directory +# will be created. E.g. for the last example above, if +# "work/sub-1234/ses-2" exists but "work/sub-1234/ses-2/logs" doesn't +# exist, the "logs" subdirectory will be created.
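As a rough standalone illustration of the parent-directory lookup described above (glob the parent pattern; if nothing matches, fall back to the grandparent and create the missing last component), here is a simplified sketch. The resolve_log_dir() helper is hypothetical and omits what the real module also does, namely Boutiques value-key substitutions and work-directory safety checks:

    require 'pathname'
    require 'fileutils'

    # Given a destination pattern such as "work/sub-1234/ses-*/logs/stdout.log",
    # find (or create) the single directory that should receive the copied file.
    # Returns nil when the location is ambiguous or cannot be determined.
    def resolve_log_dir(destpath)
      destpath = Pathname.new(destpath)
      parent   = destpath.parent            # "work/sub-1234/ses-*/logs"
      matches  = Pathname.glob(parent)

      return matches.first if matches.size == 1 # exactly one match: use it
      return nil           if matches.size >  1 # ambiguous: give up

      # No match: if the parent of the pattern exists (and is unique),
      # create just the missing last component (e.g. "logs").
      grandparents = Pathname.glob(parent.parent)
      return nil unless grandparents.size == 1
      dir = grandparents.first + parent.basename
      FileUtils.mkdir(dir)
      dir
    end

    # dir = resolve_log_dir("work/sub-1234/ses-*/logs/stdout_sub-1234.log")
    # FileUtils.cp("path/to/captured_stdout", dir + "stdout_sub-1234.log") if dir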
+module BoutiquesTaskLogsCopier + + # Note: to access the revision info of the module, + # you need to access the constant directly, the + # object method revision_info() won't work. + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + + # This method overrides the one in BoutiquesClusterTask. + # It will attempt to copy the stdout and stderr files + # that CBRAIN captured, and then invoke the normal + # post processing code. + def save_results + + # Get the cleaning paths patterns from the descriptor + descriptor = self.descriptor_for_save_results + destpaths = descriptor.custom_module_info('BoutiquesTaskLogsCopier') + + # Copy STDOUT and STDERR, if possible + install_std_log_file(science_stdout_basename, destpaths[:stdout], "stdout") + install_std_log_file(science_stderr_basename, destpaths[:stderr], "stderr") + + # Copy Boutiques configuration files + install_std_log_file(boutiques_json_basename, destpaths[:descriptor], "boutiques descriptor") + install_std_log_file(invoke_json_basename, destpaths[:invoke], "boutiques parameters") + + # Copy Runtime info file + install_std_log_file(runtime_info_basename, destpaths[:runtime], "runtime info") + + # Copy sbatch/qsub script + install_std_log_file(science_script_basename, destpaths[:jobscript], "jobscript") + + # Performs standard processing + super + end + + # Try to install a file +stdlogfile+ into the destination path + # specified by +destpath+ . destpath can be a pattern + # with glob components and Boutiques parameter value-keys, and + # must be at least one level deep. + # + # See the examples at the top of the module. + def install_std_log_file(stdlogfile, destpath, typeinfo) + + # If we have not configured a capture path, do nothing. + return if destpath.blank? + + # If for some reason the task's work directory doesn't have + # the required file, ignore it too. + return if ! File.file?(stdlogfile) + + descriptor = self.descriptor_for_save_results + + # Prepare the substitution hash and apply it + substitutions_by_token = descriptor.build_substitutions_by_tokens_hash( + JSON.parse(File.read(self.invoke_json_basename)) + ) + destpath = descriptor.apply_substitutions(destpath, substitutions_by_token) + destpath = Pathname.new(destpath).cleanpath + + # Extract the prefix subdirectory paths (which can be globbed) and the basename + prefixglob = destpath.parent + basename = destpath.basename + + # Sanity checks. These errors should never happen because the paths + # and patterns are normally configured by the administrator, who + # should know better than to misconfigure the module or + # point at paths outside the task's work directory. + cb_error "Misconfigured module BoutiquesTaskLogsCopier for #{typeinfo} with absolute path pattern '#{destpath}'" if destpath.absolute? + if prefixglob.to_s.blank? || prefixglob.to_s == '.' + cb_error "Misconfigured module BoutiquesTaskLogsCopier without a prefix subdirectory for #{typeinfo} '#{destpath}'" + end + + # Try to find one and only one directory where to install the file. + dirglobs = Pathname.glob(prefixglob) + + # If we get a pattern that matches several places, we can't do anything. + if dirglobs.size > 1 + self.addlog "Warning: too many intermediate subdirectories match pattern '#{prefixglob}'; #{typeinfo} file not saved." + return + end + + # If we can't find a match at all, maybe we can find a match with just the + # parent directory and we can create the final component. + if dirglobs.empty? 
+ parent_of_prefix_glob = prefixglob.parent + parent_of_prefix_dirs = Pathname.glob(parent_of_prefix_glob) + if parent_of_prefix_dirs.size != 1 + self.addlog "Warning: cannot find intermediate subdirectories matching pattern '#{prefixglob}'; #{typeinfo} file not saved." + return + end + mkdir_path = (Pathname.new(parent_of_prefix_dirs.first) + prefixglob.basename).to_s + Dir.mkdir(mkdir_path) + dirglobs = [ mkdir_path ] + end + + destdir = dirglobs.first + if ! path_is_in_workdir?(destdir) + self.addlog "Misconfigured module BoutiquesTaskLogsCopier: path pattern '#{destpath}' is outside of the task's workdirectory; #{typeinfo} file not saved." + return + end + + self.addlog "Copying #{typeinfo} file to '#{destdir}/#{basename}'" + FileUtils.copy_file(stdlogfile, "#{destdir}/#{basename}") + + end + +end + + + + diff --git a/BrainPortal/lib/cbrain_extensions/hash_extensions/conversions.rb b/BrainPortal/lib/cbrain_extensions/hash_extensions/conversions.rb index 93c7db853..0292f75ff 100644 --- a/BrainPortal/lib/cbrain_extensions/hash_extensions/conversions.rb +++ b/BrainPortal/lib/cbrain_extensions/hash_extensions/conversions.rb @@ -81,6 +81,25 @@ def to_api_xml(options = {}) to_xml({ :dasherize => false, :root => root_tag }.merge(options)) end + # Returns a dup of the hash, where the keys are sorted, and + # any values that are arrays are also sorted. Applies these + # rules recursively. Assumes that all keys and all array values + # are things that can be compared, otherwise this will crash. + def resorted + res = self.class.new + self.keys.sort.each do |key| + val = self[key] + if val.is_a?(Hash) + res[key] = val.resorted + elsif val.is_a?(Array) + res[key] = val.sort.map { |x| x.respond_to?(:resorted) ? x.resorted : x } + else + res[key] = val + end + end + res + end + end end end diff --git a/BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb b/BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb index df9e80abc..48edeed38 100644 --- a/BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb +++ b/BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb @@ -261,9 +261,10 @@ def register(task) container_engine.capitalize! return if container_engine == "Singularity" && !resource.singularity_present? return if container_engine == "Docker" && (!resource.docker_present? && !resource.singularity_present?) - + # If Docker engine isn't present use Singularity container_engine = "Singularity" if (container_engine == "Docker" && !resource.docker_present?) + container_index = 'docker://' if container_index == 'index.docker.io' # old convention ToolConfig.new( :tool_id => task.tool.id, diff --git a/BrainPortal/app/views/service/info.html.erb b/BrainPortal/lib/data_provider_test_connection_error.rb similarity index 57% rename from BrainPortal/app/views/service/info.html.erb rename to BrainPortal/lib/data_provider_test_connection_error.rb index 545527fb5..7d2b8fefd 100644 --- a/BrainPortal/app/views/service/info.html.erb +++ b/BrainPortal/lib/data_provider_test_connection_error.rb @@ -1,9 +1,8 @@ -<%- # # CBRAIN Project # -# Copyright (C) 2008-2012 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # @@ -20,18 +19,13 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . # --%> -<% title 'Service Info' %> +# This class provides an exception class for +# representing a user key connection error. 
+# At the moment it does not guarantee, though, that the connection is good. +class DataProviderTestConnectionError < CbrainError + + Revision_info=CbrainFileRevision[__FILE__] #:nodoc: + +end -

CBRAIN Web Service Information

-

Basic Information

-
-
Name
<%= @info[:name] %>
-
Version
<%= @info[:version] %>
-
Synopsis
<%= @info[:synopsis] %>
-
Release Time
<%= @info[:releaseTime] %>
-
Research Subject
<%= @info[:researchSubject] %>
-
Support Email
<%= @info[:supportEmail] %>
-
Tags
<%= @info[:tags].join(", ") %>
-
diff --git a/BrainPortal/lib/models_report.rb b/BrainPortal/lib/models_report.rb index 66894aca9..533298c52 100644 --- a/BrainPortal/lib/models_report.rb +++ b/BrainPortal/lib/models_report.rb @@ -154,7 +154,7 @@ def self.search_for_token(token, user=current_user) #:nodoc: is_numeric = token =~ /\A\d+\z/ || token == "-9998877" # ... because we'll find by ID - file_scope = Userfile .find_all_accessible_by_user(user) .order(:name) + file_scope = Userfile .find_all_accessible_by_user(user, :access_requested => :read).order(:name) task_scope = CbrainTask .find_all_accessible_by_user(user) .order(:id) rr_scope = RemoteResource.find_all_accessible_by_user(user) .order(:name) dp_scope = DataProvider .find_all_accessible_by_user(user) .order(:name) diff --git a/BrainPortal/lib/portal_sanity_checks.rb b/BrainPortal/lib/portal_sanity_checks.rb index def73adf4..6d9f40179 100644 --- a/BrainPortal/lib/portal_sanity_checks.rb +++ b/BrainPortal/lib/portal_sanity_checks.rb @@ -373,5 +373,23 @@ def self.ensure_scratch_data_provider_exists #:nodoc: scratch.meta['no_viewers'] = 'on' # files can't be viewed in interface end + def self.ensure_expired_messages_are_purged #:nodoc: + + #----------------------------------------------------------------------------- + puts "C> Ensuring expired Messages are purged" + #----------------------------------------------------------------------------- + + todel = Message.all.to_a + .select { |m| m.expiry.present? } + .select { |m| m.expiry < Time.now } + if todel.size > 0 + puts "C> \t - There are #{todel.size} messages to delete." + todel.each { |m| m.destroy } + else + puts "C> \t - There are no messages to delete." + end + + end + end diff --git a/BrainPortal/lib/portal_system_checks.rb b/BrainPortal/lib/portal_system_checks.rb index f8ac0757c..6d3466baa 100644 --- a/BrainPortal/lib/portal_system_checks.rb +++ b/BrainPortal/lib/portal_system_checks.rb @@ -119,11 +119,11 @@ def self.z000_ensure_we_have_a_local_ssh_agent #:nodoc: puts "C> Making sure we have a CBRAIN key for the agent..." #---------------------------------------------------------------------------- - cbrain_identity_file = "#{CBRAIN::Rails_UserHome}/.ssh/id_cbrain_portal" + cbrain_identity_file = "#{CBRAIN::Rails_UserHome}/.ssh/id_cbrain_ed25519" if ! File.exists?(cbrain_identity_file) puts "C> \t- Creating identity file '#{cbrain_identity_file}'." with_modified_env('SSH_ASKPASS' => '/bin/true', 'DISPLAY' => 'none:0.0') do - system("/bin/bash","-c","ssh-keygen -t rsa -f #{cbrain_identity_file.bash_escape} -C 'CBRAIN_Portal_Key' /dev/null 2>/dev/null") + system("/bin/bash","-c","ssh-keygen -t ed25519 -f #{cbrain_identity_file.bash_escape} -C 'CBRAIN_Portal_Key' /dev/null 2>/dev/null") end end @@ -132,8 +132,8 @@ def self.z000_ensure_we_have_a_local_ssh_agent #:nodoc: else CBRAIN.with_unlocked_agent curkeys=agent.list_keys - if curkeys.size > 0 - puts "C> \t- Identity already present in agent: #{curkeys[0]}" + if digest = curkeys.detect { |string| string.to_s =~ /\(ED25519\)/i } # = not == + puts "C> \t- Identity already present in agent: #{digest}" else ok = with_modified_env('SSH_ASKPASS' => '/bin/true', 'DISPLAY' => 'none:0.0') do agent.add_key_file(cbrain_identity_file) rescue nil # will raise exception if anything wrong @@ -146,6 +146,20 @@ def self.z000_ensure_we_have_a_local_ssh_agent #:nodoc: end end end + + # Add old key if it exists. 
+ old_identity_file = "#{CBRAIN::Rails_UserHome}/.ssh/id_cbrain_portal" + if File.exists?(old_identity_file) + ok_old = with_modified_env('SSH_ASKPASS' => '/bin/true', 'DISPLAY' => 'none:0.0') do + agent.add_key_file(old_identity_file) rescue nil + end + if ok_old + puts "C> \t- Added OLD identity to agent from file: '#{old_identity_file}'." + else + puts "C> \t- WARNING: cannot add OLD identity from file: '#{old_identity_file}'." + end + end + end diff --git a/BrainPortal/lib/ssh_key.rb b/BrainPortal/lib/ssh_key.rb index 9f996d348..3024072ed 100644 --- a/BrainPortal/lib/ssh_key.rb +++ b/BrainPortal/lib/ssh_key.rb @@ -39,7 +39,7 @@ class SshKey CONFIG = { #:nodoc: :ssh_keys_dir => (Rails.root rescue nil) ? "#{Rails.root.to_s}/user_keys" : "/not/yet/configured", :exec_ssh_keygen => `bash -c "type -p ssh-keygen"`.strip, - :ssh_keygen_type => "rsa", + :ssh_keygen_type => "ed25519", :debug => false, } @@ -124,7 +124,7 @@ def validate_files! raise RuntimeError.new("Public file for SSH Key '#{@name}' does not exist.") unless File.exists?(pub_path) && File.size(pub_path) > 50 raise RuntimeError.new("Private file for SSH Key '#{@name}' does not exist.") unless - File.exists?(priv_path) && File.size(priv_path) > 1000 + File.exists?(priv_path) && File.size(priv_path) > 300 true end @@ -211,7 +211,7 @@ def private_key_path # Returns the private key (in SSH format) def private_key(i_know_what_i_am_doing = false) - raise RuntimeError("Private key access denied") unless i_know_what_i_am_doing == 'I Know What I Am Doing' + raise RuntimeError.new("Private key access denied") unless i_know_what_i_am_doing == 'I Know What I Am Doing' File.read(private_key_path) end diff --git a/BrainPortal/lib/tasks/boutiques_rewrite.rake b/BrainPortal/lib/tasks/boutiques_rewrite.rake new file mode 100644 index 000000000..1013840d3 --- /dev/null +++ b/BrainPortal/lib/tasks/boutiques_rewrite.rake @@ -0,0 +1,98 @@ + +# +# CBRAIN Project +# +# Copyright (C) 2008-2023 +# The Royal Institution for the Advancement of Learning +# McGill University +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +namespace :cbrain do + namespace :boutiques do + desc "Reads a Boutiques descriptor and writes it back with adjustments" + + task :rewrite, [:action] => :environment do |t,args| + + args.with_defaults(:action => 'reorder') + action = args.action + raise "This task's action must be 'reorder' (default), or 'pad' or 'pad+reorder'" unless + action.match /\A(reorder|pad|pad\+reorder)\z/ + + # There is no good way to provide standard command line + # args to a rake task, so I have to butcher ARGV myself. + args = ARGV.size > 1 ? 
ARGV[1..ARGV.size-1] : [] # remove 'rake' + while args.size > 0 && args[0] =~ /^cbrain:boutiques|^-/ # remove options and task name + args.shift + end + + # Usage + if args.size != 1 + puts <<-USAGE + Usage: + rake cbrain:boutiques:rewrite boutiques.json + rake cbrain:boutiques:rewrite[reorder] boutiques.json # default + rake cbrain:boutiques:rewrite[pad] boutiques.json + rake cbrain:boutiques:rewrite[pad+reorder] boutiques.json + + This task will read the content of 'boutiques.json' and + write back 'new_boutiques.json'. + + The single option is a keyword that determines which rewriting + procedure to perform. + + With 'reorder' (the default), the properties are reordered + in a pretty way. + + With 'pad', the JSON produced will contain extra spaces to + align all the values together. + + Unfortunately, because of the way rake tasks work, the + full path to 'boutiques.json' must be provided, or a + path relative to CBRAIN's BrainPortal directory. + + USAGE + exit 1 + end + + filename = args.shift + pathname = Pathname.new(filename) + newfile = pathname.dirname + "new_#{pathname.basename}" + + puts "Reading file #{filename}..." + btq = BoutiquesSupport::BoutiquesDescriptor.new_from_file(filename) + + if action =~ /reorder/ + puts "Re-ordering..." + btq = btq.pretty_ordered + btq.delete('groups') if btq.groups.blank? # stupid btq spec say it must be completely absent + json = JSON.pretty_generate(btq) + end + + if action =~ /pad/ + puts "Padding values..." + btq.delete('groups') if btq.groups.blank? # stupid btq spec say it must be completely absent + json = btq.super_pretty_json + end + + puts "Saving #{newfile}..." + File.open(newfile.to_s,"w") { |fh| fh.write json } + + puts "Done." + + end + end +end + diff --git a/BrainPortal/lib/tasks/cbrain_nagios_checker.rake b/BrainPortal/lib/tasks/cbrain_nagios_checker.rake index 55285a5b8..37a68833b 100644 --- a/BrainPortal/lib/tasks/cbrain_nagios_checker.rake +++ b/BrainPortal/lib/tasks/cbrain_nagios_checker.rake @@ -43,6 +43,7 @@ namespace :cbrain do task :dps => :environment do CbrainSystemChecks.check([:a002_ensure_Rails_can_find_itself]) + PortalSystemChecks.check([:z000_ensure_we_have_a_local_ssh_agent]) # Restores STDOUT and STDERR so that nagios # can capture our pretty message at the end. 
diff --git a/BrainPortal/lib/tasks/resource_usage_serialization.rake b/BrainPortal/lib/tasks/resource_usage_serialization.rake index 9f28673a2..8f29ec069 100644 --- a/BrainPortal/lib/tasks/resource_usage_serialization.rake +++ b/BrainPortal/lib/tasks/resource_usage_serialization.rake @@ -212,6 +212,9 @@ namespace :cbrain do # Main processing loop for all classes klass_names.each do |klass_name| + puts "\n-------------------------------------------------------" + puts "Reloading ResourceUsage records for class #{klass_name}" + # Find all files for klass_name globpattern = Rails.root + "data_dumps" + "#{klass_name}.*.yaml*" # matches .gz too files = Dir.glob(globpattern) diff --git a/BrainPortal/lib/view_helpers.rb b/BrainPortal/lib/view_helpers.rb index 9ad850b66..fbf791d1e 100644 --- a/BrainPortal/lib/view_helpers.rb +++ b/BrainPortal/lib/view_helpers.rb @@ -99,8 +99,6 @@ def pretty_elapsed(numseconds,options = {}) ] components = components.select { |c| c[0] > 0 } - components.pop while components.size > 0 && components[-1] == 0 - components.shift while components.size > 0 && components[0] == 0 if options[:num_components] while components.size > options[:num_components] diff --git a/BrainPortal/public/401.html b/BrainPortal/public/401.html index 58e70975f..84d633712 100644 --- a/BrainPortal/public/401.html +++ b/BrainPortal/public/401.html @@ -3,63 +3,45 @@ You are not authorized to view this page (401) - + +
-
-

You are not authorized to view this page.

-
+

You are not authorized to view this page.

Stop trying to be a hacker and get back to work.

diff --git a/BrainPortal/public/404.html b/BrainPortal/public/404.html index ab1bceb20..a5bd42279 100644 --- a/BrainPortal/public/404.html +++ b/BrainPortal/public/404.html @@ -3,63 +3,45 @@ The page you were looking for doesn't exist (404) - + +
-
-

The page you were looking for doesn't exist.

-
+

The page you were looking for doesn't exist.

You may have mistyped the address or the page may have moved.

diff --git a/BrainPortal/public/422.html b/BrainPortal/public/422.html index 7e30a2f0b..3ac3de251 100644 --- a/BrainPortal/public/422.html +++ b/BrainPortal/public/422.html @@ -3,63 +3,45 @@ The change you wanted was rejected (422) - + +
-
-

The change you wanted was rejected.

-
+

The change you wanted was rejected.

Maybe you tried to change something you didn't have access to.

diff --git a/BrainPortal/public/500.html b/BrainPortal/public/500.html index a022b7da4..929fda37e 100644 --- a/BrainPortal/public/500.html +++ b/BrainPortal/public/500.html @@ -3,63 +3,45 @@ We're sorry, but something went wrong (500) - + +
-
-

We're sorry, but something went wrong.

-
+

We're sorry, but something went wrong.

We've been notified about this issue and we'll take a look at it shortly.

diff --git a/BrainPortal/public/502.html b/BrainPortal/public/502.html index 0d4d1417c..d9575c2b9 100644 --- a/BrainPortal/public/502.html +++ b/BrainPortal/public/502.html @@ -3,65 +3,48 @@ We're sorry, this server is down for maintenance (502) - + +
-
-

We're sorry, this server is down for maintenance.

-
-

Please return in a few minutes.
- Maintenance periods usually last between 5 and 30 minutes.

+

We're sorry, this server is down for maintenance.

+

+ Please return in a few minutes. +
+ Maintenance periods usually last between 5 and 30 minutes. +

diff --git a/BrainPortal/public/images/brainbrowser-loader.gif b/BrainPortal/public/images/brainbrowser-loader.gif deleted file mode 100644 index cd6055863..000000000 Binary files a/BrainPortal/public/images/brainbrowser-loader.gif and /dev/null differ diff --git a/BrainPortal/public/javascripts/cbrain.js b/BrainPortal/public/javascripts/cbrain.js index d2df9e9d4..8dd335e87 100644 --- a/BrainPortal/public/javascripts/cbrain.js +++ b/BrainPortal/public/javascripts/cbrain.js @@ -117,7 +117,7 @@ loaded_element.find("select").each(function(){ var select = $(this); - + var defined_width = (select.context.style.width); if ( defined_width !== '' ){ select.chosen({ width: defined_width }); @@ -125,7 +125,7 @@ select.chosen({ width: '25em' }); } }); - + ///////////////////////////////////////////////////////////////////// // // UI Helper Methods see application_helper.rb for corresponding @@ -631,8 +631,8 @@ $(hidden_box).val( value ); }; - // Value of the header box is used to set - // the checked status of child boxes + // Value of the header box is used to set + // the checked status of child boxes var click_select_all = (header_box) => { var checkbox_class = header_box.data("checkbox-class"); @@ -649,27 +649,27 @@ }); // Define on click event for each child of a `select_all` element. - $(".select_all").each( (index,input) => { - var checkbox_class = $(input).data("checkbox-class"); - - $(input).load(click_select_all($(input))); - - var checkbox_class_elements = $('.' + checkbox_class); - checkbox_class_elements.each(function(index, element) { - $(element).on("click", () => { - var number_of_checkbox = checkbox_class_elements.filter((i,e) => e.checked).length; - if (number_of_checkbox === 0) { - set_hidden_select_all($(input), "none"); - input.checked = false; - } else if (checkbox_class_elements.length === number_of_checkbox) { - set_hidden_select_all($(input), "all"); - input.checked = true; - } else { - set_hidden_select_all($(input), "some"); - input.checked = false; - } + $(".select_all").each( (index,input) => { + if ($(input).data("persistant-name")) { + var checkbox_class = $(input).data("checkbox-class"); + $(input).load(click_select_all($(input))); + var checkbox_class_elements = $('.' 
+ checkbox_class); + checkbox_class_elements.each(function(index, element) { + $(element).on("click", () => { + var number_of_checkbox = checkbox_class_elements.filter((i,e) => e.checked).length; + if (number_of_checkbox === 0) { + set_hidden_select_all($(input), "none"); + input.checked = false; + } else if (checkbox_class_elements.length === number_of_checkbox) { + set_hidden_select_all($(input), "all"); + input.checked = true; + } else { + set_hidden_select_all($(input), "some"); + input.checked = false; + } + }); }); - }); + }; }); $(document).delegate(".select_master", "change", function() { diff --git a/BrainPortal/public/javascripts/dynamic-table.js b/BrainPortal/public/javascripts/dynamic-table.js index 895db26a5..58f1c7dbf 100644 --- a/BrainPortal/public/javascripts/dynamic-table.js +++ b/BrainPortal/public/javascripts/dynamic-table.js @@ -167,6 +167,9 @@ /* sorting, filtering and pagination requests */ dyntbl + .delegate('.dt-fpop-find > input', 'keypress', function (event) { + if (event.key == "Enter") event.preventDefault(); + }) .undelegate('.dt-sort-btn, .dt-fpop-txt, .dt-pag-pages > a', 'click.dyn-tbl') .delegate( '.dt-sort-btn, .dt-fpop-txt, .dt-pag-pages > a', 'click.dyn-tbl', function (event) { event.preventDefault(); diff --git a/BrainPortal/public/javascripts/userfiles.js b/BrainPortal/public/javascripts/userfiles.js index 6a83c5d83..e22b4046f 100644 --- a/BrainPortal/public/javascripts/userfiles.js +++ b/BrainPortal/public/javascripts/userfiles.js @@ -33,7 +33,7 @@ function cbrain_userfile_launch_bar(tool_name, file_status_text, have_selection, $('#menu_bar').after( $('
') - .append($('Select some files to launch ' + tool_name + '')) + .append($('Select some files to launch ' + tool_name + '')) .append($('' + file_status_text + '')) .append($('').button({ disabled: !have_selection })) ); @@ -53,18 +53,60 @@ function cbrain_attach_userfile_checkboxes(userfile_checkboxes, tool_name, launc userfile_checkboxes .unbind('change.launch_task') .bind('change.launch_task', function () { - var checked = userfile_checkboxes.filter(':checked').length; + launch_task(); + }); +} - /* Update the button in the dialog */ - launch_button.val((checked ? "Launch " : "Prepare ") + tool_name); +// Generate the launch_task div if prepare_tool_id present as a parameter +function launch_task() { + var parameters = window.location.search.split(/\?|&/); + + // scan the query params in the URL, trying to find "prepare_tool_id=NNN" + var prepare_tool_id = undefined; + for (var i = 0; i < parameters.length; i++) { + var [name, id] = parameters[i].split(/=/); + if (name === "prepare_tool_id") { + prepare_tool_id = id; + break; + } + } + + var tool_name = $("#tool_name_to_launch").text(); + if (tool_name === '') { + tool_name === undefined; + } + + if (!prepare_tool_id && !tool_name) { + return; + } + // Fetch tool name according to `prepare_tool_id` or + // Fetch prepare_tool_id according to tool name. + for (let tool of document.getElementsByClassName("toolsLink")) { + var id = tool.dataset.toolId; + var name = tool.childNodes[0].data; + if (prepare_tool_id && id === prepare_tool_id) { + tool_name = name; + break; + } + if (tool_name !== '' && name === tool_name) { + prepare_tool_id = id; + break; + } + } - /* And the launch_bar, if it exists */ - $('.launch_bar span.file_status') - .text(checked ? "Launch with " + checked + " file(s)" : "No files selected"); + if (tool_name === undefined) { + return; + } - $('.launch_bar button') - .button(checked ? 'enable' : 'disable'); - }); + /* Do we have some files selected to launch the task on? */ + var nb_selected_files = parseInt($('.psel-count').text()); + var have_selection = nb_selected_files > 0; + + var file_status_text = have_selection ? "Launch with " + nb_selected_files + " file(s)" : "No files selected"; + var url = "tasks/new?tool_id=" + prepare_tool_id; + + // Generate the launch_bar div and attach action on the userfile_checkboxes + cbrain_userfile_launch_bar(tool_name, file_status_text, have_selection, url); } $(function() { @@ -441,49 +483,6 @@ $(function() { /* Show/Hide dynamic actions/menu elements according to current selection */ (function () { - // Generate the launch_task div if prepare_tool_id present as a parameter - function launch_task() { - var parameters = window.location.search.split(/\?|&/); - var launch_button = $(this).find('input.launch_tool'); - var userfile_checkboxes = $("input[name='file_ids[]']"); - - // scan the query params in the URL, trying to find "prepare_tool_id=NNN" - var prepare_tool_id = undefined; - for (var i = 0; i < parameters.length; i++) { - var [name, id] = parameters[i].split(/=/); - if (name === "prepare_tool_id") { - prepare_tool_id = id; - break; - } - } - - if (!prepare_tool_id) { - return; - } - - // Fetch tool name according to `prepare_tool_id` - var tool_name = undefined; - for (let tool of document.getElementsByClassName("toolsLink")) { - var id = tool.dataset.toolId; - var name = tool.childNodes[0].data; - if (id === prepare_tool_id) { - tool_name = name; - break; - } - } - - /* Do we have some files selected to launch the task on? 
*/ - var nb_selected_files = parseInt($('.psel-count').text()); - var have_selection = nb_selected_files > 0; - - var file_status_text = have_selection ? "Launch with " + nb_selected_files + " file(s)" : "No files selected"; - var url = "tasks/new?tool_id=" + prepare_tool_id; - - // Generate the launch_bar div and attach action on the userfile_checkboxes - cbrain_userfile_launch_bar(tool_name, file_status_text, have_selection, url); - cbrain_attach_userfile_checkboxes(userfile_checkboxes, tool_name, launch_button); - } - function toggle(checked, persistent) { if (typeof checked === 'undefined') checked = $('input[name="file_ids[]"]:checked').length; diff --git a/BrainPortal/spec/boutiques/boutiques_tester_spec.rb b/BrainPortal/spec/boutiques/boutiques_tester_spec.rb index fe622f7a4..1eb4e96ac 100644 --- a/BrainPortal/spec/boutiques/boutiques_tester_spec.rb +++ b/BrainPortal/spec/boutiques/boutiques_tester_spec.rb @@ -50,6 +50,7 @@ before(:all) do createInputFiles # Create required input files PWD = Dir.pwd # Save the starting dir form which the tests were launched + @admin = User.admin end before(:each) do @@ -58,7 +59,7 @@ # Build some of the cbrain environment @user, @group = FactoryBot.create(:user), FactoryBot.create(:group) @dp = FlatDirLocalDataProvider.new({ - :online => true, :read_only => false, :remote_dir => '.', :name => "dp1", :user_id => @user.id, :group_id => @group.id + :online => true, :read_only => false, :remote_dir => '.', :name => "dp1", :user_id => @admin.id, :group_id => @group.id }) @dp.save! # Lambda for constructing cbcsv files diff --git a/BrainPortal/spec/controllers/userfiles_controller_spec.rb b/BrainPortal/spec/controllers/userfiles_controller_spec.rb index 88c5b31df..687861b93 100644 --- a/BrainPortal/spec/controllers/userfiles_controller_spec.rb +++ b/BrainPortal/spec/controllers/userfiles_controller_spec.rb @@ -32,21 +32,19 @@ class << file; attr_reader :tempfile; end RSpec.describe UserfilesController, :type => :controller do let(:admin) { create(:admin_user, :login => "admin_user" ) } + let(:data_provider) { create(:flat_dir_local_data_provider, :user => admin, :online => true, :group => EveryoneGroup.first, :read_only => false) } let(:site_manager) { create(:site_manager) } - let(:dp_site_manager) { create(:flat_dir_local_data_provider, :user => site_manager, :online => true, :read_only => false) } let(:user) { create(:normal_user, :site => site_manager.site) } - let(:dp_user) { create(:flat_dir_local_data_provider, :user => user, :online => true, :read_only => false) } let(:admin_userfile) { create(:single_file, :user => admin) } let(:admin_userfile_2) { create(:single_file, :user => admin) } - let(:site_manager_userfile) { create(:single_file, :user => site_manager, :data_provider => dp_site_manager) } + let(:site_manager_userfile) { create(:single_file, :user => site_manager, :data_provider => data_provider) } let(:user_userfile) { create(:single_file, :user => user, :data_provider => data_provider) } let(:child_userfile) { create(:single_file, :user => admin, :parent_id => admin_userfile.id) } - let(:group_userfile) { create(:single_file, :group_id => user.group_ids.last, :data_provider => dp_user) } + let(:group_userfile) { create(:single_file, :group_id => user.group_ids.last, :data_provider => data_provider) } let(:public_group) { create(:group, :public => true, :creator_id => admin.id )} let(:public_group_file) { create(:single_file, :user=> user, :group_id => public_group.id, :data_provider => data_provider)} let(:mock_userfile) { 
mock_model(TextFile, :id => 1).as_null_object } let(:mock_userfile2) { mock_model(TextFile, :id => 2).as_null_object } - let(:data_provider) { create(:flat_dir_local_data_provider, :user => user, :online => true, :read_only => false) } userfile = FactoryBot.attributes_for(:userfile) after(:all) do diff --git a/BrainPortal/spec/factories/portal_factories.rb b/BrainPortal/spec/factories/portal_factories.rb index 03c532c47..06f3512a3 100644 --- a/BrainPortal/spec/factories/portal_factories.rb +++ b/BrainPortal/spec/factories/portal_factories.rb @@ -121,7 +121,7 @@ sequence(:name) { |n| "dataprovider_#{n}" } read_only { true } type { "FlatDirLocalDataProvider" } - association :user, factory: :normal_user + association :user, factory: :admin_user association :group end diff --git a/BrainPortal/spec/models/remote_resource_spec.rb b/BrainPortal/spec/models/remote_resource_spec.rb index 4b4a04294..a127854cf 100644 --- a/BrainPortal/spec/models/remote_resource_spec.rb +++ b/BrainPortal/spec/models/remote_resource_spec.rb @@ -334,7 +334,6 @@ end it "should return a 'localhost' url if ssh control and active resource tunnel info given" do allow(remote_resource).to receive(:has_ssh_control_info?).and_return(true) - allow(remote_resource).to receive(:tunnel_actres_port).and_return(true) expect(remote_resource.site).to match(/^http:\/\/localhost/) end end diff --git a/BrainPortal/spec/models/tool_config_spec.rb b/BrainPortal/spec/models/tool_config_spec.rb index 5ab14038a..731851f1f 100644 --- a/BrainPortal/spec/models/tool_config_spec.rb +++ b/BrainPortal/spec/models/tool_config_spec.rb @@ -32,7 +32,6 @@ let(:no_b_tool_config) { create(:tool_config, :bourreau => nil) } let(:no_t_tool_config) { create(:tool_config, :tool => nil) } - it "should allow admin user to access a tool config even if they don't belong to its group" do expect(tool_config.can_be_accessed_by?(user)).to be_truthy end @@ -159,12 +158,14 @@ context "fill HEADER" do it "should print 'Configuration: tool_config.id'" do - expect(tool_config.to_bash_prologue).to match(/Configuration\s?:\s+#\s+#{tool_config.id}/) + expect(tool_config.to_bash_prologue).to match(/Configuration\s?:\s+#\s+#{tool_config.id}/) + expect(tool_config.to_bash_prologue(singularity: true)).to match(/Configuration\s?:\s+#\s+#{tool_config.id}/) end it "should print 'Tool: ALL' if specific tool is not defined" do tool_config.tool = nil - expect(tool_config.to_bash_prologue).to match(/Tool\s?:\s+ALL/) + expect(tool_config.to_bash_prologue).to match(/Tool\s?:\s+ALL/) + expect(tool_config.to_bash_prologue(singularity: true)).to match(/Tool\s?:\s+ALL/) end it "should print 'Tool: tool_config.tool.name' if specific tool is defined" do @@ -195,12 +196,14 @@ it "should print 'Description: (NONE SUPPLIED)' if description is blank" do tool_config.description = nil tool_config.tool = tool - expect(tool_config.to_bash_prologue).to match(/Description\s?:\s+\(NONE SUPPLIED\)/) + expect(tool_config.to_bash_prologue).to match(/Description\s?:\s+\(NONE SUPPLIED\)/) + expect(tool_config.to_bash_prologue true).to match(/Description\s?:\s+\(NONE SUPPLIED\)/) end it "should print 'Description: tool_config.description' if description is blank" do tool_config.tool = tool - expect(tool_config.to_bash_prologue).to match(/Description\s?:\n\#\-+\n\n\#\s+#{tool_config.description}/) + expect(tool_config.to_bash_prologue).to match(/Description\s?:\n\#\-+\n\n\#\s+#{tool_config.description}/) + expect(tool_config.to_bash_prologue true).to 
match(/Description\s?:\n\#\-+\n\n\#\s+#{tool_config.description}/) end end @@ -222,6 +225,22 @@ expect(tool_config.to_bash_prologue).to match(/Environment variables\s?:\n\#\-+\n\n#{script}/) end + it "should not print 'Environment variables: export SINGULARITYENV_name1=\"value1\".... if config has no singularity" do + tool_config.env_array = [["name1", "value1"],["name2","value2"]] + expect(tool_config.to_bash_prologue).not_to match(/(SINGULARITYENV|APPTAINERENV)/) + end + it "should print 'export SINGULARITYENV_name1=\"value1\".... if env is not empty and config uses singularity" do + tool_config.env_array = [["name1", "value1"],["name2","value2"]] + + script = "" + tool_config.env_array.each do |name_val| + name = "SINGULARITYENV_" + name_val[0].strip + val = name_val[1] + script += "export #{name}=\\\"#{val}\\\"\\n" + end + + expect(tool_config.to_bash_prologue true).to match(/#{script}/) + end end context "fill SCRIPT" do diff --git a/BrainPortal/spec/modules/github_ci_spec.rb b/BrainPortal/spec/modules/github_ci_spec.rb index 811df96df..7307515a8 100644 --- a/BrainPortal/spec/modules/github_ci_spec.rb +++ b/BrainPortal/spec/modules/github_ci_spec.rb @@ -23,7 +23,7 @@ require 'rails_helper' # This test file is meant to test stuff that are particular to -# being in a Travis CI testing environment. +# being in a Github CI testing environment. describe "GithubCI" do describe "environment" do diff --git a/BrainPortal/spec/modules/resource_access_spec.rb b/BrainPortal/spec/modules/resource_access_spec.rb index c924b8feb..0d621374a 100644 --- a/BrainPortal/spec/modules/resource_access_spec.rb +++ b/BrainPortal/spec/modules/resource_access_spec.rb @@ -26,11 +26,10 @@ let(:normal_user) { create(:normal_user) } let(:site_manager) { create(:site_manager) } let(:admin) { create(:admin_user) } - let(:scratch_dp) { ScratchDataProvider.main } - let(:free_resource) { create(:ssh_data_provider) } - let(:group_resource) { create(:ssh_data_provider, :group => user.groups.last) } - let(:site_resource) { create(:ssh_data_provider, :user => create(:normal_user, :site => user.site)) } - let(:owned_resource) { create(:ssh_data_provider, :user => user) } + let(:free_resource) { create(:cbrain_task) } + let(:group_resource) { create(:cbrain_task, :group => user.groups.last) } + let(:site_resource) { create(:cbrain_task, :user => create(:normal_user, :site => user.site)) } + let(:owned_resource) { create(:cbrain_task, :user => user) } describe "#can_be_accessed_by?" 
do @@ -147,7 +146,7 @@ site_resource owned_resource # BTW: fails if the rake task 'db:sanity:check' was not run - expect(DataProvider.find_all_accessible_by_user(admin).map(&:id)).to match_array([scratch_dp.id, free_resource.id, group_resource.id, site_resource.id, owned_resource.id]) + expect(CbrainTask.find_all_accessible_by_user(admin).map(&:id)).to match_array([free_resource.id, group_resource.id, site_resource.id, owned_resource.id]) end end @@ -158,7 +157,7 @@ group_resource site_resource owned_resource - expect(DataProvider.find_all_accessible_by_user(site_manager).map(&:id)).to match_array([group_resource.id, site_resource.id, owned_resource.id]) + expect(CbrainTask.find_all_accessible_by_user(site_manager).map(&:id)).to match_array([group_resource.id, site_resource.id, owned_resource.id]) end end @@ -169,7 +168,7 @@ group_resource site_resource owned_resource - expect(DataProvider.find_all_accessible_by_user(user).map(&:id)).to match_array([group_resource.id, owned_resource.id]) + expect(CbrainTask.find_all_accessible_by_user(user).map(&:id)).to match_array([group_resource.id, owned_resource.id]) end end @@ -180,48 +179,48 @@ let(:user) { admin } it "should find owned resources" do - expect(DataProvider.find_accessible_by_user(owned_resource.id, admin).id).to eq(owned_resource.id) + expect(CbrainTask.find_accessible_by_user(owned_resource.id, admin).id).to eq(owned_resource.id) end it "should find site-associated resources" do - expect(DataProvider.find_accessible_by_user(site_resource.id, admin).id).to eq(site_resource.id) + expect(CbrainTask.find_accessible_by_user(site_resource.id, admin).id).to eq(site_resource.id) end it "should find group-associated resources" do - expect(DataProvider.find_accessible_by_user(group_resource.id, admin).id).to eq(group_resource.id) + expect(CbrainTask.find_accessible_by_user(group_resource.id, admin).id).to eq(group_resource.id) end it "should find non-associated resources" do - expect(DataProvider.find_accessible_by_user(free_resource.id, admin).id).to eq(free_resource.id) + expect(CbrainTask.find_accessible_by_user(free_resource.id, admin).id).to eq(free_resource.id) end end describe "for site managers" do let(:user) { site_manager } it "should find owned resources" do - expect(DataProvider.find_accessible_by_user(owned_resource.id, site_manager).id).to eq(owned_resource.id) + expect(CbrainTask.find_accessible_by_user(owned_resource.id, site_manager).id).to eq(owned_resource.id) end it "should find site-associated resources" do - expect(DataProvider.find_accessible_by_user(site_resource.id, site_manager).id).to eq(site_resource.id) + expect(CbrainTask.find_accessible_by_user(site_resource.id, site_manager).id).to eq(site_resource.id) end it "should find group-associated resources" do - expect(DataProvider.find_accessible_by_user(group_resource.id, site_manager).id).to eq(group_resource.id) + expect(CbrainTask.find_accessible_by_user(group_resource.id, site_manager).id).to eq(group_resource.id) end it "should raise ActiveRecord::RecordNotFound when used to find non-associated resources" do - expect{DataProvider.find_accessible_by_user(free_resource.id, site_manager)}.to raise_error(ActiveRecord::RecordNotFound) + expect{CbrainTask.find_accessible_by_user(free_resource.id, site_manager)}.to raise_error(ActiveRecord::RecordNotFound) end end describe "for users" do let(:user) { normal_user } it "should find owned resources" do - expect(DataProvider.find_accessible_by_user(owned_resource.id, user).id).to eq(owned_resource.id) + 
expect(CbrainTask.find_accessible_by_user(owned_resource.id, user).id).to eq(owned_resource.id) end it "should rause ActiveRecord::RecordNotFound when used to find site-associated resources" do - expect{DataProvider.find_accessible_by_user(site_resource.id, user)}.to raise_error(ActiveRecord::RecordNotFound) + expect{CbrainTask.find_accessible_by_user(site_resource.id, user)}.to raise_error(ActiveRecord::RecordNotFound) end it "should find group-associated resources" do - expect(DataProvider.find_accessible_by_user(group_resource.id, user).id).to eq(group_resource.id) + expect(CbrainTask.find_accessible_by_user(group_resource.id, user).id).to eq(group_resource.id) end it "should rause ActiveRecord::RecordNotFound when used to find non-associated resources" do - expect{DataProvider.find_accessible_by_user(free_resource.id, user)}.to raise_error(ActiveRecord::RecordNotFound) + expect{CbrainTask.find_accessible_by_user(free_resource.id, user)}.to raise_error(ActiveRecord::RecordNotFound) end end end diff --git a/Release-Notes.md b/Release-Notes.md index 00e612e5c..dd046f5bb 100644 --- a/Release-Notes.md +++ b/Release-Notes.md @@ -1,6 +1,111 @@ ## CBRAIN/NeuroHub Release Notes +#### Version 6.3.0 Released 2023-01-26 + +(Nearly a full year since the previous release! The diff is 19,773 lines long!) + +User support and user interface improvements: + +* The S3 DataProvider class has been extended to fully support the + browse_path feature, allowing files to be registered at arbitrary + depth within the object namespace tree of the S3 bucket. +* The launch button in the file manager was improved so that external + sites can directly link to a prepared pair of dataset/tool. Used + by the CONP project, mostly. +* We added the standard 'This site use a cookie' banner. +* The globally visible list of available tools and datasets was + cleaned up and re-arranged in two tabs, each with two tables for + the public and restricted thingies. +* Added a new userfile model in the base distribution: ZipArchive. +* Special API hooks for the LORIS projects were adjusted; although + they can also be used by non LORIS actors, they're pretty specific. + +Admin and codebase improvements: + +* A new type of user called an 'AutomatedUser' has been added; it + is basically the same as a NormalUser, but the type can help admins + identify accounts that are meant to be accessed by automated + systems (API calls etc). +* A DataUsage model was added to track and count how often files + are being downloaded, copied, used in processing, or viewed. Admins + can selectively enable this on a project by project basis, and + counts are aggregated on a user + month-by-month basis. +* A DiskQuota model was added, it allows administrator to impose + limits on the number of files and their total sizes for any user + on any data provider. +* The communication channels between the portal and the Bourreau + are now completely performed by setting up UNIX-domain sockets + on the Bourreau side. No longer do we open a network port on + localhost! The connections are established by proper -L and -R + SSH options, which now support such sockets. +* A new rake task help developers and admins manage their CBRAIN + instances (cbrain:models:broken:*) +* The boot mechanism for Bourreau was rewritten as plain bash shell + wrappers to allow a faster startup than then old Ruby bootstrapping + code. The Bourreau is still in Ruby, of course, but prepping it + up no longer requires a costly initial Ruby setup script. 
+* Speaking of the boot system, Bourreaux servers now launch a + separate watchdog process (also a bash script) that will ping the + Bourreau every 20 minutes and force it to shut down if the DB + connection (or any SSH tunnel) is shut down unexpectedly. This + keeps the PID file from staying around for no reason. +* We removed from the GitHub-hosted codebase the hardcoded cookie + secret keys; these were never really a security issue (given that in + 'production' mode the admin was supposed to create them), but for + convenience now any true production or development CBRAIN system + will generate its own secret key deterministically (yet in a + non-guessable way). +* Admins can force specific users not only to link their account + to a GlobusAuth provider, but also to link it to a specific provider chosen + by the admin. And once the linkage is done, the password method + is permanently disabled for such users. +* We cleaned up a bunch of system attributes that are no longer + used (like port numbers for DB and ActiveResource connections to + Bourreaux, which are now always tunnelled through SSH). +* Admin users have access to the new 'last' command in the console, + and the 'p' (ping) command in the ibc interface. +* Support for Apptainer as the new Singularity engine. +* Bourreaux can be configured to log to an external file some + information about each job submitted (user, jobid, name, user + Globus name, etc). +* When configuring a ToolConfig, the admin no longer has to explicitly + duplicate the environment variables that the tool needs depending + on whether the tool runs in Singularity/Apptainer or not. Before, + the admins had to set both XYZ=a and SINGULARITYENV_XYZ=a; now + it's done automatically. +* Admins can now visualize directly in the interface the Boutiques + descriptor associated with a particular ToolConfig, for a tool + configured with the new integrator. +* We cleaned up (removed) most of the controller actions that were + required by the Canadian agency that initially funded CBRAIN (CANARIE). + These were being monitored by them but they discontinued their + side. +* Admins can create notification messages that will show up in the + dashboard of all users (e.g. notices for downtime, etc.) + +Boutiques improvements: + +* The (relatively) new Boutiques integrator has been extended with lots + of modules to let integrators customize the behavior of their tools: + * BoutiquesAllowedExitCodes + * BoutiquesFileNameMatcher + * BoutiquesForcedOutputBrowsePath + * BoutiquesInputCacheCleaner + * BoutiquesInputSubdirMaker + * BoutiquesOutputCacheCleaner + * BoutiquesOutputFilenameRenamer + +* Launching task arrays with a CbrainFileList now allows the user + to provide extra parameters specific to each row in the file list. + To do so, the file list should be an ExtendedCbrainFileList and + the last column should contain a serialized JSON structure that + can be merged into the Boutiques parameters of the task. + +* A new tool BoutiquesDescriptorMaker is provided as part of the + base distribution. It allows a developer to test 'live' what a + Boutiques descriptor would look like in CBRAIN. + #### Version 6.2.0 Released 2022-01-28 (After eleven months, the `git diff` output is over 12,000 lines long!) @@ -119,7 +224,7 @@ New CBRAIN features: (some of these apply to NeuroHub too) UserkeyFlatDirSshDataProvider class in NeuroHub. * These keys (full pair) can be pushed to a Bourreau so that the CBRAIN code running there can connect as the user.
Users - have control over which bourreau to push their key pair to. + have control over which Bourreau to push their key pair to. * CBRAIN can generate a new API token and show it to the user in their 'my account' page. * When a user accepts an invitation to join a project, the diff --git a/Travis/Dockerfile.travis b/Travis/Dockerfile.travis deleted file mode 100644 index dcf1acdf7..000000000 --- a/Travis/Dockerfile.travis +++ /dev/null @@ -1,223 +0,0 @@ - -############################################ -# This Dockerfile builds a docker image -# suitable to boot a CBRAIN BrainPortal -# where tests can be run. It is meant -# to be run within a Travis Continuous -# Integration virtual machine, -# although invoking its entry point manually -# from docker is also a possibility. -# -# Do not use the resulting docker container -# as a CBRAIN installation, it won't work, -# and you'll have no persistent data because -# the SQL server is completely inside the -# container fileystem. -############################################ - -# We use CentOS 7; CentOS 8 needs further adjustments, as it's new (Oct 2019) -FROM centos:7 - - - -##################################### -# Package updates and installations # -##################################### - -# Note: keep the package list alphabetically -# ordered to facilitate parsing - -RUN yum update -y -RUN yum install -y epel-release -RUN yum install -y \ - autoconf \ - automake \ - make \ - bzip2 \ - bison \ - gcc-c++ \ - git \ - glibc-devel \ - glibc-headers \ - gpg \ - libffi-devel \ - libmysqlclient-dev \ - libsodium \ - libtool \ - libxml2 \ - libxml2-devel \ - libyaml-devel \ - mariadb-devel \ - mariadb-server \ - openssl-devel \ - patch \ - readline-devel \ - sqlite-devel \ - zlib-devel \ - which \ - wget - -# The following UID and GID are chosen -# to match what is usually the unprivileged user -# that runs inside the Travis CI virtual machines, -# but that should not make much difference. 
-RUN groupadd -g 500 cbrain -RUN useradd -u 500 -g 500 cbrain - -# Environment variables for the MYSQL DB -ENV MYSQL_ROOT_PASSWORD="my-secret-pw" MYSQL_USER="cb_user" MYSQL_DATABASE="cb_db_test" MYSQL_PASSWORD="cbpw12345" - - - -############################################# -# MySQL server installation and configuration -############################################# - -RUN mysql_install_db --force # we need --force so it ignores the fake hostname -RUN mkdir -p /var/lib/mysql /var/run/mysqld -RUN chown -R mysql:mysql /var/lib/mysql /var/run/mysqld && \ - chmod 777 /var/run/mysqld -RUN rm -f /var/lib/mysql/aria_log_control - -RUN mysqld_safe & sleep 3 && \ - /usr/bin/mysqladmin -u root password "$MYSQL_ROOT_PASSWORD" - -RUN mysqld_safe & sleep 3 && \ - /usr/bin/mysql -u root --password="$MYSQL_ROOT_PASSWORD" -e "create database $MYSQL_DATABASE;" - -RUN mysqld_safe & sleep 3 && \ - /usr/bin/mysql -u root --password="$MYSQL_ROOT_PASSWORD" -e "grant all on $MYSQL_DATABASE.* to '$MYSQL_USER'@'localhost' identified by '$MYSQL_PASSWORD';" - - - -############################# -# Ruby and rvm installation # -############################# - -USER cbrain - -ENV RUBY_VERSION=2.6.3 - -RUN cd $HOME && curl -sSL https://rvm.io/mpapis.asc | gpg2 --import - -RUN cd $HOME && curl -sSL https://rvm.io/pkuczynski.asc | gpg2 --import - - -RUN cd $HOME && curl -sSL https://get.rvm.io | bash -s stable - -RUN cd $HOME && echo "source $HOME/.rvm/scripts/rvm" >> $HOME/.bashrc - -RUN bash --login -c 'rvm install $RUBY_VERSION --autolibs=read' - -RUN bash --login -c 'rvm --default $RUBY_VERSION' - - - -################################ -# Rails application bundling # -################################ - -# These four statements is a way for the -# people building the container to specify -# variations on what base CBRAIN installation -# to use. -ARG CBRAIN_REPO=https://github.com/aces/cbrain.git -ARG CBRAIN_BRANCH=dev -ENV CBRAIN_REPO=$CBRAIN_REPO -ENV CBRAIN_BRANCH=$CBRAIN_BRANCH - -# Edit manually the following line to have your docker installation -# skip its cache of the previous container build, if necessary. -# Just having a different commit number in the echo statement will do. -# This can be necessary if you know that the code on the GitHub -# repo has changed since that last build. -RUN echo Force install using CBRAIN at commit c1e59cb4ac72a9ff66e76a42c607a269a043ee4d - -# Extract initial CBRAIN source (will be replaced at test time) -# but having an initial installation speeds up bundling, -# migrations, etc. -# I would use --single-branch in the git clone command below, but -# it seems not all git packages support it. 
-RUN cd $HOME && \ - git clone --branch "$CBRAIN_BRANCH" --depth 2 "$CBRAIN_REPO" cbrain_base - -# Install and configure the portal -ENV RAILS_ENV=test - -RUN bash --login -c 'cd $HOME/cbrain_base/BrainPortal && gem install bundler' - -RUN bash --login -c 'cd $HOME/cbrain_base/BrainPortal && bundle install' - -RUN bash --login -c 'cd $HOME/cbrain_base/Bourreau && bundle install' - -RUN bash --login -c 'cd $HOME/cbrain_base/BrainPortal && cd $(bundle show sys-proctable) && rake install' - -COPY ./templates/database.yml.TEST /home/cbrain/cbrain_base/BrainPortal/config/database.yml - -COPY ./templates/config_portal.rb.TEST /home/cbrain/cbrain_base/BrainPortal/config/initializers/config_portal.rb - -# Seed the DB -USER root - -RUN chown cbrain /home/cbrain/cbrain_base/BrainPortal/config/database.yml && \ - chown cbrain /home/cbrain/cbrain_base/BrainPortal/config/initializers/config_portal.rb - -RUN su -c "bash --login -c 'cd \$HOME/cbrain_base/BrainPortal && rake cbrain:plugins:install:plugins'" cbrain - -RUN mysqld_safe & sleep 2 && \ - su -c "bash --login -c 'cd \$HOME/cbrain_base/BrainPortal && rake db:schema:load'" cbrain - -RUN mysqld_safe & sleep 2 && \ - su -c "bash --login -c 'cd \$HOME/cbrain_base/BrainPortal && rake db:seed'" cbrain - -RUN mysqld_safe & sleep 2 && \ - su -c "bash --login -c 'cd \$HOME/cbrain_base/BrainPortal && rake db:seed:test:bourreau'" cbrain - -RUN mysqld_safe & sleep 2 && \ - su -c "bash --login -c 'cd \$HOME/cbrain_base/BrainPortal && rake db:sanity:check'" cbrain - - - -######################################################## -# Cleanup files to make the image as small as possible # -######################################################## - -USER cbrain - -RUN bash --login -c 'rvm cleanup all' - -USER root - -RUN yum clean all - -# Not sure if next line won't interfere with future bundle updates... -# but then it saves just a few dozen megabytes. -RUN rm -rf /home/cbrain/.rvm/gems/ruby*/bundler/gems/*/.git - - - -######################### -# Ports and entry point # -######################### - -# This command will copy the code freshly extracted by travis -# and perform the rest of the setup needed to run the tests -# (migrate the DB, run rake tasks, run rspec); the path -# /home/cbrain/cbrain_travis is a mounted volume from the -# VM side. -CMD [ "/home/cbrain/cbrain_travis/Travis/bootstrap.sh" ] - -########### -# Volumes # -########### -# -# Only one volume is needed, it is Travis CI's own -# copy of the cbrain project to be tested. -# -# Note that this is distinct from -# -# /home/cbrain/cbrain_base -# -# which is where we did the initial installation here -# in this image, as a way to speed up the tests. - -VOLUME /home/cbrain/cbrain_travis - diff --git a/Travis/README.md b/Travis/README.md deleted file mode 100644 index faa5451f0..000000000 --- a/Travis/README.md +++ /dev/null @@ -1,28 +0,0 @@ - -# Continuous integration with [Travis CI](https://travis-ci.org/) - -This directory contain all the support files and scripts for testing CBRAIN within a Docker container running on a VM provided by Travis CI. - -The process works in two steps. - -* a docker container is prepared, once, and published on DockerHub or kept locally. This step is performed by the script `Travis/build_container.sh`. -* when Travis detects a push on a CBRAIN repo that it tracks, it will invoke (through the `.travis.yml` file a the top of CBRAIN's repo) the script `Travis/travis_ci.sh`. - -This script `travis_ci.sh` will launch the container prepared in the first step. 
- -The container will run `bootstrap.sh`, which is the main docker entry point, and as root it performs these two operations: - -* first it simply starts the MySQL DB server -* second it invokes, as user 'cbrain', the script `cb_run_tests.sh` which performs the setup necessary to run the test suites. - -## Trying it out locally - -The entire process can be tried locally without setting up all the Travis CI configuration, as long as Docker is available. - -```bash - cd Travis - bash build_container.sh -b hello/bye # "hello/bye" can be any container name of your choice - cd .. - env CBRAIN_CI_IMAGE_NAME=hello/bye bash Travis/travis_ci.sh # the container name can be given in argument too -``` - diff --git a/Travis/bootstrap.sh b/Travis/bootstrap.sh deleted file mode 100755 index 5e7ed9859..000000000 --- a/Travis/bootstrap.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -# This script is run as root as the entry point of -# the CBRAIN docker container for running tests; -# it basically just starts the DB server then -# invokes cb_run_tests.sh as user 'cbrain'. - -set -e # exit as soon as any command fails - -if test "$UID" -ne 0 ; then - echo "This script is meant to be run as the entry point" - echo "to the docker container for running tests." - exit 2 -fi - -MAGENTA='\033[35m' -NC='\033[0m' - -printf "${MAGENTA}Container bootstrap script starting at %s ${NC}\n" "$(date '+%F %T')" - -test_user="cbrain" # normal user to run test suite -test_script="cb_run_tests.sh" # the script for running the suite - -echo "Starting DB server as root" -mysqld_safe & -started="" -for n in 1 2 3 4 5 ; do - echo "Waiting for DB server ($n/5)" - sleep 3 - started=$(ps ax -o cmd | grep -v mysqld_safe | grep mysql | grep -v grep | head -1 | cut -d" " -f1 ) - test -n "$started" && break -done -if test -z "$started" ; then - echo "Error: cannot start DB server?!?" - exit 2 -else - echo "DB server started." - echo "" -fi - -echo "Running test script '$test_script' as user '$test_user'" -su -c "bash --login -c /home/cbrain/cbrain_travis/Travis/$test_script" $test_user - diff --git a/Travis/build_container.sh b/Travis/build_container.sh deleted file mode 100755 index e087e907e..000000000 --- a/Travis/build_container.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/bash -e - -# -# This bash script builds a docker image for testing CBRAIN. -# - -# Default values for building the docker container. -CBRAIN_REPO="https://github.com/aces/cbrain.git" -CBRAIN_BRANCH="dev" -IMAGE_NAME="mcin/cbrain_travis" - -function usage { - cat </dev/null - find . -print | sort | md5sum | cut -c1-32 - popd >/dev/null -} - -############### -# Main script # -############### - -# Install the code base to be tested -cd $HOME - -test -e "$cb_test" && ls -la && die "Oh oh, some directory '$cb_test' is in the way..." -cp -p -r "$cb_travis" "$cb_test" || die "Cannot copy the cbrain code base to '$cb_test'..." 
- -# Copy DB configuration file from the docker original install -cp -p "$cb_base"/BrainPortal/config/database.yml \ - "$cb_test"/BrainPortal/config/database.yml || die "Cannot copy DB configuration file" - -# Copy CBRAIN configuration file from the docker original install -cp -p "$cb_base"/BrainPortal/config/initializers/config_portal.rb \ - "$cb_test"/BrainPortal/config/initializers/config_portal.rb || die "Cannot copy CBRAIN configuration file" - -# Copy the symlinks for installed plugins -rsync -a --ignore-existing \ - "$cb_base"/BrainPortal/cbrain_plugins/installed-plugins/ \ - "$cb_test"/BrainPortal/cbrain_plugins/installed-plugins - -# Make sure RVM is loaded -source /home/cbrain/.bashrc -export RAILS_ENV=test - - - -# ------------------------------ -# Report Version Numbers -# ------------------------------ -echo "" -printf "${YELLOW}Versions of CBRAIN code base installed:${NC}\n" - -cd $cb_base/BrainPortal || die "Cannot cd to base BrainPortal directory" -printf "${BLUE}Container BASE CBRAIN:${NC} " -git log --date=iso -n 1 --pretty="%h by %an at %ad, %s" - -cd $cb_test/BrainPortal || die "Cannot cd to test BrainPortal directory" -printf "${BLUE}Travis CI TEST CBRAIN:${NC} " -git log --date=iso -n 1 --pretty="%h by %an at %ad, %s" -printf "${BLUE}Travis CI REV CBRAIN:${NC} "; script/show_cbrain_rev -z - -echo "" - - - -# ------------------------------ -# Portal-Side Re-Initializations -# ------------------------------ - -# Go to the new code to test -cd $cb_test/BrainPortal || die "Cannot cd to BrainPortal directory" - -# Prep all that needs to be prepared. With a bit of luck, bundle install -# will be quite quick given that when building the docker image we already -# ran it once in ~/cbrain_base. - -# Only bundle the gems if the Gemfile has changed -if ! cmp -s "$cb_base/BrainPortal/Gemfile" \ - "$cb_test/BrainPortal/Gemfile" ; then - printf "${YELLOW}Running Bundler on BrainPortal side.${NC}\n" - bundle install || die "Cannot bundle gems for the BrainPortal" -else - printf "${BLUE}No need to run the Bundler on BrainPortal side, yippee!${NC}\n" - cp -p "$cb_base/BrainPortal/Gemfile.lock" \ - "$cb_test/BrainPortal/Gemfile.lock" -fi - -# Only install the plugins if the list of plugins files has changed. -if test $(dir_list_cksum "$cb_base/BrainPortal/cbrain_plugins") != \ - $(dir_list_cksum "$cb_test/BrainPortal/cbrain_plugins") ; then - printf "${YELLOW}Installing plugins symbolic links.${NC}\n" - rake "cbrain:plugins:install:plugins" || die "Cannot install cbrain:plugins" # works for Bourreau too -else - printf "${BLUE}No need to install the plugins symbolic links, yippee!${NC}\n" -fi - - - -# -------------------------------- -# Bourreau-Side Re-Initializations -# -------------------------------- - -# Go to the new code to test -cd $cb_test/Bourreau || die "Cannot cd to Bourreau directory" - -# Only bundle the gems if the Gemfile has changed -if ! cmp -s "$cb_base/Bourreau/Gemfile" \ - "$cb_test/Bourreau/Gemfile" ; then - printf "${YELLOW}Running Bundler on Bourreau side.${NC}\n" - bundle install || die "Cannot bundle gems for the Bourreau" -else - printf "${BLUE}No need to run the Bundler on Bourreau side, yippee!${NC}\n" - cp -p "$cb_base/Bourreau/Gemfile.lock" \ - "$cb_test/Bourreau/Gemfile.lock" -fi - - - -# ------------------------------ -# Bring the DB up to date -# ------------------------------ - -# Prep steps that necessitates the DB to be ready. 
-cd $cb_test/BrainPortal || die "Cannot cd to BrainPortal directory" - -# Only migrate if the list of migration files have changed. -if test $(dir_list_cksum "$cb_base/BrainPortal/db/migrate") != \ - $(dir_list_cksum "$cb_test/BrainPortal/db/migrate") ; then - printf "${YELLOW}Running the database migrations.${NC}\n" - rake "db:migrate" || die "Cannot migrate the DB" -else - printf "${BLUE}No need to migrate the DB, yippee!${NC}\n" -fi - -# This cannot be avoided. -printf "${YELLOW}Running the database sanity checks.${NC}\n" -rake "db:sanity:check" || die "Cannot sanity check DB" - - - -# ------------------------------- -# Show TEST environment variables -# ------------------------------- -# These environment variable allow the -# user to skip over some test stages, -# or make the API tests more verbose. -printf "${BLUE}Environment variables for this test session:${NC}\n" -echo "" -echo "General control:" -printf "${YELLOW}CBRAIN_SKIP_TEST_STAGES${NC} = '${CBRAIN_SKIP_TEST_STAGES:=unset}'\n" -printf "Possible values: \"RspecPortal,RspecBourreau,CurlAPI,GemAPI\"\n" -echo "" -echo "API test control:" -printf "${YELLOW}CBRAIN_CURL_TEST_VERBOSE_LEVEL${NC} = '${CBRAIN_CURL_TEST_VERBOSE_LEVEL:=1}'\n" -printf "${YELLOW}CBRAIN_CURL_TEST_FILTER${NC} = '${CBRAIN_CURL_TEST_FILTER}'\n" -printf "${YELLOW}CBRAIN_GEM_TEST_VERBOSE_LEVEL${NC} = '${CBRAIN_GEM_TEST_VERBOSE_LEVEL:=1}'\n" -printf "${YELLOW}CBRAIN_GEM_TEST_FILTER${NC} = '${CBRAIN_GEM_TEST_FILTER}'\n" -echo "" - - - -# ------------------------------ -# Finally, run the tests! -# ------------------------------ -# We save the failures of the main test commands in strings. -# That way we run them all and report everything at the end. -fail_portal="" -fail_bourreau="" -fail_api_curl="" -fail_api_ruby="" - - - -# ------------------------------ -# Portal-Side Testing -# ------------------------------ -cd $cb_test/BrainPortal || die "Cannot cd to BrainPortal directory" - -# Eventually, it would be nice if from a ENV variable set in Travis, -# we could run only a subset of the tests. -printf "${BLUE}Running rspec on BrainPortal side.${NC}\n" -if echo "X$CBRAIN_SKIP_TEST_STAGES" | grep -q 'RspecPortal' >/dev/null ; then - printf "${YELLOW} -> Skipped by request from env CBRAIN_SKIP_TEST_STAGES${NC}\n" -else - rspec spec || fail_portal="rspec on BrainPortal failed with return code $?" -fi -#CBRAIN_FAILTEST=1 rspec spec/modules/travis_ci_spec.rb || fail_portal="rspec on BrainPortal failed with return code $?" - - - -# ------------------------------ -# Bourreau-Side Testing -# ------------------------------ -cd $cb_test/Bourreau || die "Cannot cd to Bourreau directory" - -# Eventually, it would be nice if from a ENV variable set in Travis, -# we could run only a subset of the tests. -# -> NOTE FIXME TODO : hardcoded 'spec/boutiques' for <- -# -> the moment because no other test files work on Bourreau. <- -printf "${BLUE}Running rspec on Bourreau side.${NC}\n" -if echo "X$CBRAIN_SKIP_TEST_STAGES" | grep -q 'RspecBourreau' >/dev/null ; then - printf "${YELLOW} -> Skipped by request from env CBRAIN_SKIP_TEST_STAGES${NC}\n" -else - rspec spec/boutiques || fail_bourreau="rspec on Bourreau failed with return code $?" 
-fi - - - -# ------------------------------ -# Testing of API (curl) -# ------------------------------ -printf "${BLUE}Running API tests with curl.${NC}\n" -if echo "X$CBRAIN_SKIP_TEST_STAGES" | grep -q 'CurlAPI' >/dev/null ; then - printf "${YELLOW} -> Skipped by request from env CBRAIN_SKIP_TEST_STAGES${NC}\n" -else - cd $cb_test/BrainPortal || die "Cannot cd to BrainPortal directory" - rake "db:seed:test:api" >/dev/null || die "Cannot re-seed the DB for API testing" - rails server puma -p 3000 -d || die "Cannot start local puma server?" - cd test_api || die "Cannot cd to test_api directory?" - sleep 5 # must wait a bit for puma to be ready - perl curl_req_tester.pl \ - -h localhost \ - -p 3000 \ - -s http \ - -v"${CBRAIN_CURL_TEST_VERBOSE_LEVEL}" \ - -R \ - ${CBRAIN_CURL_TEST_FILTER} \ - || fail_api_curl="API testing with CURL failed" - kill $(cat $cb_test/BrainPortal/tmp/pids/server.pid) -fi - - - -# ------------------------------ -# Testing of API (Ruby Gem) -# ------------------------------ -printf "${BLUE}Running API tests with Ruby CbrainClient gem.${NC}\n" -if echo "X$CBRAIN_SKIP_TEST_STAGES" | grep -q 'GemAPI' >/dev/null ; then - printf "${YELLOW} -> Skipped by request from env CBRAIN_SKIP_TEST_STAGES${NC}\n" -else - cd $cb_test/BrainPortal || die "Cannot cd to BrainPortal directory" - rake "db:seed:test:api" >/dev/null || die "Cannot re-seed the DB for API testing" - rails server puma -p 3000 -d || die "Cannot start local puma server?" - cd test_api || die "Cannot cd to test_api directory?" - sleep 5 # must wait a bit for puma to be ready - rake "cbrain:test:api:client" \ - -v "${CBRAIN_GEM_TEST_VERBOSE_LEVEL}" \ - ${CBRAIN_GEM_TEST_FILTER} \ - || fail_api_ruby="API testing with Ruby CbrainClient failed" - kill $(cat $cb_test/BrainPortal/tmp/pids/server.pid) -fi - - - -# ------------------------------ -# Return status of both rspec -# ------------------------------ -test -z "$fail_portal$fail_bourreau$fail_api_curl$fail_api_ruby" && exit 0 # Pangloss -echo "" -printf "${YELLOW}**** Summary of command failures ****${NC}\n" -test -n "$fail_portal" && printf "${RED}$fail_portal${NC}\n" -test -n "$fail_bourreau" && printf "${RED}$fail_bourreau${NC}\n" -test -n "$fail_api_curl" && printf "${RED}$fail_api_curl${NC}\n" -test -n "$fail_api_ruby" && printf "${RED}$fail_api_ruby${NC}\n" -printf "${YELLOW}**** --------------------------- ****${NC}\n" -echo "" -exit 2 - diff --git a/Travis/templates/config_portal.rb.TEST b/Travis/templates/config_portal.rb.TEST deleted file mode 100644 index d27861229..000000000 --- a/Travis/templates/config_portal.rb.TEST +++ /dev/null @@ -1,13 +0,0 @@ - -# -# CBRAIN Project -# -# Configuration file for BrainPortal. -# This is file is used within a docker -# container for running the test suite. -# - -class CBRAIN - CBRAIN_RAILS_APP_NAME = "CbrainPortalTravisTesting" -end - diff --git a/Travis/templates/database.yml.TEST b/Travis/templates/database.yml.TEST deleted file mode 100644 index 4e19a77f0..000000000 --- a/Travis/templates/database.yml.TEST +++ /dev/null @@ -1,16 +0,0 @@ - -# -# CBRAIN Project -# -# DB configuration file for BrainPortal. -# This is file is used within a docker -# container for running the test suite. 
-# - -test: - adapter: mysql2 - host: localhost - username: cb_user - database: cb_db_test - password: cbpw12345 - encoding: utf8 diff --git a/Travis/travis_ci.sh b/Travis/travis_ci.sh deleted file mode 100755 index b524ac98f..000000000 --- a/Travis/travis_ci.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash - -############################################################################### -# # -# This script is used by Travis CI (https://travis-ci.org/) to run the # -# CBRAIN test suite. # -# # -# The script expects a testing docker container to already have been built # -# and made available from the local system. The name of that docker image is # -# expected in the environment variable $CBRAIN_CI_IMAGE_NAME, or given as a # -# first argument to the script. # -# # -# This script does the following: # -# - Invoke the container which runs the test suite # -# - Wait for the container to finish, dumping its logs on the way # -# - Returns the return code (and possibly diagnostics) of the suite. # -# # -############################################################################### - -# Terminal colors, using ANSI sequences. -RED='\033[31m' -GREEN='\033[32m' -MAGENTA='\033[35m' -NC='\033[0m' - -# Do we even have a Travis+Docker environment set up ? -if test ! -d Travis ; then - printf "${RED}No 'Travis' subdirectory found.${NC}\n" - echo "Please invoke this program from the root of the CBRAIN project." - exit 2 # config error -fi -cbrain_travis="`pwd -P`" # Root of where the code to test is located. -cd Travis || exit 2 - -# Do we have a docker image name to run? -CBRAIN_CI_IMAGE_NAME=${CBRAIN_CI_IMAGE_NAME:-$1} # can be given as argument -if test "X$CBRAIN_CI_IMAGE_NAME" = "X" ; then - printf "${RED}No CBRAIN_CI_IMAGE_NAME environment variable supplied.${NC}\n" - exit 2 # config error -fi - -# Count time -SECONDS=0 # bash is great - -# Run the docker containers -printf "${MAGENTA}Launching CBRAIN test container at %s ${NC}\n" "$(date '+%F %T')" - -# Note: to skip stages, set CBRAIN_SKIP_TEST_STAGES to -# one or several of the keywords 'RspecPortal', 'RspecBourreau', -# 'CurlAPI' or 'GemAPI', joined by commas or periods. -docker_name="cb_travis" # pretty name of the process -docker run -d \ - --env CBRAIN_SKIP_TEST_STAGES \ - --env CBRAIN_CURL_TEST_VERBOSE_LEVEL \ - --env CBRAIN_CURL_TEST_FILTER \ - --env CBRAIN_GEM_TEST_VERBOSE_LEVEL \ - --env CBRAIN_GEM_TEST_FILTER \ - -v "$cbrain_travis":/home/cbrain/cbrain_travis \ - --name "$docker_name" \ - ${CBRAIN_CI_IMAGE_NAME} | perl -ne 'print unless /^[0-9a-f]{64}\n$/' -if [ $? -ne 0 ] ; then - printf "${RED}Docker Start Failed. So sorry.${NC}\n" - exit 10 # partial abomination -fi - -# Print logs (always, by request). -# Also Travis CI will abort the test if nothing is printed for too long. -echo "" -printf "${MAGENTA}==== Docker logs start here ====${NC}\n" -docker logs ${docker_name} --follow -printf "${MAGENTA}==== Docker logs end here ====${NC}\n" -echo "" -test_exit_code=$(docker wait ${docker_name}) -docker rm ${docker_name} >/dev/null || true -printf "${MAGENTA}Docker container finished after $SECONDS seconds.${NC}\n" -echo "" - -# Final Results -if [ "X$test_exit_code" != "X0" ] ; then - printf "${RED}===================================================${NC}\n" - printf "${RED}Tests Failed${NC} - 'docker wait' exit code: $test_exit_code\n" - printf "${RED}===================================================${NC}\n" - exit 20 # total abomination -fi - -# Yippee. 
-printf "${GREEN}===================================================${NC}\n" -printf "${GREEN}All tests Passed${NC}\n" -printf "${GREEN}===================================================${NC}\n" - -# Important, eh, oh, not kidding here. -exit 0 - diff --git a/cbrain_file_revisions.csv b/cbrain_file_revisions.csv index a53a15cbd..a9d46ac4f 100644 --- a/cbrain_file_revisions.csv +++ b/cbrain_file_revisions.csv @@ -1,10 +1,9 @@ -6.2.0 -#- 2022-01-28 14:28:08 -0500 -#- CBRAIN Team -#- __CBRAIN_TAG__ -fc85bf12a645af586c914e24ba1badd087377ac1 -#- 2022-01-28 11:50:17 -0500 -#- Pierre Rioux -#- __CBRAIN_HEAD__ +6.3.0 -#- 2023-01-26 14:22:35 -0500 -#- CBRAIN Team -#- __CBRAIN_TAG__ +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- __CBRAIN_HEAD__ 5b22b5d61971a45646ded4ff502025ce5a83eaa8 -#- 2019-07-04 14:12:52 -0400 -#- Shawn T. Brown -#- .github/CODE_OF_CONDUCT.md -9dec9e0176b34764197d134ba1dc6f61499012f9 -#- 2021-02-12 13:05:13 -0500 -#- Pierre Rioux -#- .github/workflows/cbrain_ci.yaml +69c1caadd61820cfedd9c44cf247ab6a4f7812ee -#- 2023-01-17 17:06:54 -0500 -#- Pierre Rioux -#- .github/workflows/cbrain_ci.yaml 1b38ded2d16ee2b163149d0ae9bea35afdef47bd -#- 2021-01-21 10:06:59 -0500 -#- Pierre Rioux -#- .github/workflows/scripts/make_cbrain_app_name_rb.sh 1b38ded2d16ee2b163149d0ae9bea35afdef47bd -#- 2021-01-21 10:06:59 -0500 -#- Pierre Rioux -#- .github/workflows/scripts/make_database_yml.sh -1d321c6d4efcf45aea7898104ebd6b9378f69717 -#- 2021-01-21 10:47:46 -0500 -#- Pierre Rioux -#- .travis.yml.old 81add9599705a5219617814a4fec5cb023004cf9 -#- 2017-12-07 19:19:03 -0500 -#- Pierre Rioux -#- Bourreau/.gitignore 4ed47fcc8c35e42a8831de5d67bdf7a18a1e7d9d -#- 2016-12-02 17:53:51 -0500 -#- Pierre Rioux -#- Bourreau/.rspec 58fc182e0053dd048659744080645af2eedae126 -#- 2020-03-10 16:42:40 -0400 -#- Pierre Rioux -#- Bourreau/Gemfile @@ -30,8 +29,10 @@ e1908309c4ddae35b28c0170b696744b87833e6d -#- 2020-12-07 16:09:34 -0500 -#- Pierr d8351b666a97a4eefbb2768c8cc785d8cc968e46 -#- 2009-08-03 22:56:02 +0000 -#- prioux -#- Bourreau/app/models/active_record_log.rb 9159ab6b65ee5f1cdc673bb8586e2c3d5ae3fb8a -#- 2012-04-16 18:30:37 -0400 -#- Tarek Sherif -#- Bourreau/app/models/admin_user.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- Bourreau/app/models/application_record.rb +dc14b6bc410b3a77a20e409815c9cd93e55cd2c7 -#- 2022-06-21 12:47:44 -0400 -#- Pierre Rioux -#- Bourreau/app/models/automated_user.rb 0765162a23b328e9f2cdea637f8bdaba242cb370 -#- 2009-07-08 15:24:14 +0000 -#- prioux -#- Bourreau/app/models/bourreau.rb -b43298eb375d0754efac4a77e425610238fe8ea3 -#- 2019-09-10 14:53:58 -0400 -#- Pierre Rioux -#- Bourreau/app/models/bourreau_worker.rb +8d67b48924a8f2dde0ce4ac5160b091007023e37 -#- 2022-06-28 13:13:44 -0400 -#- Pierre Rioux -#- Bourreau/app/models/bourreau_worker.rb +96e7e41e2d7f24c610ce530b1febe69981241865 -#- 2022-06-02 15:24:23 -0400 -#- Pierre Rioux -#- Bourreau/app/models/boutiques_allowed_exit_codes.rb e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- Bourreau/app/models/boutiques_cluster_task.rb 3c42d39e29a5851da9f8c6d5ca47e79cf866d4a9 -#- 2021-10-18 10:19:27 -0400 -#- Pierre Rioux -#- Bourreau/app/models/boutiques_portal_task.rb 85f29bd99c3a75cda8399b815c70caf62d476421 -#- 2009-10-05 20:48:07 +0000 -#- prioux -#- Bourreau/app/models/brain_portal.rb @@ -43,6 +44,8 @@ ddd240615fd0852904e3d551948d010dca7d40cc -#- 2011-04-19 19:32:10 +0000 -#- priou 
4551c18d1922bbd8cc3200c69cba1033c16bb24a -#- 2012-10-31 16:54:43 -0400 -#- Tarek Sherif -#- Bourreau/app/models/core_admin.rb a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierre Rioux -#- Bourreau/app/models/cputime_resource_usage_for_cbrain_task.rb 5851dc66949bcd4456c928af52e97cb5cc1155dd -#- 2009-06-11 00:17:37 +0000 -#- prioux -#- Bourreau/app/models/data_provider.rb +2452e06d395848773e24b6b70ade52d7e8fb1097 -#- 2022-03-24 16:19:50 -0400 -#- Pierre Rioux -#- Bourreau/app/models/data_usage.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- Bourreau/app/models/disk_quota.rb a5dd982df172b02416f06efc996c4190ed0ae1a7 -#- 2010-03-24 23:54:26 +0000 -#- prioux -#- Bourreau/app/models/en_cbrain_local_data_provider.rb a5dd982df172b02416f06efc996c4190ed0ae1a7 -#- 2010-03-24 23:54:26 +0000 -#- prioux -#- Bourreau/app/models/en_cbrain_smart_data_provider.rb a5dd982df172b02416f06efc996c4190ed0ae1a7 -#- 2010-03-24 23:54:26 +0000 -#- prioux -#- Bourreau/app/models/en_cbrain_ssh_data_provider.rb @@ -68,6 +71,7 @@ a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierr 8516b84c302d7a76248c9960852124cba97ca747 -#- 2009-12-06 02:33:08 +0000 -#- prioux -#- Bourreau/app/models/restricted_hash.rb 0d2a532305cd7de3a77a5d988089f8dc36e0033e -#- 2012-01-23 12:23:22 -0500 -#- Nicolas Kassis -#- Bourreau/app/models/s3_data_provider.rb 06ad56f51e47fe6a1124ebd6b8ee1961a2f51459 -#- 2019-01-07 13:59:53 -0500 -#- Pierre Rioux -#- Bourreau/app/models/s3_flat_data_provider.rb +aacfa25ed1aaa071be10bb03016672afe174618e -#- 2022-08-23 14:47:22 -0400 -#- Pierre Rioux -#- Bourreau/app/models/s3_multi_level_data_provider.rb 906e424ae61efd8c2f915b0423cae6d22117b1ec -#- 2012-05-08 15:44:59 -0400 -#- Pierre Rioux -#- Bourreau/app/models/sanity_check.rb ed20f731fb3ffdaf84da14226608f17918943da5 -#- 2017-12-19 21:10:56 -0500 -#- Pierre Rioux -#- Bourreau/app/models/scratch_data_provider.rb 3c42d39e29a5851da9f8c6d5ca47e79cf866d4a9 -#- 2021-10-18 10:19:27 -0400 -#- Pierre Rioux -#- Bourreau/app/models/sing_bindmount_data_provider.rb @@ -111,7 +115,7 @@ b8f1777f7ac969954ba851fc142c99136af89716 -#- 2010-02-16 21:01:18 +0000 -#- anton 31d70e8c1a243a3b7513e5d64e3c7a89dfcc6fb5 -#- 2011-04-26 22:21:12 +0000 -#- prioux -#- Bourreau/cbrain_plugins 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- Bourreau/config.ru 73d1908294f5d0a218c07e2ce6428e273fb600d1 -#- 2015-04-01 15:24:59 -0400 -#- Natacha Beck -#- Bourreau/config/.gitignore -44f2a622fac0a0cf463817858e3e41bd09a7d598 -#- 2018-03-06 16:37:20 -0500 -#- Pierre Rioux -#- Bourreau/config/application.rb +db8b1958d2c89ab4107cc6912b4969db0b542377 -#- 2022-08-25 15:52:02 -0400 -#- Pierre Rioux -#- Bourreau/config/application.rb 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- Bourreau/config/boot.rb 925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/config/cable.yml 5815364e74d0880b767568255e919b55b3f888dc -#- 2015-12-11 17:12:26 -0500 -#- Pierre Rioux -#- Bourreau/config/console_rc @@ -134,26 +138,32 @@ f2d9036347c5409ac5b5d511418240310574eeb1 -#- 2009-10-16 21:02:26 +0000 -#- priou 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- Bourreau/config/initializers/mime_types.rb 81add9599705a5219617814a4fec5cb023004cf9 -#- 2017-12-07 19:19:03 -0500 -#- Pierre Rioux -#- Bourreau/config/initializers/new_framework_defaults.rb 
b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierre Rioux -#- Bourreau/config/initializers/session_store.rb -d280ff2acacf9dda97f7ea7d8d625b570584f83e -#- 2019-03-18 20:54:43 -0400 -#- Pierre Rioux -#- Bourreau/config/initializers/validation_bourreau.rb +05478d8123f2cacbad66835e9acb9f4995e057ca -#- 2022-07-28 18:21:02 -0400 -#- Pierre Rioux -#- Bourreau/config/initializers/validation_bourreau.rb 925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/config/initializers/wrap_parameters.rb 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- Bourreau/config/locales/en.yml -925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/config/puma.rb +7408a463060fcf6761c8ab60eeb657a40a1408d5 -#- 2022-05-11 12:55:58 -0400 -#- Pierre Rioux -#- Bourreau/config/puma.rb 6f15a264d630eaedd1a35e802bda1135dea0d60d -#- 2020-02-21 14:37:55 -0500 -#- Pierre Rioux -#- Bourreau/config/routes.rb -925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/config/secrets.yml +db8b1958d2c89ab4107cc6912b4969db0b542377 -#- 2022-08-25 15:52:02 -0400 -#- Pierre Rioux -#- Bourreau/config/secrets.yml 925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/config/spring.rb a1848dfdd07ce34f15d55e3bed31cac13a7005d1 -#- 2011-09-30 14:35:00 -0400 -#- Pierre Rioux -#- Bourreau/doc/.gitignore 9f12607a0561aa336b30a17d551215d68909f08b -#- 2010-07-06 19:11:57 +0000 -#- prioux -#- Bourreau/lib/act_rec_log.rb 9f12607a0561aa336b30a17d551215d68909f08b -#- 2010-07-06 19:11:57 +0000 -#- prioux -#- Bourreau/lib/act_rec_meta_data.rb 925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- Bourreau/lib/assets/.keep -01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- Bourreau/lib/bourreau_system_checks.rb +05478d8123f2cacbad66835e9acb9f4995e057ca -#- 2022-07-28 18:21:02 -0400 -#- Pierre Rioux -#- Bourreau/lib/bourreau_system_checks.rb e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- Bourreau/lib/boutiques_boot_integrator.rb 520284a6220fe2d84a485f2ab3cf17215ab78484 -#- 2021-11-30 10:36:41 -0500 -#- Pierre Rioux -#- Bourreau/lib/boutiques_file_name_matcher.rb 520284a6220fe2d84a485f2ab3cf17215ab78484 -#- 2021-11-30 10:36:41 -0500 -#- Pierre Rioux -#- Bourreau/lib/boutiques_file_type_verifier.rb +aacfa25ed1aaa071be10bb03016672afe174618e -#- 2022-08-23 14:47:22 -0400 -#- Pierre Rioux -#- Bourreau/lib/boutiques_forced_output_browse_path.rb +01b3f0e199a76c40be04f4880bce7fa717f7efd5 -#- 2022-03-10 15:00:43 -0500 -#- MontrealSergiy -#- Bourreau/lib/boutiques_input_cache_cleaner.rb +01a23d453aa7a0a4c2515f473794e1a25f02c8b2 -#- 2022-09-28 16:00:29 -0400 -#- Natacha Beck -#- Bourreau/lib/boutiques_input_subdir_maker.rb +b9412989844a95a7f705629ce667f7ce8723c48d -#- 2022-03-16 12:24:43 -0400 -#- Pierre Rioux -#- Bourreau/lib/boutiques_output_cache_cleaner.rb f00997eb767f27e6a17f23a6267d2ddbb0d22b0b -#- 2022-01-25 11:56:35 -0500 -#- Pierre Rioux -#- Bourreau/lib/boutiques_output_file_type_setter.rb +ba92b49d171adfc4984b3b97e16237e49d96de13 -#- 2022-08-09 15:40:20 -0400 -#- Pierre Rioux -#- Bourreau/lib/boutiques_output_filename_renamer.rb a16ee1122476fe433314f20182e00ed239dab202 -#- 2022-01-27 15:20:20 -0500 -#- Pierre Rioux -#- Bourreau/lib/boutiques_post_processing_cleaner.rb 
e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- Bourreau/lib/boutiques_support.rb ad2feb109b3ff7bcc50145198947d151d177c083 -#- 2010-03-29 19:06:02 +0000 -#- prioux -#- Bourreau/lib/cbrain_checker.rb df10525ea5250f8328a3f867968b6238f1f78dbb -#- 2011-09-07 11:25:47 -0400 -#- Pierre Rioux -#- Bourreau/lib/cbrain_delete_restriction_error.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- Bourreau/lib/cbrain_disk_quota_exceeded.rb df10525ea5250f8328a3f867968b6238f1f78dbb -#- 2011-09-07 11:25:47 -0400 -#- Pierre Rioux -#- Bourreau/lib/cbrain_error.rb f2d9036347c5409ac5b5d511418240310574eeb1 -#- 2009-10-16 21:02:26 +0000 -#- prioux -#- Bourreau/lib/cbrain_exception.rb f3e1362e37f4f4395c88a7bd1d95ef830bd5e32c -#- 2012-04-24 19:57:20 -0400 -#- Pierre Rioux -#- Bourreau/lib/cbrain_extensions @@ -197,11 +207,12 @@ c637666465dfd4deb3339634fbe50198dbca574d -#- 2019-05-06 12:50:19 -0400 -#- Pierr c7d4662266d4eb7485c3611989d3a6283d59e014 -#- 2015-03-16 17:17:05 -0400 -#- Pierre Rioux -#- Bourreau/lib/tasks/cbrain_plugins.rake 65d87ed48def4ba99bcb5ad825770395b89dde5d -#- 2016-09-22 12:50:31 -0400 -#- Pierre Rioux -#- Bourreau/lib/username_format_validator.rb 077511bc3a7a9c3674df2d6eee186c2741b3ee9e -#- 2015-01-05 16:56:36 -0500 -#- Pierre Rioux -#- Bourreau/log/.gitignore -498a3ad929c4805ce141004b7967ee0dafbd6e0c -#- 2019-03-13 15:07:23 -0400 -#- Pierre Rioux -#- Bourreau/script/cbrain_bashrc -54a88a4a69960e8a3afc326a431b63f46460079b -#- 2020-12-17 16:49:15 -0500 -#- Pierre Rioux -#- Bourreau/script/cbrain_remote_ctl -598063ef6c013421ed34a9202fb6ab9e19e1fe33 -#- 2020-12-17 15:05:26 -0500 -#- Pierre Rioux -#- Bourreau/script/cbrain_remote_ctl_rb +95b1ac6ea3f36a79aeeb97b324c6dc99943cb3de -#- 2022-04-15 16:08:28 -0400 -#- Pierre Rioux -#- Bourreau/script/cbrain_bashrc +ece4059c3f7a6a67920d99a58a366ae782e79096 -#- 2022-05-11 12:25:33 -0400 -#- Pierre Rioux -#- Bourreau/script/cbrain_remote_ctl +cca6623d1141f6f21c9bfcb2c181474927f8adbc -#- 2022-04-26 12:20:30 -0400 -#- Pierre Rioux -#- Bourreau/script/puma_wait_wrapper de3216d5876d326eab6a0b190e4ec24f4c054626 -#- 2011-07-18 17:16:57 -0400 -#- Pierre Rioux -#- Bourreau/script/rails 9c2f8ed85c34be82bd39ace7737020baa301f385 -#- 2011-09-18 21:09:54 -0400 -#- Pierre Rioux -#- Bourreau/script/show_cbrain_rev +ece4059c3f7a6a67920d99a58a366ae782e79096 -#- 2022-05-11 12:25:33 -0400 -#- Pierre Rioux -#- Bourreau/script/watcher 731baec7c2995ac50041d9e28fe7e25344f5fba1 -#- 2016-06-21 12:52:11 -0400 -#- Tristan A.A -#- Bourreau/spec/boutiques/boutiquesTestApp.rb c1775e1f31c5a67c8f6eca63ef01299ef1902230 -#- 2020-09-23 12:56:58 -0400 -#- Pierre Rioux -#- Bourreau/spec/boutiques/boutiques_tester_spec.rb 731baec7c2995ac50041d9e28fe7e25344f5fba1 -#- 2016-06-21 12:52:11 -0400 -#- Tristan A.A -#- Bourreau/spec/boutiques/descriptor_test.json @@ -247,52 +258,52 @@ c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candi 495db4c873eeb3294c04187047ce1c3a1ffc6a0e -#- 2020-09-23 11:54:45 -0400 -#- MontrealSergiy -#- BrainPortal/app/assets/stylesheets/SCSS_intro_tutorial.md 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/application.css 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/boutiques.css -5ebe8b1b59dd889d5bfc24857ba55754394527b9 -#- 2021-11-25 15:31:16 -0500 -#- Natacha Beck -#- 
BrainPortal/app/assets/stylesheets/cbrain.css.erb +af638f35f79135a3bc47e2451284000c92f43ee6 -#- 2023-01-24 13:11:36 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/cbrain.css.erb 25dcb04652c4d89bf42e309573ef87c430d28365 -#- 2019-12-19 18:06:25 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/chosen_1.8.7.scss.erb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/dynamic-table.css.erb -2c9a78c7bae975c15f0fb81472992af41b952399 -#- 2021-11-23 13:13:18 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/neurohub.scss.erb +5cc2e8488608abd4a90eae0a18cd865f8159147a -#- 2022-04-18 09:34:44 -0400 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/neurohub.scss.erb a3cbb977f0a16799006c5a5621aa03a21a79d684 -#- 2022-01-13 13:16:05 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/noc.css 6bfb9ee4872597d9d6025f19c33c4bd0dbad1943 -#- 2020-04-02 11:05:34 -0400 -#- candicecz -#- BrainPortal/app/assets/stylesheets/normalize.css 7831421f48428776b8930222b39a19e2ec0859dd -#- 2020-02-19 18:11:44 -0500 -#- Pierre Rioux -#- BrainPortal/app/assets/stylesheets/userfiles.css.erb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/channels/application_cable/channel.rb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/channels/application_cable/connection.rb a10eb11c8c6a02be7ff7812c9015db9f71f6183d -#- 2020-05-25 14:17:10 -0400 -#- candicecz -#- BrainPortal/app/controllers/access_profiles_controller.rb -a46bed2d34bdabdae4f5d245ceeb42414d3feab3 -#- 2021-12-16 11:57:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/application_controller.rb -14f4e56338eece4f06fa9499f90ddf305df38d04 -#- 2021-01-29 12:37:07 -0500 -#- MontrealSergiy -#- BrainPortal/app/controllers/bourreaux_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/application_controller.rb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/bourreaux_controller.rb 1caba8617c594cea068b3cfd4e9d3e83b19b1566 -#- 2021-06-23 19:48:45 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/carmin_controller.rb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/concerns/.keep -6f15a264d630eaedd1a35e802bda1135dea0d60d -#- 2020-02-21 14:37:55 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/controls_controller.rb +82579a68d2919f9748b110b9bb9adbaa56709d13 -#- 2022-04-23 11:01:17 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/controls_controller.rb c1275dbe2f97b577a66c42251ecd0fc038e5f9c6 -#- 2019-10-22 14:23:14 -0400 -#- Natacha Beck -#- BrainPortal/app/controllers/custom_filters_controller.rb -167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/data_providers_controller.rb +c8d2b2b55957a43fbb1cd407cbcd4c1da02118b3 -#- 2022-09-22 12:29:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/data_providers_controller.rb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/disk_quotas_controller.rb a501bd2327c66d20f5f72f761a099d32157f2436 -#- 2019-05-07 14:26:03 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/exception_logs_controller.rb 
-a40f9246549c5fc488ce81a46cc312e7daa4b085 -#- 2020-10-26 13:03:08 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/groups_controller.rb +ca0d9a40fa5d6ed3dbfb8bea7b6d0e203b022f5a -#- 2022-05-03 17:00:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/groups_controller.rb 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/app/controllers/help_documents_controller.rb 44b3c2352579546659c9e395423c42e45230ce10 -#- 2020-10-26 13:12:26 -0400 -#- MontrealSergiy -#- BrainPortal/app/controllers/invitations_controller.rb -fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/controllers/messages_controller.rb -fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/controllers/neurohub_application_controller.rb -df83dc571792dcee95506b370f2a25f29ba4a627 -#- 2020-07-23 23:30:08 -0400 -#- Natacha Beck -#- BrainPortal/app/controllers/neurohub_portal_controller.rb +5cc2e8488608abd4a90eae0a18cd865f8159147a -#- 2022-04-18 09:34:44 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/messages_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/neurohub_application_controller.rb +7c73cae7f8a1a8e78a91ec90566fcd7158620577 -#- 2022-04-16 13:08:30 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/neurohub_portal_controller.rb 783f532abb91f341b2725b6263209513a463a372 -#- 2021-11-24 15:39:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_invitations_controller.rb -46f47333b1158fecbf484793db1799315aa2b9f7 -#- 2021-04-27 20:53:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_loris_hooks_controller.rb +65de15a23183bd21f5fb70cbc77f62cdd2dd07c0 -#- 2023-01-17 17:15:09 -0500 -#- Natacha Beck -#- BrainPortal/app/controllers/nh_loris_hooks_controller.rb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/controllers/nh_messages_controller.rb 783f532abb91f341b2725b6263209513a463a372 -#- 2021-11-24 15:39:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_projects_controller.rb -ebea9d509fbad76c57a7ec56ca0d68505c50307a -#- 2021-09-28 17:16:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_sessions_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_sessions_controller.rb cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_signups_controller.rb 4258713e1770ed84ebb86d8a9e741e7f4c196949 -#- 2020-04-28 14:59:37 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_storages_controller.rb -ebea9d509fbad76c57a7ec56ca0d68505c50307a -#- 2021-09-28 17:16:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_users_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/nh_users_controller.rb bbe84171a95aa429a2ad10f9fac21e8a57462aa5 -#- 2022-01-21 09:29:09 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/noc_controller.rb -a20bce6401ec83f4467059cc0b87614712f3ad25 -#- 2021-05-26 12:36:46 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/portal_controller.rb +3ea9d1a35be7d67be6fbd2e1f7f02a7ea52fc6fb -#- 2023-01-20 14:25:17 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/portal_controller.rb 
ffad932287be1a353dc897abc17d2627e32f6c81 -#- 2022-01-24 15:33:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/resource_usage_controller.rb -2b5dbbafc9dfd1007fa5b1a0e0a252083bafd49a -#- 2021-12-17 09:13:58 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/service_controller.rb 81add9599705a5219617814a4fec5cb023004cf9 -#- 2017-12-07 19:19:03 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/session_data_controller.rb -3c42d39e29a5851da9f8c6d5ca47e79cf866d4a9 -#- 2021-10-18 10:19:27 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/sessions_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/sessions_controller.rb 829927d60e29ac174138747cd8ffc81590422280 -#- 2021-11-08 11:24:04 -0500 -#- Natacha Beck -#- BrainPortal/app/controllers/signups_controller.rb 46f29825aa58db78d614bdeb34a2e4af87427a4b -#- 2018-02-06 17:52:47 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/sites_controller.rb 24c25b3474801590604484e5f76fda4a954fea13 -#- 2020-04-08 17:02:58 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/tags_controller.rb -1915a6d98691933156527eaca30d704236345853 -#- 2022-01-27 15:28:13 -0500 -#- Serge -#- BrainPortal/app/controllers/tasks_controller.rb -ad34090533e1fa43108204c3dbd7d26a278a6916 -#- 2021-09-13 17:01:28 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/tool_configs_controller.rb +0e38917df7e388b14bcdb9b1d0a573f71e837983 -#- 2023-01-20 12:39:24 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/tasks_controller.rb +c84b685e74aa92693567347baaefe565c8396797 -#- 2022-05-24 16:19:40 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/tool_configs_controller.rb b97cfc3e896d6dee5e8c9da5690e9f24010d1205 -#- 2020-11-19 16:58:45 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/tools_controller.rb -2efd64c70bc056b6dfcaf3aa77add099ab6acf18 -#- 2021-06-07 16:31:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/userfiles_controller.rb -c5a7e4e8cf1a5dad8624faacca12e7aa6b9df1e8 -#- 2021-11-25 16:02:10 -0500 -#- Natacha Beck -#- BrainPortal/app/controllers/users_controller.rb +0e38917df7e388b14bcdb9b1d0a573f71e837983 -#- 2023-01-20 12:39:24 -0500 -#- Pierre Rioux -#- BrainPortal/app/controllers/userfiles_controller.rb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/controllers/users_controller.rb c2782dfcd26855ebc5e4ee0daa5da2ae3ec5b982 -#- 2020-06-19 11:07:27 -0400 -#- candicecz -#- BrainPortal/app/helpers/access_profiles_helper.rb 97ce78b45fd7e7ce3f23ef750d260456443b9410 -#- 2013-08-28 13:52:22 -0400 -#- Natacha Beck -#- BrainPortal/app/helpers/access_report_helper.rb 4c3482ba334d2ebb9d02d2e207e54df235cd14e9 -#- 2021-04-28 12:48:55 -0400 -#- Natacha Beck -#- BrainPortal/app/helpers/ajax_widget_helper.rb @@ -300,13 +311,14 @@ e9d5d84ac542f1bc35cdc42a04b09970e218f234 -#- 2020-04-08 20:05:24 -0400 -#- Montr 012d0c6ab442f0da03c4055610a48bbb9f860930 -#- 2021-04-29 10:11:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/basic_helper.rb 70403d4a9074bc93d2028f67d66b459719a75cb4 -#- 2015-05-06 15:23:08 -0400 -#- Natacha Beck -#- BrainPortal/app/helpers/data_providers_helper.rb 06fbd979ae2102fc95ed1e98c30e040ddb2142ad -#- 2018-11-16 15:36:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/date_range_panel_helper.rb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/disk_quotas_helper.rb ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 
2012-08-07 14:58:59 -0400 -#- Tarek Sherif -#- BrainPortal/app/helpers/disk_usage_report_helper.rb 151f5958d1202e06df847e9ebf2b4fe421d4d1b8 -#- 2015-05-20 17:01:09 -0400 -#- Remi Bernard -#- BrainPortal/app/helpers/documentation_helper.rb -7cd009c566f003b7d49e20f68f91d3f3c1ba32a8 -#- 2018-02-28 14:08:00 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/dynamic_form_helper.rb +7b1c3d3fbf2aba16b4a43a725b35435381d87ab1 -#- 2022-06-20 16:00:22 -0400 -#- Natacha Beck -#- BrainPortal/app/helpers/dynamic_form_helper.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/app/helpers/dynamic_table_helper.rb 0dc7a51c369ecb01b996228ae8bcd7efac0cb81c -#- 2020-07-29 16:14:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/groups_helper.rb 2885a6c65c431d285169a4cf1efb5a7e0ade92d0 -#- 2020-09-14 22:44:08 -0400 -#- Natacha Beck -#- BrainPortal/app/helpers/neurohub_view_helper.rb -56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/resource_link_helper.rb +24c9c136762b46140a61b8d8a0707985d4cace8b -#- 2022-03-04 15:31:11 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/resource_link_helper.rb 44c705595afd45489dc105b25f6e4808b4b9ce73 -#- 2020-01-23 18:10:57 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/resource_usage_helper.rb 2c0d185a06616beab34267d21557986b1eba6a84 -#- 2016-11-14 16:16:13 -0500 -#- Andrew Doyle -#- BrainPortal/app/helpers/rich_ui_helper.rb 118049e28ce1a6a41bdc7e9927d07b97e897ce1b -#- 2020-02-13 16:37:08 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/scope_helper.rb @@ -314,7 +326,7 @@ ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 2012-08-07 14:58:59 -0400 -#- Tarek 0a7d467515e3c86d98114d036d514763c4118929 -#- 2018-09-05 14:58:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/show_table_helper.rb ffb7fe821ec57c0b7b7005b7c4fe3c1a4ecb7fc7 -#- 2020-11-09 10:25:49 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/switcher_helper.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/app/helpers/table_maker_helper.rb -005b7c9c957e9e96bb119b4ebdefd6e9164f39ed -#- 2021-09-07 15:24:56 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/task_form_helper.rb +b1fd630316dd36e8668da552df7d29862da1cbe2 -#- 2022-05-12 10:02:52 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/task_form_helper.rb 9dd6f2bcc9c05cbc62627705492fa398b44d5e88 -#- 2018-01-05 13:27:45 -0500 -#- Pierre Rioux -#- BrainPortal/app/helpers/tasks_helper.rb 5ca19b087dc4241edc2b07ae323c7c0df6e896aa -#- 2021-04-28 15:08:19 -0400 -#- Pierre Rioux -#- BrainPortal/app/helpers/userfiles_helper.rb 79de48d5acc566ef8ef60bb6edfbac6e5709b2a5 -#- 2021-11-17 16:07:12 -0500 -#- Natacha Beck -#- BrainPortal/app/helpers/users_helper.rb @@ -327,26 +339,28 @@ ffb7fe821ec57c0b7b7005b7c4fe3c1a4ecb7fc7 -#- 2020-11-09 10:25:49 -0500 -#- Pierr 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/access_profile.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/active_record_log.rb 0dc7a51c369ecb01b996228ae8bcd7efac0cb81c -#- 2020-07-29 16:14:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/admin_user.rb -98eee53ecd68e4d4d33a413f4d1f63a3ced18a45 -#- 2021-04-28 13:13:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/application_record.rb +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- 
Pierre Rioux -#- BrainPortal/app/models/application_record.rb 79de48d5acc566ef8ef60bb6edfbac6e5709b2a5 -#- 2021-11-17 16:07:12 -0500 -#- Natacha Beck -#- BrainPortal/app/models/automated_user.rb -598063ef6c013421ed34a9202fb6ab9e19e1fe33 -#- 2020-12-17 15:05:26 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/bourreau.rb -a16ee1122476fe433314f20182e00ed239dab202 -#- 2022-01-27 15:20:20 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/boutiques_cluster_task.rb -eb56ee2f0b1ac2a09ab18d23dfdb3a031c3fd5da -#- 2021-11-25 12:21:22 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/boutiques_portal_task.rb +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/bourreau.rb +870b96c0be49bf28d9ede25034a608b19386f4e8 -#- 2022-08-26 13:37:12 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/boutiques_cluster_task.rb +af638f35f79135a3bc47e2451284000c92f43ee6 -#- 2023-01-24 13:11:36 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/boutiques_portal_task.rb d652f273e94faf068ba1729143c03ef4148f0e02 -#- 2016-12-13 17:03:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/brain_portal.rb f04ea435ded1531cf7bec78f1e1cb618b9edd8da -#- 2019-07-07 15:39:29 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/carmin_path_data_provider.rb 4aa8aade0b5a9bd94c98b727c280b0eb95785878 -#- 2020-08-13 16:05:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/cbrain_session.rb -607c7be5c8d021277d406843b706b462fcba46d2 -#- 2020-09-18 14:43:27 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/cbrain_task.rb -87fbdab73daca0abc4c95b6d08475a16acb3b153 -#- 2021-12-08 15:49:03 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/cluster_task.rb +ba92b49d171adfc4984b3b97e16237e49d96de13 -#- 2022-08-09 15:40:20 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/cbrain_task.rb +f176c35d273d4df414595aedb30ce7af9370c92c -#- 2023-01-12 17:15:09 -0500 -#- Serge -#- BrainPortal/app/models/cluster_task.rb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/concerns/.keep 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/app/models/control.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/app/models/core_admin.rb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/models/count_resource_usage_for_user_message.rb a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/cputime_resource_usage_for_cbrain_task.rb c1275dbe2f97b577a66c42251ecd0fc038e5f9c6 -#- 2019-10-22 14:23:14 -0400 -#- Natacha Beck -#- BrainPortal/app/models/custom_filter.rb -f0951d2bdb11e6b21443f77ad4702de8699e2e8f -#- 2021-10-29 11:51:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/data_provider.rb +8bdce9ff21d84eebed3853aa27cdae05c0b11ae7 -#- 2022-10-06 16:51:55 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/data_provider.rb +8a3a3a735518cd5bd354778debdaad876a5ad4f1 -#- 2022-03-28 10:44:23 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/data_usage.rb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/disk_quota.rb 7bb34c90f66d188335a42f84edc2e3a4f3655ea7 -#- 2020-01-31 10:35:02 -0500 -#- Natacha Beck -#- BrainPortal/app/models/en_cbrain_local_data_provider.rb 5db22e629b5084bb909e3ce0d9939e9599753fc3 -#- 2016-06-03 17:57:00 -0400 -#- Pierre Rioux 
-#- BrainPortal/app/models/en_cbrain_smart_data_provider.rb -56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/en_cbrain_ssh_data_provider.rb +a8ea3faecd580086e71fa074e92c01b927bb3cfa -#- 2022-03-04 15:57:03 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/en_cbrain_ssh_data_provider.rb 4551c18d1922bbd8cc3200c69cba1033c16bb24a -#- 2012-10-31 16:54:43 -0400 -#- Tarek Sherif -#- BrainPortal/app/models/everyone_group.rb 744f6276a1f2af2685eebe5d90ba66d4d6852391 -#- 2021-02-15 10:59:45 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/exception_log.rb 98c8a30a2419e154647df3439cb2ab04c82108f1 -#- 2020-04-11 16:32:34 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/file_info.rb @@ -370,19 +384,20 @@ c0e63e53355bb84ce0dd58e1b21950f4e585f463 -#- 2021-02-26 10:19:42 -0500 -#- Pierr 50cc041cbe6c2d231b4e667c6011feec74b7cf6e -#- 2021-02-12 10:32:31 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/multilevel_ssh_data_provider.rb 3da63c4dfc5b8de61c0074ad8449d97abd2a49cb -#- 2020-07-29 15:49:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/normal_user.rb 23e38a508f5bc025b05649f3b7f85f62a6460c02 -#- 2016-07-05 09:28:03 -0400 -#- Andrew Doyle -#- BrainPortal/app/models/portal_agent_locker.rb -a5bc8f90a9625964b001fb8229ba6b07e4c7a81f -#- 2021-08-05 11:18:31 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/portal_task.rb +05478d8123f2cacbad66835e9acb9f4995e057ca -#- 2022-07-28 18:21:02 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/portal_task.rb dd5d17dcf9bf7ccb14b93d9c22dc5e5961b269e0 -#- 2020-03-06 13:22:57 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/remote_command.rb -cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/remote_resource.rb +ea973f14d98d8bfe5af9f9b8a658a7371b7d7e9e -#- 2022-10-14 10:36:32 -0400 -#- Natacha Beck -#- BrainPortal/app/models/remote_resource.rb 13aa69704cbed3f572ebc929b1fd0d41beadb69f -#- 2020-03-15 10:59:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/remote_resource_info.rb b9be50ddddf682be63e2240800e5080dc7762922 -#- 2019-09-16 12:47:51 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/resource_usage.rb e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/restricted_hash.rb 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/s3_data_provider.rb -167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/s3_flat_data_provider.rb +c8d2b2b55957a43fbb1cd407cbcd4c1da02118b3 -#- 2022-09-22 12:29:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/s3_flat_data_provider.rb +aacfa25ed1aaa071be10bb03016672afe174618e -#- 2022-08-23 14:47:22 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/s3_multi_level_data_provider.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/sanity_check.rb f41ca70703517a6620bcf8ae7a28be29364b1b7a -#- 2017-12-19 21:18:14 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/scratch_data_provider.rb cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/signup.rb -f0951d2bdb11e6b21443f77ad4702de8699e2e8f -#- 2021-10-29 11:51:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/sing_bindmount_data_provider.rb -3b8e433c9f3f1faaf49cbd4adc2a3c95b0041a86 -#- 2021-12-01 18:22:44 -0500 -#- Pierre Rioux -#- 
BrainPortal/app/models/sing_squashfs_data_provider.rb +08d7fb50e8a209f988d99f8ec3bf489c44562644 -#- 2022-10-24 14:54:03 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/sing_bindmount_data_provider.rb +08d7fb50e8a209f988d99f8ec3bf489c44562644 -#- 2022-10-24 14:54:03 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/sing_squashfs_data_provider.rb 99005918548df8f362af68710b886b9e34557245 -#- 2019-10-09 16:47:51 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/site.rb 3a2e9701e23bc5e58552b7d9dd5fa6335c3addf1 -#- 2017-03-06 15:17:06 -0500 -#- Andrew Doyle -#- BrainPortal/app/models/site_group.rb 3da63c4dfc5b8de61c0074ad8449d97abd2a49cb -#- 2020-07-29 15:49:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/site_manager.rb @@ -390,8 +405,8 @@ a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierr a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/space_resource_usage_for_cbrain_task.rb a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/space_resource_usage_for_userfile.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/ssh_agent_unlocking_event.rb -f0951d2bdb11e6b21443f77ad4702de8699e2e8f -#- 2021-10-29 11:51:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/ssh_data_provider.rb -f46666350ad7ff4074edabb2b5fd75b883d680c0 -#- 2021-06-07 16:39:19 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/ssh_data_provider_base.rb +4ec24acef6766d341e1830661b896380a9a9ff25 -#- 2022-09-07 13:07:30 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/ssh_data_provider.rb +a8ea3faecd580086e71fa074e92c01b927bb3cfa -#- 2022-03-04 15:57:03 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/ssh_data_provider_base.rb f7d6e9efadcd75edcb5bcd156fed5801395c2b6f -#- 2019-05-06 12:52:54 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/sync_status.rb c1e59cb4ac72a9ff66e76a42c607a269a043ee4d -#- 2019-10-30 16:14:47 -0400 -#- Natacha Beck -#- BrainPortal/app/models/system_group.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/tag.rb @@ -399,12 +414,12 @@ c1e59cb4ac72a9ff66e76a42c607a269a043ee4d -#- 2019-10-30 16:14:47 -0400 -#- Natac 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/task_vm_allocation.rb a7258273e3f1161fa2edba04497ae8c1c4ac3fbd -#- 2019-09-14 15:30:35 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/time_resource_usage.rb ad34090533e1fa43108204c3dbd7d26a278a6916 -#- 2021-09-13 17:01:28 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/tool.rb -7f2485c84e8da3aabdb4953e9abf86446303a342 -#- 2022-01-21 10:49:08 -0500 -#- Serge -#- BrainPortal/app/models/tool_config.rb +ab3682e7537ad9d8cf5869c2d6e36234b0fec0ac -#- 2023-01-17 16:34:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/tool_config.rb 3da63c4dfc5b8de61c0074ad8449d97abd2a49cb -#- 2020-07-29 15:49:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/user.rb ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 2012-08-07 14:58:59 -0400 -#- Tarek Sherif -#- BrainPortal/app/models/user_group.rb -a46bed2d34bdabdae4f5d245ceeb42414d3feab3 -#- 2021-12-16 11:57:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/userfile.rb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/userfile.rb 24c25b3474801590604484e5f76fda4a954fea13 -#- 2020-04-08 17:02:58 
-0400 -#- Pierre Rioux -#- BrainPortal/app/models/userfile_custom_filter.rb -f46666350ad7ff4074edabb2b5fd75b883d680c0 -#- 2021-06-07 16:39:19 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb +a8ea3faecd580086e71fa074e92c01b927bb3cfa -#- 2022-03-04 15:57:03 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/userkey_flat_dir_ssh_data_provider.rb 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/vault_local_data_provider.rb 5db22e629b5084bb909e3ce0d9939e9599753fc3 -#- 2016-06-03 17:57:00 -0400 -#- Pierre Rioux -#- BrainPortal/app/models/vault_smart_data_provider.rb 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/models/vault_ssh_data_provider.rb @@ -416,17 +431,17 @@ d88c55764bb7f3d79446d68c5f5f87c28c941124 -#- 2016-11-16 16:38:09 -0500 -#- Andre c878118e1fc3773ce415d116762ea271fae54a0c -#- 2016-06-01 15:59:54 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/access_profiles/index.html.erb c878118e1fc3773ce415d116762ea271fae54a0c -#- 2016-06-01 15:59:54 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/access_profiles/index.js.erb a10eb11c8c6a02be7ff7812c9015db9f71f6183d -#- 2020-05-25 14:17:10 -0400 -#- candicecz -#- BrainPortal/app/views/access_profiles/show.html.erb -ad7428f4d166daf281e6714c1fe28ffb554362fd -#- 2019-10-08 12:08:40 -0400 -#- Natacha Beck -#- BrainPortal/app/views/bourreaux/_bourreaux_display.html.erb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/_bourreaux_display.html.erb 492f459fb80ce1fdafb46d4f4812a85157a721cc -#- 2016-03-01 17:19:45 -0500 -#- Remi Bernard -#- BrainPortal/app/views/bourreaux/_load_info.html.erb -2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/_notes.html.erb +ea973f14d98d8bfe5af9f9b8a658a7371b7d7e9e -#- 2022-10-14 10:36:32 -0400 -#- Natacha Beck -#- BrainPortal/app/views/bourreaux/_notes.html.erb 1ea323bdc258568b51372dc3bce4fa6f2bc4e56a -#- 2019-05-09 12:44:48 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/_runtime_info.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/index.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/index.js.erb -dcf764b91ceb7a61c01dc9dfeb6a3b189f06eef8 -#- 2019-08-30 16:26:13 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/new.html.erb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/new.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/rr_access.html.erb 8145cfb557797e6e385be56a52ad80b31b4087f5 -#- 2013-04-22 16:35:31 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/rr_access_dp.html.erb 8b22dd68e544ea4a3fe20983831bdf81504b4029 -#- 2016-08-03 16:51:05 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/rr_disk_usage.html.erb -cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/show.html.erb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/bourreaux/show.html.erb cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 
15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/cbrain_mailer/forgotten_password.text.erb cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/cbrain_mailer/general_message.text.erb 679af6df2fff678f1f72c5cf0f26f4045822104e -#- 2020-11-19 20:38:17 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/cbrain_mailer/registration_confirmation.text.erb @@ -444,13 +459,13 @@ f50a0fc3a36e48fc3edc00df3f8215ca93091c47 -#- 2019-05-14 13:19:26 -0400 -#- Pierr 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/custom_filters/new.html.erb 3a60943052a000d16428f0ae0bad2cc6fdda15dc -#- 2019-05-13 18:25:42 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/custom_filters/show.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/custom_filters/update.js.erb -5e53bcf09c1d8c4a97393e0c35452798c15112f8 -#- 2016-06-28 09:22:43 -0400 -#- Andrew Doyle -#- BrainPortal/app/views/data_providers/_data_providers_table.html.erb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_data_providers_table.html.erb 69c1e37560a98c028c7ef7dca5c5b973bc758085 -#- 2016-03-02 20:04:47 -0500 -#- Remi Bernard -#- BrainPortal/app/views/data_providers/_delete_button.html.erb -56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_dp_browse_table.html.erb +24c9c136762b46140a61b8d8a0707985d4cace8b -#- 2022-03-04 15:31:11 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_dp_browse_table.html.erb 7bb34c90f66d188335a42f84edc2e3a4f3655ea7 -#- 2020-01-31 10:35:02 -0500 -#- Natacha Beck -#- BrainPortal/app/views/data_providers/_dp_report_table.html.erb 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_dp_show_path.html.erb 98c3ed3ed12c28a8cb09a4cdad4fc00a102cbe84 -#- 2019-08-30 13:32:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_dp_types_explained.html.erb -31eb863341ef1c48363dec12d978cfc8c4975803 -#- 2020-02-13 11:12:36 -0500 -#- Natacha Beck -#- BrainPortal/app/views/data_providers/_one_data_provider_table.html.erb +ca0d9a40fa5d6ed3dbfb8bea7b6d0e203b022f5a -#- 2022-05-03 17:00:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_one_data_provider_table.html.erb ed836acefc2c789a888160ab126d52d70ccb41ef -#- 2021-02-23 18:50:37 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/_register_button.html.erb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/app/views/data_providers/_unregister_button.html.erb bcba0a16e2e7c8e018e58a630d65130fefc89632 -#- 2015-09-11 17:10:28 -0400 -#- Remi Bernard -#- BrainPortal/app/views/data_providers/_view_option_button.html.erb @@ -464,6 +479,11 @@ bcba0a16e2e7c8e018e58a630d65130fefc89632 -#- 2015-09-11 17:10:28 -0400 -#- Remi b3392f383d681b2ee6cdd51877c29a5ce6b2cf33 -#- 2020-02-04 15:13:51 -0500 -#- Natacha Beck -#- BrainPortal/app/views/data_providers/report.html.erb a5bafa8c01b857e358d6cbad182e6f9e0cf66a3b -#- 2015-09-28 14:16:46 -0400 -#- Remi Bernard -#- BrainPortal/app/views/data_providers/report.js.erb 167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/data_providers/show.html.erb 
+966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/disk_quotas/_disk_quotas_table.html.erb +17fb2760f8cd01da44af2c5fe6f3b48963254128 -#- 2023-01-21 13:17:20 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/disk_quotas/index.html.erb +18932a2699c0e2426480b0ddf1cdb86c83f9efb4 -#- 2022-03-16 16:05:00 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/disk_quotas/index.js.erb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/disk_quotas/report.html.erb +966ca2b5fadee53078e654705ea6bd8340cb3d2b -#- 2023-01-26 14:09:50 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/disk_quotas/show.html.erb 3a42fd7db2bf64ee3acc7c9163de6c8ac376343b -#- 2015-09-21 15:27:17 -0400 -#- Remi Bernard -#- BrainPortal/app/views/exception_logs/_exception_logs_table.html.erb 0d2b8b4f5ab294edb90d29aa9d01e4ff722e4ef2 -#- 2012-05-09 18:32:53 -0400 -#- Tarek Sherif -#- BrainPortal/app/views/exception_logs/index.html.erb 0d2b8b4f5ab294edb90d29aa9d01e4ff722e4ef2 -#- 2012-05-09 18:32:53 -0400 -#- Tarek Sherif -#- BrainPortal/app/views/exception_logs/index.js.erb @@ -472,37 +492,39 @@ a5bafa8c01b857e358d6cbad182e6f9e0cf66a3b -#- 2015-09-28 14:16:46 -0400 -#- Remi f29aed9eed503d97131410219e1f053b71147efb -#- 2016-04-18 16:39:00 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/groups/_users_form.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/groups/index.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/groups/index.js.erb -0dc7a51c369ecb01b996228ae8bcd7efac0cb81c -#- 2020-07-29 16:14:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/groups/new.html.erb -0dc7a51c369ecb01b996228ae8bcd7efac0cb81c -#- 2020-07-29 16:14:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/groups/show.html.erb +8a3a3a735518cd5bd354778debdaad876a5ad4f1 -#- 2022-03-28 10:44:23 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/groups/new.html.erb +68fda3bd05ad44add796a93654aa2c00ac2578b2 -#- 2022-04-27 11:12:37 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/groups/show.html.erb 151f5958d1202e06df847e9ebf2b4fe421d4d1b8 -#- 2015-05-20 17:01:09 -0400 -#- Remi Bernard -#- BrainPortal/app/views/help_documents/_show.js.erb 151f5958d1202e06df847e9ebf2b4fe421d4d1b8 -#- 2015-05-20 17:01:09 -0400 -#- Remi Bernard -#- BrainPortal/app/views/help_documents/show.html.erb b68ffba9526fbcc0c5346c1e43ee3c308442f0ad -#- 2018-03-08 13:14:36 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/invitations/_new.html.erb -1c7b5b8606692f41f1b3e990017881cfc889e392 -#- 2020-02-10 16:25:46 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_cbrain_application.html.erb +906d7fd181b7f4fea7b37e33d319a7e0ecd39d11 -#- 2022-03-03 11:37:27 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_cbrain_application.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_log_report.html.erb -96c68fac67d0a3d35c786bd83899b9331e013f33 -#- 2020-09-16 14:03:04 -0400 -#- Natacha Beck -#- BrainPortal/app/views/layouts/_neurohub_application.html.erb +906d7fd181b7f4fea7b37e33d319a7e0ecd39d11 -#- 2022-03-03 11:37:27 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_neurohub_application.html.erb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- 
BrainPortal/app/views/layouts/_neurohub_flash_message.html.erb c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candicecz -#- BrainPortal/app/views/layouts/_neurohub_footer.html.erb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/views/layouts/_neurohub_navbar.html.erb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/views/layouts/_neurohub_secondary_navbar.html.erb -df088bc08df24a3e49de2501735cad96feb83b41 -#- 2020-08-12 11:20:14 -0400 -#- candicecz -#- BrainPortal/app/views/layouts/_section_account.html.erb +17fb2760f8cd01da44af2c5fe6f3b48963254128 -#- 2023-01-21 13:17:20 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_section_account.html.erb +08a11f3f0b726831acdf4bc8f4aa2f9ae1b29f9e -#- 2022-03-03 11:56:55 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_section_cookie_notif.html.erb e5206c0d66157cbb257f34abd58f46d1804946cd -#- 2017-04-07 11:26:42 -0400 -#- Andrew Doyle -#- BrainPortal/app/views/layouts/_section_footer.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_section_main.html.erb -b8ac5426fc0046900da1739317e460f0ae579f3b -#- 2019-10-30 18:12:49 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_section_menu.html.erb +c7b2fd8ef47f9de145aa3260b2a993fc80fc01e6 -#- 2022-03-17 12:28:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/_section_menu.html.erb 3d762be4d93689351ee62ee4901c4c57ec96cef1 -#- 2020-03-02 18:27:10 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/application.html.erb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/mailer.html.erb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/layouts/mailer.text.erb 5ba48cffdcecf1b35dee424372b800c2e6361bc4 -#- 2012-08-08 17:26:20 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/messages/_flash_display.html.erb -fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/views/messages/_message_details.html.erb +7c73cae7f8a1a8e78a91ec90566fcd7158620577 -#- 2022-04-16 13:08:30 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/messages/_message_details.html.erb c83e700766f0772625eef7c437fef0be8f85899b -#- 2012-03-12 17:46:13 -0400 -#- Tarek Sherif -#- BrainPortal/app/views/messages/_message_display.html.erb b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/messages/_message_index_display.html.erb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/views/messages/_message_table_row.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/messages/index.html.erb 9a3e7ff155e53917bdec745bba7edbca7b011d81 -#- 2018-03-08 18:41:52 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/messages/index.js.erb -24c25b3474801590604484e5f76fda4a954fea13 -#- 2020-04-08 17:02:58 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/messages/new.html.erb +5cc2e8488608abd4a90eae0a18cd865f8159147a -#- 2022-04-18 09:34:44 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/messages/new.html.erb +ca0d9a40fa5d6ed3dbfb8bea7b6d0e203b022f5a -#- 2022-05-03 17:00:47 -0400 -#- Pierre Rioux -#- 
BrainPortal/app/views/messages/new_dashboard.html.erb c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candicecz -#- BrainPortal/app/views/neurohub_portal/_tasks_table.html.erb ce0c5e5da6a5a69e2e23f2618aa6d7fa8a732c8d -#- 2020-05-08 12:36:16 -0400 -#- candicecz -#- BrainPortal/app/views/neurohub_portal/search.html.erb 495db4c873eeb3294c04187047ce1c3a1ffc6a0e -#- 2020-09-23 11:54:45 -0400 -#- MontrealSergiy -#- BrainPortal/app/views/neurohub_portal/styleguide.html.erb -5c44be5768187f43bcc769b22d2ce253c692bf54 -#- 2020-11-18 18:08:34 -0500 -#- bryancaron -#- BrainPortal/app/views/neurohub_portal/welcome.html.erb +529311e2ef64175bf6b7a136e7365e97557061a8 -#- 2022-11-17 12:11:59 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/neurohub_portal/welcome.html.erb fa68a44bee394caedb3ec707819777d44e6d64e6 -#- 2020-11-02 12:30:57 -0500 -#- MontrealSergiy -#- BrainPortal/app/views/nh_invitations/_message_details.html.erb c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candicecz -#- BrainPortal/app/views/nh_invitations/index.html.erb 295dac6b67cd76f993e8ae42ba5bc40b5a9e47c7 -#- 2020-08-04 12:20:29 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_invitations/new.html.erb @@ -526,6 +548,7 @@ ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierr 783f532abb91f341b2725b6263209513a463a372 -#- 2021-11-24 15:39:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/nh_projects/show.html.erb d7eeb0757589695a8f64bd5b822fda329a343022 -#- 2020-08-05 12:37:58 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_projects/show_license.html.erb ebea9d509fbad76c57a7ec56ca0d68505c50307a -#- 2021-09-28 17:16:47 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_sessions/new.html.erb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_sessions/nh_mandatory_globus.html.erb ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_sessions/request_password.html.erb ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_sessions/send_password.html.erb 527d1cd174451bce9fc9847c5fa1f2e9ae1550fb -#- 2020-07-28 14:08:14 -0400 -#- Natacha Beck -#- BrainPortal/app/views/nh_signups/confirm.html.erb @@ -534,7 +557,7 @@ c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candi c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candicecz -#- BrainPortal/app/views/nh_storages/_form.html.erb 05f24b38160e8353631a262bef4b9e7a6895cf2c -#- 2020-05-19 10:46:08 -0400 -#- Xavier Lecours -#- BrainPortal/app/views/nh_storages/_show_key.html.erb 4e801daa4d93c13d4c0759a217bc6e875b374aac -#- 2020-04-20 14:55:54 -0400 -#- candicecz -#- BrainPortal/app/views/nh_storages/edit.html.erb -ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_storages/index.html.erb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_storages/index.html.erb c5f94d0afdef502d91dc3d4f7e4dc789d3344ed0 -#- 2020-05-08 12:36:03 -0400 -#- candicecz -#- BrainPortal/app/views/nh_storages/new.html.erb ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/nh_storages/registered_report.html.erb ca4155a38b45e3339b93e3e372d0c9ca71827195 -#- 2021-03-30 13:52:36 -0400 -#- Pierre Rioux -#- 
BrainPortal/app/views/nh_storages/show.html.erb @@ -547,27 +570,25 @@ ebea9d509fbad76c57a7ec56ca0d68505c50307a -#- 2021-09-28 17:16:47 -0400 -#- Pierr 8a102751e9c818baeb903de230cd8b2b5e36a1ce -#- 2021-10-01 12:34:36 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/noc/dashboard.html.erb a3cbb977f0a16799006c5a5621aa03a21a79d684 -#- 2022-01-13 13:16:05 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/noc/tools.html.erb a7edf096b00faeb56c8a8cc1ec72dc4361c78527 -#- 2022-01-07 12:37:29 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/noc/users.html.erb -47bf3a94efa225c5d929cdc8dc5a6e7c2678f45f -#- 2021-05-20 12:33:05 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/_available_datasets_table.html.erb -a20bce6401ec83f4467059cc0b87614712f3ad25 -#- 2021-05-26 12:36:46 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/_available_tools_table.html.erb +3ea9d1a35be7d67be6fbd2e1f7f02a7ea52fc6fb -#- 2023-01-20 14:25:17 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/_available_datasets_table.html.erb +3ea9d1a35be7d67be6fbd2e1f7f02a7ea52fc6fb -#- 2023-01-20 14:25:17 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/_available_tools_table.html.erb 49ca92947ec1e8cd21399ea40399859b5d8122e3 -#- 2016-09-19 16:12:00 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/_logo_footer.html.erb -f8ffc64b552d5dabb75aad1538a4a278aa7ba118 -#- 2016-09-19 15:26:25 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/about_us.html.erb -3b44321cf90c7ec00a457afe15fd45126669c4b7 -#- 2021-05-20 14:07:30 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/available.html.erb +db8b1958d2c89ab4107cc6912b4969db0b542377 -#- 2022-08-25 15:52:02 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/about_us.html.erb +3ea9d1a35be7d67be6fbd2e1f7f02a7ea52fc6fb -#- 2023-01-20 14:25:17 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/available.html.erb f66a083f2a47d9d0c738766e0eda788395a7cb9b -#- 2018-01-25 17:16:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/credits.html.erb 4421af11b41dbae7f167ee1e17d6c9a599e6aad1 -#- 2018-12-07 17:18:46 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/portal_log.html.erb a71d05d4aabdf3cb9f87a32eed3fea60ac39466d -#- 2021-05-03 15:19:09 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/provenance.html.erb -6a03025c454b0533051f66cb2542c752239c9bfc -#- 2020-06-15 11:51:36 -0400 -#- candicecz -#- BrainPortal/app/views/portal/report.html.erb +b8a938178d5f653de6576d8e4300b52c832673e3 -#- 2022-11-22 11:50:15 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/report.html.erb ab3e2bc93ca3e388d2acdcaa1a1335a805e4452e -#- 2018-09-06 14:09:01 -0400 -#- MontrealSergiy -#- BrainPortal/app/views/portal/search.html.erb 6a1d5e216964ceb945495978b70220a921f27f5e -#- 2012-05-10 15:26:32 -0400 -#- Natacha Beck -#- BrainPortal/app/views/portal/show_license.html.erb +dd23aaaf11031121b0e45af6c71ef7ea864fb038 -#- 2022-10-04 08:41:30 -0400 -#- Natacha Beck -#- BrainPortal/app/views/portal/stats.html.erb b1a0d5a20b2668342a8baa0af2ed4a3b5e8cf1ab -#- 2019-05-07 18:35:15 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/swagger.html.erb -47bf3a94efa225c5d929cdc8dc5a6e7c2678f45f -#- 2021-05-20 12:33:05 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/portal/welcome.html.erb +529311e2ef64175bf6b7a136e7365e97557061a8 -#- 2022-11-17 12:11:59 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/portal/welcome.html.erb eeecb816d8a57fbbf5405eccad7edd4da5aaaf2a -#- 2020-09-14 09:02:24 -0400 -#- Natacha Beck -#- 
BrainPortal/app/views/resource_usage/_resource_usage_table.html.erb 40cdaed3acfe10dc6e10c8b1dc5b441979d9b5cb -#- 2019-09-17 16:33:21 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/resource_usage/index.html.erb 40cdaed3acfe10dc6e10c8b1dc5b441979d9b5cb -#- 2019-09-17 16:33:21 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/resource_usage/index.js.erb -2b5dbbafc9dfd1007fa5b1a0e0a252083bafd49a -#- 2021-12-17 09:13:58 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/service/detailed_stats.html.erb -18bef8d55dda14f1f4bc664c48061c9fe23ece01 -#- 2019-07-18 17:25:56 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/service/info.html.erb -2b5dbbafc9dfd1007fa5b1a0e0a252083bafd49a -#- 2021-12-17 09:13:58 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/service/stats.html.erb -18bef8d55dda14f1f4bc664c48061c9fe23ece01 -#- 2019-07-18 17:25:56 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/service/support.html.erb -1caba8617c594cea068b3cfd4e9d3e83b19b1566 -#- 2021-06-23 19:48:45 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/sessions/new.html.erb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/sessions/mandatory_globus.html.erb +9ef08aad280ba341812203b83b447609e9417c40 -#- 2022-08-01 10:44:34 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/sessions/new.html.erb d88c55764bb7f3d79446d68c5f5f87c28c941124 -#- 2016-11-16 16:38:09 -0500 -#- Andrew Doyle -#- BrainPortal/app/views/shared/_access_profile_checkbox_table.html.erb 487e2f071d0d55a2d0e3e5d9db7778c856416b7b -#- 2016-01-13 09:05:34 -0500 -#- Remi Bernard -#- BrainPortal/app/views/shared/_active_filters.html.erb d5bdc11809455300581173ec0232ef856c6751d8 -#- 2016-08-01 18:34:55 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/shared/_color_picker.html.erb @@ -608,21 +629,22 @@ fe87bd0ad20874176f2558f31f6782efaaf39cb4 -#- 2021-09-19 16:09:01 -0400 -#- Pierr 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/_utility_interface_file_list.html.erb 67bce169134affbd1f78ed76b2252b08a3969b20 -#- 2019-10-23 14:22:41 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/_zenodo_deposit_form.html.erb 6f3d1f8b120820b1eb3a8f7a8cedfab627acc132 -#- 2011-05-04 20:12:10 +0000 -#- prioux -#- BrainPortal/app/views/tasks/cbrain_plugins -96044e3dbbd7973d2b81d5bb016e04779fe13cb1 -#- 2015-06-03 17:34:43 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/edit.html.erb +af638f35f79135a3bc47e2451284000c92f43ee6 -#- 2023-01-24 13:11:36 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/edit.html.erb 8b748f87ae1c4e871d541a5e9fb6fffd59bd79a0 -#- 2012-10-12 17:51:02 -0400 -#- Tarek Sherif -#- BrainPortal/app/views/tasks/index.html.erb a5bafa8c01b857e358d6cbad182e6f9e0cf66a3b -#- 2015-09-28 14:16:46 -0400 -#- Remi Bernard -#- BrainPortal/app/views/tasks/index.js.erb -692ff77d48c70be9ccc7b7f8d3ee583834e9a507 -#- 2015-01-06 17:17:58 -0500 -#- Natacha Beck -#- BrainPortal/app/views/tasks/new.html.erb -641971b6cea07f3a5f33ac668362a641002c391e -#- 2021-09-16 17:13:37 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/show.html.erb +650c2e666738f7a285eb4e2510ed978f82816e5c -#- 2023-01-24 13:57:55 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/new.html.erb +ebb1d8d24e419f550c259aaaf09e08da37c1cdb3 -#- 2022-07-12 15:52:30 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/show.html.erb 67bce169134affbd1f78ed76b2252b08a3969b20 -#- 2019-10-23 14:22:41 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tasks/zenodo.html.erb 
-ab3e2bc93ca3e388d2acdcaa1a1335a805e4452e -#- 2018-09-06 14:09:01 -0400 -#- MontrealSergiy -#- BrainPortal/app/views/tool_configs/_by_resource.erb +17fb2760f8cd01da44af2c5fe6f3b48963254128 -#- 2023-01-21 13:17:20 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/_by_resource.erb ab3e2bc93ca3e388d2acdcaa1a1335a805e4452e -#- 2018-09-06 14:09:01 -0400 -#- MontrealSergiy -#- BrainPortal/app/views/tool_configs/_by_user.html.erb 656945deaf11556ae55ed0bd8108a6b308dcdd89 -#- 2018-11-16 12:19:31 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/_env_key_value_pair.erb -1b90a3352d9cd1f2446303fd54138b3c1f0c9ac0 -#- 2022-01-21 10:02:53 -0500 -#- Serge -#- BrainPortal/app/views/tool_configs/_form_fields.html.erb -c4ea5b07f4c6a44368172830f7495a50cc7deb6f -#- 2019-09-17 12:33:10 -0400 -#- Natacha Beck -#- BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb +6fd83c0e23df55660ad68a7f6e2b9e6f874e282c -#- 2022-09-02 15:15:33 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/_form_fields.html.erb +17fb2760f8cd01da44af2c5fe6f3b48963254128 -#- 2023-01-21 13:17:20 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/_tool_configs_table.html.erb +0e17b176f4ce89cd25cff7d6c850c469d2fd8cc3 -#- 2022-05-24 15:19:04 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/boutiques_descriptor.html.erb 3a9dfece076bc8fca9e2ec445057c0b8a3ce03ba -#- 2016-10-07 17:28:40 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/index.html.erb 3a9dfece076bc8fca9e2ec445057c0b8a3ce03ba -#- 2016-10-07 17:28:40 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/index.js.erb a7be35ccedf89f5357792c5216700c1aabab7f91 -#- 2017-01-10 16:52:32 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/report.html.erb -14d7482ee7abebb8d9945ff4acb36b65971cbb2d -#- 2019-10-23 17:54:39 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tool_configs/show.html.erb +f176c35d273d4df414595aedb30ce7af9370c92c -#- 2023-01-12 17:15:09 -0500 -#- Serge -#- BrainPortal/app/views/tool_configs/show.html.erb dcf764b91ceb7a61c01dc9dfeb6a3b189f06eef8 -#- 2019-08-30 16:26:13 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tools/_form_fields.html.erb 7e0ab97e88dc5127c2a57a5be3f5414677fc5013 -#- 2016-01-29 17:22:43 -0500 -#- Remi Bernard -#- BrainPortal/app/views/tools/_tool_config_select.html.erb 3a9dfece076bc8fca9e2ec445057c0b8a3ce03ba -#- 2016-10-07 17:28:40 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/tools/_tools_table.html.erb @@ -647,15 +669,15 @@ fe9898c37839ef55967fd1439be3b9ae315115ab -#- 2021-01-25 11:13:15 -0500 -#- Montr 9c9c841d9abf47170154fa85cda5aed63452028e -#- 2019-12-19 13:36:32 -0500 -#- Natacha Beck -#- BrainPortal/app/views/userfiles/index.html.erb 5af58d9bd3b0595a3214721319c69f62ab98f0f4 -#- 2015-12-14 18:28:29 -0500 -#- Remi Bernard -#- BrainPortal/app/views/userfiles/index.js.erb 38cea8939afc15effccf5d4ca14246aad2653ffe -#- 2019-12-03 19:18:17 -0500 -#- Natacha Beck -#- BrainPortal/app/views/userfiles/quality_control.html.erb -56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/userfiles/show.html.erb +2452e06d395848773e24b6b70ade52d7e8fb1097 -#- 2022-03-24 16:19:50 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/userfiles/show.html.erb 24c25b3474801590604484e5f76fda4a954fea13 -#- 2020-04-08 17:02:58 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/users/_users_table.html.erb d498d11e37f27b588bfe4df9aad9d684178e4a27 -#- 2012-08-08 15:54:19 -0400 -#- Tarek Sherif -#- 
BrainPortal/app/views/users/change_password.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/users/index.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/users/index.js.erb -61b8eca1774a47160fc79eed1a1ad4d7e8b7dcf5 -#- 2020-08-18 14:04:21 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/users/new.html.erb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/users/new.html.erb 4aa8aade0b5a9bd94c98b727c280b0eb95785878 -#- 2020-08-13 16:05:10 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/users/new_token.html.erb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/app/views/users/request_password.html.erb -fdb2d55b62f8ee84b459ef8dabcfc8ece20beb8c -#- 2021-12-10 15:39:18 -0500 -#- Serge -#- BrainPortal/app/views/users/show.html.erb +02ffac07d8fcd0b19875c496d5dbdb865331415b -#- 2022-07-28 12:49:11 -0400 -#- Pierre Rioux -#- BrainPortal/app/views/users/show.html.erb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/bin/bundle 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/bin/rails 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/bin/rake @@ -663,19 +685,26 @@ fdb2d55b62f8ee84b459ef8dabcfc8ece20beb8c -#- 2021-12-10 15:39:18 -0500 -#- Serge 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/bin/spring 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/bin/update 88b60a0371a8d338c05adc7b711f3575d5edcae8 -#- 2020-07-23 15:05:50 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/.gitignore -f00997eb767f27e6a17f23a6267d2ddbb0d22b0b -#- 2022-01-25 11:56:35 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/boutiques_descriptor_maker.json +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/new_multi_boutiques_demo.json +870b96c0be49bf28d9ede25034a608b19386f4e8 -#- 2022-08-26 13:37:12 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/boutiques_descriptors/rename_output_demo.json c1c5c30eda2b2d4b2c3bf2b82b5b0bc50f738299 -#- 2015-04-20 14:18:23 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/bourreau/bash_scriptor.rb 67bce169134affbd1f78ed76b2252b08a3969b20 -#- 2019-10-23 14:22:41 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/portal/bash_scriptor.rb 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/views/_show_params.html.erb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/bash_scriptor/views/_task_params.html.erb +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 
-0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/bourreau/boutiques_descriptor_maker_handler.rb +0e38917df7e388b14bcdb9b1d0a573f71e837983 -#- 2023-01-20 12:39:24 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/portal/boutiques_descriptor_maker_handler.rb +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_boutiques_preview.html.erb +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_edit_help.html.erb +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_descriptor_maker_handler/views/_html_input.html.erb 005b7c9c957e9e96bb119b4ebdefd6e9164f39ed -#- 2021-09-07 15:24:56 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/bourreau/boutiques_task.rb 005b7c9c957e9e96bb119b4ebdefd6e9164f39ed -#- 2021-09-07 15:24:56 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/portal/boutiques_task.rb 0ba73d176edc86c246589fea6e7052bac26965ec -#- 2021-09-21 18:19:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_boutiques_group.html.erb -106ff000629d75c16259daf069aa8b26abc7435b -#- 2021-09-24 15:40:01 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_boutiques_input.html.erb +01a23d453aa7a0a4c2515f473794e1a25f02c8b2 -#- 2022-09-28 16:00:29 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_boutiques_input.html.erb ad34090533e1fa43108204c3dbd7d26a278a6916 -#- 2021-09-13 17:01:28 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_boutiques_preview.html.erb -ad34090533e1fa43108204c3dbd7d26a278a6916 -#- 2021-09-13 17:01:28 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_description.html.erb -d6219529c978cea00c7e1998858518774b472f1b -#- 2021-09-17 14:51:01 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_dropdown.html.erb -5ebe8b1b59dd889d5bfc24857ba55754394527b9 -#- 2021-11-25 15:31:16 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb +f1416e7188c24df28b69dce7729e673397487a7e -#- 2022-08-18 15:44:17 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_description.html.erb +e2614e33c75ad7e684815d525f648f588a3ced90 -#- 2022-08-03 13:45:27 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_dropdown.html.erb +4d41236da49b61c7f5590421ce796f8072fc90f2 -#- 2022-09-06 17:33:01 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_edit_help.html.erb 45e104c4833e7cdcbd57a2d3d1164d249a525c80 -#- 2021-12-16 15:05:28 -0500 -#- Natacha Beck -#- 
BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_form_js.html.erb 4a59d5e0468e9e5bbd5f978696c63e7cad81dc81 -#- 2021-09-16 16:30:42 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_group_checkbox.html.erb ad34090533e1fa43108204c3dbd7d26a278a6916 -#- 2021-09-13 17:01:28 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_html_input.html.erb @@ -684,17 +713,17 @@ b8287da6797c5d19ba413950786249423bcde982 -#- 2021-09-11 22:36:25 -0400 -#- Pierr 0ba73d176edc86c246589fea6e7052bac26965ec -#- 2021-09-21 18:19:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_input_checkbox.html.erb b8287da6797c5d19ba413950786249423bcde982 -#- 2021-09-11 22:36:25 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_input_label.html.erb 0ba73d176edc86c246589fea6e7052bac26965ec -#- 2021-09-21 18:19:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_opt_checkbox.html.erb -641971b6cea07f3a5f33ac668362a641002c391e -#- 2021-09-16 17:13:37 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb +b9412989844a95a7f705629ce667f7ce8723c48d -#- 2022-03-16 12:24:43 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_show_params.html.erb 0ba73d176edc86c246589fea6e7052bac26965ec -#- 2021-09-21 18:19:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/boutiques_task/views/_task_params.html.erb e760ff349380178b4b022d6bd22d4efa8fa670cb -#- 2016-11-23 17:17:58 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/cb_serializer/bourreau/cb_serializer.rb b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/cb_serializer/common/cb_serializer.rb c02799a0fc8b7e21be2f0753babc879483701920 -#- 2017-12-07 19:19:03 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/cb_serializer/portal/cb_serializer.rb 757bd09da701ba91ee5bef58dc446fcec6b5f3d4 -#- 2020-05-29 14:50:19 -0400 -#- Candice -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/cb_serializer/views/_show_params.html.erb 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/cb_serializer/views/_task_params.html.erb -cea3baa1e96faebee88a97fa5b93b67843f35c69 -#- 2021-10-22 15:40:16 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb -67bce169134affbd1f78ed76b2252b08a3969b20 -#- 2019-10-23 14:22:41 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/portal/diagnostics.rb +4250e55864b5524a12af455c52779be979c5a754 -#- 2022-08-10 14:59:24 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/bourreau/diagnostics.rb +4250e55864b5524a12af455c52779be979c5a754 -#- 2022-08-10 14:59:24 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/portal/diagnostics.rb 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- 
BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/views/_show_params.html.erb -ffd3c8364ff1f0bd4fe1f735128c99b4b3fe46ac -#- 2019-09-18 13:59:29 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/views/_task_params.html.erb +4250e55864b5524a12af455c52779be979c5a754 -#- 2022-08-10 14:59:24 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/views/_task_params.html.erb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/views/public/edit_params_help.html 1eda18f47faaca7f594c2b3e09a4f9d9995df87e -#- 2015-05-08 12:44:50 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/diagnostics/views/public/tool_info.html 82fdc1100fc3681d39114bd3b53adda8f37092ef -#- 2015-05-15 17:01:05 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task/my_cksum/README.txt @@ -731,13 +760,14 @@ d2697ef44b58c5d0912391e1656af82fe5e39d40 -#- 2016-08-12 15:43:49 -0400 -#- Trist e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/cbrain_task_descriptors/multi_boutiques_demo.json 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/audio_file/audio_file.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/bash_source_file/bash_source_file.rb -2439bd526d079923b96c710ac86ac15d36e66317 -#- 2021-12-10 15:54:22 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb +65de15a23183bd21f5fb70cbc77f62cdd2dd07c0 -#- 2023-01-17 17:15:09 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/cbrain_file_list.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/cbrain_file_list/views/_cb_file_list.html.erb -336746bfee3c75bbbef2d4b9e4def4ad0b14278b -#- 2019-11-21 15:44:29 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/csv_file/csv_file.rb +e5b3af89056a37911b93b2973d14cc1ee528113a -#- 2022-03-18 11:42:04 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/csv_file/csv_file.rb 4c3482ba334d2ebb9d02d2e207e54df235cd14e9 -#- 2021-04-28 12:48:55 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/csv_file/views/_csv_file.html.erb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/dat_file/dat_file.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/dot_graph_file/dot_graph_file.rb -e8a3e24808b66dad6cf0b94d234acc7fd94fd713 -#- 2019-05-17 12:50:09 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/file_collection/file_collection.rb +65de15a23183bd21f5fb70cbc77f62cdd2dd07c0 -#- 2023-01-17 17:15:09 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/extended_cbrain_file_list/extended_cbrain_file_list.rb 
+6b76df0f91b650810954cb5ee67f095803212a18 -#- 2022-04-20 13:41:03 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/file_collection/file_collection.rb 18600dc54a2c03ebf16cd958306efc466e210aaa -#- 2021-11-17 15:37:30 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/file_collection/views/_directory_contents.html.erb 4c3482ba334d2ebb9d02d2e207e54df235cd14e9 -#- 2021-04-28 12:48:55 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/file_collection/views/_file_collection.html.erb 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/file_collection/views/_file_collection.json.erb @@ -757,7 +787,7 @@ f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierr 98eee53ecd68e4d4d33a413f4d1f63a3ced18a45 -#- 2021-04-28 13:13:09 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/mp4_video_file/views/_html5_mp4_video.html.erb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/perl_source_file/perl_source_file.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/ruby_source_file/ruby_source_file.rb -af0228ade7fec0c2bcb052ebe0e3ec9469263edd -#- 2018-02-20 17:07:13 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/single_file/single_file.rb +e5b3af89056a37911b93b2973d14cc1ee528113a -#- 2022-03-18 11:42:04 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/single_file/single_file.rb 5e5badcd348083d54047058e8a0a50911a3b08b3 -#- 2019-12-11 14:04:59 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/singularity_image/singularity_image.rb 50a3a6f8e937aad05be47cb7e7693f84ba90fafe -#- 2019-12-03 19:11:19 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/singularity_image/views/_info.html.erb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/tar_archive/tar_archive.rb @@ -765,7 +795,7 @@ f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierr eb5251a923e923218eb839c1cb0104c27f5236f5 -#- 2018-03-06 14:53:23 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/task_raw_workdir/task_raw_workdir.rb e218fa995e594a172604f69d035f16f11d0a142d -#- 2021-06-07 15:47:38 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/task_workdir_archive/task_workdir_archive.rb e236c57be39a0b770a31c6a6e2dfa9851698f3e6 -#- 2016-02-05 16:34:37 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/task_workdir_archive/views/_task_workdir_archive.html.erb -336746bfee3c75bbbef2d4b9e4def4ad0b14278b -#- 2019-11-21 15:44:29 -0500 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/text_file/text_file.rb +e5b3af89056a37911b93b2973d14cc1ee528113a -#- 2022-03-18 11:42:04 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/text_file/text_file.rb 2fdcf21b7c69ba9c41f9c2701255cfc43f2b8e63 -#- 2019-11-21 16:36:49 -0500 -#- Pierre Rioux -#- 
BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/text_file/views/_qc_panel.html.erb 4c3482ba334d2ebb9d02d2e207e54df235cd14e9 -#- 2021-04-28 12:48:55 -0400 -#- Natacha Beck -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/text_file/views/_text_file.html.erb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/tsv_file/tsv_file.rb @@ -773,6 +803,8 @@ f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierr 98eee53ecd68e4d4d33a413f4d1f63a3ced18a45 -#- 2021-04-28 13:13:09 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/xml_file/views/_xml_raw.html.erb bb27117a32906aeb1f6fe7f8b5928a6d31b414ad -#- 2019-09-18 12:11:42 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/xml_file/xml_file.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/yaml_file/yaml_file.rb +78bc40e9d7d96e2b48dd607d6456bb47121a5257 -#- 2022-08-23 16:09:57 -0400 -#- tperezdevelopment -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/zip_archive/views/_zip_archive.html.erb +78bc40e9d7d96e2b48dd607d6456bb47121a5257 -#- 2022-08-23 16:09:57 -0400 -#- tperezdevelopment -#- BrainPortal/cbrain_plugins/cbrain-plugins-base/userfiles/zip_archive/zip_archive.rb e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/installed-plugins/boutiques_descriptors/.gitignore 41562bbad4c91100c05a806e0d1eee779dff1783 -#- 2015-01-28 18:35:17 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/installed-plugins/cbrain_task/.gitignore 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/installed-plugins/cbrain_task/cbrain_task_class_loader.rb @@ -783,7 +815,7 @@ bc6b860360346254d6edcefb1ea20e6fd847eeb8 -#- 2018-02-20 13:20:11 -0500 -#- Pierr 93c790b019c4ac59c3710eefac5d2f0de06fe59c -#- 2020-07-28 17:26:09 -0400 -#- Pierre Rioux -#- BrainPortal/cbrain_plugins/plugins-report.sh 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config.ru 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/config/.gitignore -bc6b860360346254d6edcefb1ea20e6fd847eeb8 -#- 2018-02-20 13:20:11 -0500 -#- Pierre Rioux -#- BrainPortal/config/application.rb +db8b1958d2c89ab4107cc6912b4969db0b542377 -#- 2022-08-25 15:52:02 -0400 -#- Pierre Rioux -#- BrainPortal/config/application.rb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/boot.rb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/config/cable.yml 9b921827aed320be83c8901b754065bb5ab5d78b -#- 2021-11-17 15:01:55 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/init_rc.rb @@ -791,15 +823,15 @@ c75f35f789d2533de8aaa917a38e5971ecf2e8f6 -#- 2018-09-27 13:31:00 -0400 -#- Pierr da85d9f66b5803715eff22ed52f9191df5d386b7 -#- 2016-07-27 13:43:08 -0400 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/bourreau_console.rb e6a5841e20ff7f8d87142b8a60edea8d4da40fd1 -#- 2018-11-21 16:20:14 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/current_user_project.rb 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- 
BrainPortal/config/console_rc/lib/fast_finder.rb -843d04f118163f190ab2acb790affb77ef8089ca -#- 2020-12-17 16:32:50 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb +fa7f840e676874df8e7836ce4d291449f1178061 -#- 2023-01-19 11:21:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/interactive_bourreau_control.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/logger_rc.rb e6a5841e20ff7f8d87142b8a60edea8d4da40fd1 -#- 2018-11-21 16:20:14 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/old_bourreau_control.rb -56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/pretty_view.rb +a40d5e3d5be9bdd0c1cbef7b5403948145d85c43 -#- 2022-06-09 10:34:16 -0400 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/pretty_view.rb 16c34d3b2875e63363acd73674c2fc2e7e54b82b -#- 2017-03-09 13:30:38 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/print_log.rb -1575a85046bfe40a34d5640128e287929a7ffd34 -#- 2019-11-15 14:14:24 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/reports.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/reports.rb 16c34d3b2875e63363acd73674c2fc2e7e54b82b -#- 2017-03-09 13:30:38 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/shortcuts.rb 357582e9163013e4425fe6262d37686e0b4c3205 -#- 2019-07-24 13:50:59 -0400 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/timing.rb -598063ef6c013421ed34a9202fb6ab9e19e1fe33 -#- 2020-12-17 15:05:26 -0500 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/wirble_hirb_looksee.rb +7e46a61f2e6dcde93ad9af94452aa3e93acaa75a -#- 2022-06-08 10:46:39 -0400 -#- Pierre Rioux -#- BrainPortal/config/console_rc/lib/wirble_hirb_looksee.rb c7d4662266d4eb7485c3611989d3a6283d59e014 -#- 2015-03-16 17:17:05 -0400 -#- Pierre Rioux -#- BrainPortal/config/database.yml.TEMPLATE 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/environment.rb b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierre Rioux -#- BrainPortal/config/environments/development.rb @@ -819,27 +851,27 @@ d652f273e94faf068ba1729143c03ef4148f0e02 -#- 2016-12-13 17:03:01 -0500 -#- Pierr 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/application_controller_renderer.rb 77ed69a4410e36122278072184999daf85638a80 -#- 2020-02-18 17:10:08 -0500 -#- candicecz -#- BrainPortal/config/initializers/assets.rb 4e2f730c46c525e4a7c9a21bf0e0f87a6c37e016 -#- 2011-06-09 13:08:52 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/backtrace_silencers.rb -e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/cbrain.rb +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/cbrain.rb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/cbrain_deprecation_tmp.rb 81744bf3522320596acc7802c5991e64d97d3cb6 -#- 2010-10-19 19:24:57 +0000 -#- prioux -#- BrainPortal/config/initializers/config_portal.rb.TEMPLATE 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- 
BrainPortal/config/initializers/cookies_serializer.rb 1792e25fc57d57e21ab49e090791f9ad434c32c1 -#- 2020-03-06 15:53:10 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/filter_parameter_logging.rb -40cdaed3acfe10dc6e10c8b1dc5b441979d9b5cb -#- 2019-09-17 16:33:21 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/inflections.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/inflections.rb c0c8675ccc3355bc8c104d222b2a200584a749e3 -#- 2017-09-20 13:11:29 -0400 -#- Greg Kiar -#- BrainPortal/config/initializers/initialize_console.rb bf263658c83064a2d5590140920791ede9dcb9ef -#- 2018-11-16 15:05:06 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/listen_temp_monkeypatch.rb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/mime_types.rb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/new_framework_defaults.rb f7e64b976d1182ea6e7b7cb6882e474617f39dca -#- 2014-07-17 14:56:13 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/rack_config.rb 4b61c48b800a5dde9d105044ae91c6be013d1746 -#- 2021-09-09 16:03:02 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/session_store.rb -67965179387bf929919dc4174e1aee60c9733008 -#- 2020-08-31 17:30:29 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/validation_portal.rb +05478d8123f2cacbad66835e9acb9f4995e057ca -#- 2022-07-28 18:21:02 -0400 -#- Pierre Rioux -#- BrainPortal/config/initializers/validation_portal.rb ce3ba6484541b1db17fe542aaaba0eeea0b4e6eb -#- 2018-02-07 18:18:10 -0500 -#- Pierre Rioux -#- BrainPortal/config/initializers/wrap_parameters.rb -712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/config/locales/en.yml +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/config/locales/en.yml 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/config/puma.rb -a3cbb977f0a16799006c5a5621aa03a21a79d684 -#- 2022-01-13 13:16:05 -0500 -#- Pierre Rioux -#- BrainPortal/config/routes.rb -1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/config/secrets.yml +17fb2760f8cd01da44af2c5fe6f3b48963254128 -#- 2023-01-21 13:17:20 -0500 -#- Pierre Rioux -#- BrainPortal/config/routes.rb +db8b1958d2c89ab4107cc6912b4969db0b542377 -#- 2022-08-25 15:52:02 -0400 -#- Pierre Rioux -#- BrainPortal/config/secrets.yml 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/config/spring.rb 38d6c88a3f83a7f85bf3bf9b523c7a0b04c6b008 -#- 2022-01-13 15:15:12 -0500 -#- Pierre Rioux -#- BrainPortal/data_dumps/.gitignore -5a676e1c42ef0724672a7346af1d4cd2459f30de -#- 2021-12-31 13:38:00 -0500 -#- Pierre Rioux -#- BrainPortal/data_dumps/README.md +7773447f81899b211353593d4772b4080af46090 -#- 2023-01-12 12:11:06 -0500 -#- Pierre Rioux -#- BrainPortal/data_dumps/README.md 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/db/migrate/20081118191942_create_userfiles.rb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/db/migrate/20081118200238_create_users.rb 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- 
BrainPortal/db/migrate/20081119163831_add_role.rb @@ -1050,27 +1082,40 @@ cbb1e4b160e0b779057cf67d2c6212e974bd80f6 -#- 2020-11-19 15:46:15 -0500 -#- Pierr 56cecada04d2118d3fea297159124cac40db85ea -#- 2021-02-11 14:10:01 -0500 -#- Pierre Rioux -#- BrainPortal/db/migrate/20210207232542_add_browse_path_to_userfile.rb e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- BrainPortal/db/migrate/20210905165005_add_descriptor_basename_to_tool_configs.rb 167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/db/migrate/20211202163418_add_cloud_storage_endpoint_to_data_providers.rb -167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/db/schema.rb +4d297f77369622bf11f2a1a74d6e1788f3db2bc5 -#- 2022-03-02 14:54:03 -0500 -#- Pierre Rioux -#- BrainPortal/db/migrate/20220302191553_make_userfile_hidden_flag_not_null.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- BrainPortal/db/migrate/20220315153324_add_disk_quotas.rb +6c79507aa2f80e5e791a63b09f0c6e86985418cb -#- 2022-03-24 15:28:56 -0400 -#- Pierre Rioux -#- BrainPortal/db/migrate/20220324155533_add_group_data_tracking_table.rb +8a01514bfe8c13a31a92fd6598b543204eeb0b2c -#- 2022-04-18 11:21:17 -0400 -#- Pierre Rioux -#- BrainPortal/db/migrate/20220418144014_add_neurohub_portal_dashboard_messages.rb +b1137ba4ab376d7cd598502a67bb3e58f7ee9a97 -#- 2022-09-14 12:56:33 -0400 -#- Pierre Rioux -#- BrainPortal/db/migrate/20220913183448_rename_recon_all.rb +ea973f14d98d8bfe5af9f9b8a658a7371b7d7e9e -#- 2022-10-14 10:36:32 -0400 -#- Natacha Beck -#- BrainPortal/db/migrate/20221007094232_remove_old_remote_ressources_attributes.rb +ea973f14d98d8bfe5af9f9b8a658a7371b7d7e9e -#- 2022-10-14 10:36:32 -0400 -#- Natacha Beck -#- BrainPortal/db/schema.rb 86770b0be8236b3e1653207839b2c2d641d7885c -#- 2018-03-21 12:01:20 -0400 -#- Pierre Rioux -#- BrainPortal/db/seeds.rb -04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/db/seeds_dev.rb -1915a6d98691933156527eaca30d704236345853 -#- 2022-01-27 15:28:13 -0500 -#- Serge -#- BrainPortal/db/seeds_test_api.rb -04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/db/seeds_test_bourreau.rb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/db/seeds_dev.rb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/db/seeds_test_api.rb +e9a724b07b78d57d5685ee67149389e35da4a2f8 -#- 2022-04-14 16:10:22 -0400 -#- Pierre Rioux -#- BrainPortal/db/seeds_test_bourreau.rb a1848dfdd07ce34f15d55e3bed31cac13a7005d1 -#- 2011-09-30 14:35:00 -0400 -#- Pierre Rioux -#- BrainPortal/doc/.gitignore 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/act_rec_log.rb 04c513d423b42a856c4493f105465b16bef3feeb -#- 2018-02-01 16:21:32 -0500 -#- Pierre Rioux -#- BrainPortal/lib/act_rec_meta_data.rb 07974a254ae605ce1a0403aa0ecf4ffa5b1959f4 -#- 2021-01-21 15:46:01 -0500 -#- Natacha Beck -#- BrainPortal/lib/api_helpers.rb 1bf01a4de26b607385517cb0094c08a37ad3aeb0 -#- 2017-12-07 19:18:41 -0500 -#- Pierre Rioux -#- BrainPortal/lib/assets/.keep 04667ab63445de4da4ae45332599edbf2cfdbcd2 -#- 2019-06-07 13:15:35 -0400 -#- Pierre Rioux -#- BrainPortal/lib/authenticated_system.rb +96e7e41e2d7f24c610ce530b1febe69981241865 
-#- 2022-06-02 15:24:23 -0400 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_allowed_exit_codes.rb 38d6c88a3f83a7f85bf3bf9b523c7a0b04c6b008 -#- 2022-01-13 15:15:12 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_boot_integrator.rb -6b4c52ac1c309553d427c72d08bd66508789204d -#- 2021-12-15 14:39:46 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_file_name_matcher.rb -0dcb5000189278b5240562f52666a364de1c0f3d -#- 2022-01-03 11:19:32 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_file_type_verifier.rb -f00997eb767f27e6a17f23a6267d2ddbb0d22b0b -#- 2022-01-25 11:56:35 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_output_file_type_setter.rb -a16ee1122476fe433314f20182e00ed239dab202 -#- 2022-01-27 15:20:20 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_post_processing_cleaner.rb -a16ee1122476fe433314f20182e00ed239dab202 -#- 2022-01-27 15:20:20 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_support.rb +17c86cf6dca44a080f5bfea39b94b1237336798e -#- 2022-02-07 13:50:26 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_file_name_matcher.rb +17c86cf6dca44a080f5bfea39b94b1237336798e -#- 2022-02-07 13:50:26 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_file_type_verifier.rb +aacfa25ed1aaa071be10bb03016672afe174618e -#- 2022-08-23 14:47:22 -0400 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_forced_output_browse_path.rb +5110c57685f2d6ab68b29e928c328fde2b7a98a1 -#- 2022-03-12 09:10:22 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_input_cache_cleaner.rb +67594597da1113c4252fa39f3dbe0de804441e2e -#- 2022-10-07 11:23:03 -0400 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_input_subdir_maker.rb +a1bd2e5b64b314f91ecc54dd34a87f4442b07185 -#- 2023-01-17 15:53:22 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_output_cache_cleaner.rb +17c86cf6dca44a080f5bfea39b94b1237336798e -#- 2022-02-07 13:50:26 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_output_file_type_setter.rb +6b7887a8978cff546feaaff536bd5c9ed96a607d -#- 2022-09-14 11:46:58 -0400 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_output_filename_renamer.rb +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_post_processing_cleaner.rb +1e5b8adf00e05bb5db204a0711d432d0fed3bce6 -#- 2022-08-29 12:22:50 -0400 -#- Pierre Rioux -#- BrainPortal/lib/boutiques_support.rb 8901480c7544d91f86bbb36a29ec1eec532707a1 -#- 2021-03-01 13:29:25 -0500 -#- MontrealSergiy -#- BrainPortal/lib/browse_provider_file_caching.rb e26b6e4c070db0c6914b5df26e97531043cda4b7 -#- 2019-07-03 16:04:42 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_carmin_error.rb 70403d4a9074bc93d2028f67d66b459719a75cb4 -#- 2015-05-06 15:23:08 -0400 -#- Natacha Beck -#- BrainPortal/lib/cbrain_checker.rb 970fd506142f19c2b27feb929ff285174556bf47 -#- 2012-08-07 14:59:21 -0400 -#- Tarek Sherif -#- BrainPortal/lib/cbrain_delete_restriction_error.rb +e3bb119fe27c3b38a9b8fef427679abb77e1b69f -#- 2022-03-16 15:17:04 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_disk_quota_exceeded.rb ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 2012-08-07 14:58:59 -0400 -#- Tarek Sherif -#- BrainPortal/lib/cbrain_error.rb 70403d4a9074bc93d2028f67d66b459719a75cb4 -#- 2015-05-06 15:23:08 -0400 -#- Natacha Beck -#- BrainPortal/lib/cbrain_exception.rb f04ea435ded1531cf7bec78f1e1cb618b9edd8da -#- 2019-07-07 15:39:29 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_extensions/active_record_extensions/abstract_model_methods.rb @@ -1096,9 +1141,9 @@ bed4171b3c6f15b655aa47bdd5e7a3243b6b642f -#- 2016-08-16 
15:10:31 -0400 -#- Andre 2abc783c84c4175708b412761f15f97cdd1511d4 -#- 2021-02-12 16:19:50 -0500 -#- MontrealSergiy -#- BrainPortal/lib/cbrain_file_revision.rb 8966635b69fc29455754e12460c727b9c2fea83e -#- 2020-04-02 21:13:54 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_license_exception.rb ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 2012-08-07 14:58:59 -0400 -#- Tarek Sherif -#- BrainPortal/lib/cbrain_notice.rb -e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_system_checks.rb +537995be81ab061c6f9030dc54f94b471b49fd03 -#- 2022-02-20 14:32:53 -0500 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_system_checks.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/cbrain_task_form_builder.rb -38d52e397d7bd45d552f074d887f5958e4a108fc -#- 2021-11-18 13:09:35 -0500 -#- Natacha Beck -#- BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb +1c156d565c54f3a539a0566993c1f0a12900dd72 -#- 2022-11-16 14:10:31 -0500 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_task_generators/schema_task_generator.rb e0998dc6901bc3a47501d9e45cd4db6e02a1cafb -#- 2019-09-10 17:01:02 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_task_generators/schemas/boutiques.schema.json e981e4fcadb9f7f3716f90516354441561abcd35 -#- 2021-07-27 11:31:55 -0400 -#- Pierre Rioux -#- BrainPortal/lib/cbrain_task_generators/templates/bourreau.rb.erb 2a20519bd9c525da9b20ae34da1da670434d68b5 -#- 2016-08-11 15:11:28 -0400 -#- Tristan A.A -#- BrainPortal/lib/cbrain_task_generators/templates/edit_help.html.erb @@ -1123,13 +1168,13 @@ e9b89b1e9da949aa642f3ee96d85b1e3dedd9d2b -#- 2021-06-11 16:30:11 -0400 -#- Pierr 7fbed31b2a8fd110c7fbcb18a46cb614c75cdb2d -#- 2015-01-28 22:15:16 -0500 -#- Pierre Rioux -#- BrainPortal/lib/generators/cbrain_task/templates/tool_info.html.erb b2f67b0b609a16cda5877491fbc05153be46a535 -#- 2015-07-28 18:00:36 -0400 -#- Remi Bernard -#- BrainPortal/lib/generators/descriptor_task/USAGE 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/lib/generators/descriptor_task/descriptor_task_generator.rb -c1b63a712fc348d42362204b2215cfaef178c0f8 -#- 2021-11-23 12:17:34 -0500 -#- Pierre Rioux -#- BrainPortal/lib/globus_helpers.rb +a8162d742f2f63a21806893100c5b6aeef45f22b -#- 2022-07-28 13:38:38 -0400 -#- Pierre Rioux -#- BrainPortal/lib/globus_helpers.rb 65d87ed48def4ba99bcb5ad825770395b89dde5d -#- 2016-09-22 12:50:31 -0400 -#- Pierre Rioux -#- BrainPortal/lib/http_user_agent.rb 81d8358efbe761a87e26c96f15c914d06afa8042 -#- 2020-04-12 12:53:13 -0400 -#- Pierre Rioux -#- BrainPortal/lib/identifier_format_validator.rb ff28ae45e27417463a39d17b532c8f7ae7289658 -#- 2012-08-07 14:58:59 -0400 -#- Tarek Sherif -#- BrainPortal/lib/javascript_option_setup.rb 06fbd979ae2102fc95ed1e98c30e040ddb2142ad -#- 2018-11-16 15:36:41 -0500 -#- Pierre Rioux -#- BrainPortal/lib/license_agreements.rb c1c5c30eda2b2d4b2c3bf2b82b5b0bc50f738299 -#- 2015-04-20 14:18:23 -0400 -#- Natacha Beck -#- BrainPortal/lib/message_helpers.rb -24c25b3474801590604484e5f76fda4a954fea13 -#- 2020-04-08 17:02:58 -0400 -#- Pierre Rioux -#- BrainPortal/lib/models_report.rb +af804aea9a4e7e78f78c8aac99016984ee361b56 -#- 2023-01-23 12:29:12 -0500 -#- Pierre Rioux -#- BrainPortal/lib/models_report.rb f8252be341146b311b05ab85184f081cde901ac3 -#- 2016-09-06 16:59:30 -0400 -#- Pierre Rioux -#- BrainPortal/lib/name_format_validator.rb 783f532abb91f341b2725b6263209513a463a372 -#- 2021-11-24 
15:39:35 -0500 -#- Pierre Rioux -#- BrainPortal/lib/neurohub_helpers.rb 7ced7a6a6922487da5ddae6dc4b147a9052643d6 -#- 2016-11-18 16:17:10 -0500 -#- Pierre Rioux -#- BrainPortal/lib/no_vm_available_error.rb @@ -1138,12 +1183,12 @@ c1b63a712fc348d42362204b2215cfaef178c0f8 -#- 2021-11-23 12:17:34 -0500 -#- Pierr cbeef44b0e23999ceda76ae6310a02eaef298b8b -#- 2018-01-04 14:18:16 -0500 -#- Pierre Rioux -#- BrainPortal/lib/permission_helpers.rb 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/lib/persistent_selection.rb bacba8bee6a060576aa5ee69f06cd1e079290e24 -#- 2020-03-03 14:11:40 -0500 -#- Pierre Rioux -#- BrainPortal/lib/portal_sanity_checks.rb -1ff37b7d395e1308b99ad6b73258d0809fbe1195 -#- 2021-05-21 14:39:22 -0400 -#- Pierre Rioux -#- BrainPortal/lib/portal_system_checks.rb +05478d8123f2cacbad66835e9acb9f4995e057ca -#- 2022-07-28 18:21:02 -0400 -#- Pierre Rioux -#- BrainPortal/lib/portal_system_checks.rb e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- BrainPortal/lib/recoverable_task.rb 88f8c204f5c9d6b6ffc5963436a753009f89aa04 -#- 2019-06-07 13:46:42 -0400 -#- Pierre Rioux -#- BrainPortal/lib/request_helpers.rb 7f9644d1f4b6a900b7c594fd3091c3eb248c5a73 -#- 2020-04-09 12:33:28 -0400 -#- Pierre Rioux -#- BrainPortal/lib/resource_access.rb e58779eae25346bd5f8e46998f735a23630389f2 -#- 2021-08-22 15:47:02 -0400 -#- Pierre Rioux -#- BrainPortal/lib/restartable_task.rb -167804e09092aeee3048592447d95c6ad31b09a7 -#- 2021-12-02 12:39:34 -0500 -#- Pierre Rioux -#- BrainPortal/lib/s3_sdkv3_connection.rb +72401b18b1c507527b93f271fd5ea7a2b59e219d -#- 2022-10-03 13:05:34 -0400 -#- Pierre Rioux -#- BrainPortal/lib/s3_sdkv3_connection.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/scir.rb b7126e2d03761ba2593aeffe891bae7f73e30423 -#- 2018-11-06 11:44:23 -0500 -#- Pierre Rioux -#- BrainPortal/lib/scir_amazon.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/scir_cloud.rb @@ -1155,18 +1200,20 @@ a09f501e345df16f3a05278c7e04c661b0e3739a -#- 2015-12-16 14:14:51 -0500 -#- Trist 5aa3655ab722465ce8981813e50f187e6a1fc954 -#- 2016-09-06 16:33:35 -0400 -#- Pierre Rioux -#- BrainPortal/lib/scir_sharcnet.rb 2aabd59cc9f14ecf79255908d9ef1f66695b2b7c -#- 2020-04-02 10:45:28 -0400 -#- Pierre Rioux -#- BrainPortal/lib/scir_slurm.rb 81add9599705a5219617814a4fec5cb023004cf9 -#- 2017-12-07 19:19:03 -0500 -#- Pierre Rioux -#- BrainPortal/lib/scir_unix.rb -14f4e56338eece4f06fa9499f90ddf305df38d04 -#- 2021-01-29 12:37:07 -0500 -#- MontrealSergiy -#- BrainPortal/lib/session_helpers.rb +ca0d9a40fa5d6ed3dbfb8bea7b6d0e203b022f5a -#- 2022-05-03 17:00:47 -0400 -#- Pierre Rioux -#- BrainPortal/lib/session_helpers.rb e9b89b1e9da949aa642f3ee96d85b1e3dedd9d2b -#- 2021-06-11 16:30:11 -0400 -#- Pierre Rioux -#- BrainPortal/lib/smart_data_provider_interface.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/ssh_agent.rb -1792e25fc57d57e21ab49e090791f9ad434c32c1 -#- 2020-03-06 15:53:10 -0500 -#- Pierre Rioux -#- BrainPortal/lib/ssh_key.rb -f46666350ad7ff4074edabb2b5fd75b883d680c0 -#- 2021-06-07 16:39:19 -0400 -#- Pierre Rioux -#- BrainPortal/lib/ssh_master.rb +0e38917df7e388b14bcdb9b1d0a573f71e837983 -#- 2023-01-20 12:39:24 -0500 -#- Pierre Rioux -#- BrainPortal/lib/ssh_key.rb +885aa4fb7ece5bd1d2da800c081151b77d75ae61 -#- 
2022-09-15 13:46:02 -0400 -#- Pierre Rioux -#- BrainPortal/lib/ssh_master.rb 70403d4a9074bc93d2028f67d66b459719a75cb4 -#- 2015-05-06 15:23:08 -0400 -#- Natacha Beck -#- BrainPortal/lib/subclass_validator.rb 01ff22899df226c5831a0ad8c233a1f3f04c77a4 -#- 2018-08-09 15:28:15 -0400 -#- Nicholas Nadeau, P.Eng., AVS -#- BrainPortal/lib/subpath_format_validator.rb +ca0d9a40fa5d6ed3dbfb8bea7b6d0e203b022f5a -#- 2022-05-03 17:00:47 -0400 -#- Pierre Rioux -#- BrainPortal/lib/switch_group_helpers.rb c637666465dfd4deb3339634fbe50198dbca574d -#- 2019-05-06 12:50:19 -0400 -#- Pierre Rioux -#- BrainPortal/lib/tasks/brainportal_docs.rake 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/lib/tasks/brainportal_sanity_check.rake 298b1d87550834b7132cc4b4f884bea42a6fbe90 -#- 2020-08-31 18:03:48 -0400 -#- Pierre Rioux -#- BrainPortal/lib/tasks/cbrain_nagios_checker.rake e5df01b9a679fcbc186eaf8bb180499c9ebd1e4d -#- 2021-09-05 16:41:01 -0400 -#- Pierre Rioux -#- BrainPortal/lib/tasks/cbrain_plugins.rake -ffad932287be1a353dc897abc17d2627e32f6c81 -#- 2022-01-24 15:33:01 -0500 -#- Pierre Rioux -#- BrainPortal/lib/tasks/resource_usage_serialization.rake +4edc60f7bcce78f4dc746ed663c0fabe50270562 -#- 2022-03-04 15:14:28 -0500 -#- Pierre Rioux -#- BrainPortal/lib/tasks/clean_broken_associations.rake +7773447f81899b211353593d4772b4080af46090 -#- 2023-01-12 12:11:06 -0500 -#- Pierre Rioux -#- BrainPortal/lib/tasks/resource_usage_serialization.rake 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/lib/tasks/seed_dev.rake 8e02763d3016d3bebb7cf95026831c5a19f55ebc -#- 2018-10-26 17:13:44 -0400 -#- Pierre Rioux -#- BrainPortal/lib/tasks/seed_test_api.rake 545ee88ef19fed72f3b9ecb5d0b0a370b436d471 -#- 2016-07-19 16:32:40 -0400 -#- Tristan A.A -#- BrainPortal/lib/tasks/seed_test_bourreau.rake @@ -1200,7 +1247,6 @@ b4b701b40cb5b251fd3aa08b8bee06699d5ca18b -#- 2014-06-02 13:58:41 -0400 -#- Pierr 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/public/doc/userfiles/file_formats_overview.html 2eceeb92afa749dd8c5c28082493fcdc0111af95 -#- 2012-01-26 13:46:35 -0500 -#- Pierre Rioux -#- BrainPortal/public/doc/userfiles/tools_overview.html 3573666665d8ae3260c6c8d65234e114a7252fb8 -#- 2012-04-12 18:08:27 -0400 -#- Tarek Sherif -#- BrainPortal/public/images/ajax-loader.gif -df0cb12143a0df57e20c62469b3de1918a558bb2 -#- 2013-10-31 17:03:44 -0400 -#- Tarek Sherif -#- BrainPortal/public/images/brainbrowser-loader.gif 0bef0c3ec375c8ea4fbb9657194481cb01ee8eb9 -#- 2016-09-02 16:53:17 -0400 -#- Pierre Rioux -#- BrainPortal/public/images/credits/CANARIE_h.jpg c230d36eb91f09adeae55578abd7a1ab60b0cc32 -#- 2016-09-16 17:14:59 -0400 -#- Pierre Rioux -#- BrainPortal/public/images/credits/brain_canada_logo.png c230d36eb91f09adeae55578abd7a1ab60b0cc32 -#- 2016-09-16 17:14:59 -0400 -#- Pierre Rioux -#- BrainPortal/public/images/credits/cfi_logo.png @@ -1242,7 +1288,7 @@ cf893e6ad224a34b5c40728dc7656cafb5f44605 -#- 2010-03-26 18:05:31 +0000 -#- tsher d1da3eff0c044c3ed474cde1cc8cbc54352027a9 -#- 2015-06-17 14:24:45 -0400 -#- Remi Bernard -#- BrainPortal/public/images/ui-black-icons.png d1da3eff0c044c3ed474cde1cc8cbc54352027a9 -#- 2015-06-17 14:24:45 -0400 -#- Remi Bernard -#- BrainPortal/public/images/ui-blue-icons.png d1da3eff0c044c3ed474cde1cc8cbc54352027a9 -#- 2015-06-17 14:24:45 -0400 -#- Remi Bernard -#- BrainPortal/public/images/ui-white-icons.png -1f1195e67b4a848f6a67544690da46ea7e8382e6 -#- 
2021-05-20 17:46:45 -0400 -#- Pierre Rioux -#- BrainPortal/public/javascripts/cbrain.js +7b1c3d3fbf2aba16b4a43a725b35435381d87ab1 -#- 2022-06-20 16:00:22 -0400 -#- Natacha Beck -#- BrainPortal/public/javascripts/cbrain.js ddf208bc21042093f23f3bb57775287890eeb4e8 -#- 2016-03-08 16:15:37 -0500 -#- Pierre Rioux -#- BrainPortal/public/javascripts/cbrain/bourreaux/fetch_row.js a5ca12d9e18b21f955b703f9cff1602ee0b07884 -#- 2018-02-16 17:32:49 -0500 -#- Pierre Rioux -#- BrainPortal/public/javascripts/cbrain/shared/selection_database.js ddf208bc21042093f23f3bb57775287890eeb4e8 -#- 2016-03-08 16:15:37 -0500 -#- Pierre Rioux -#- BrainPortal/public/javascripts/cbrain/tasks/batch_list.js @@ -1251,7 +1297,7 @@ fe981d227cba3a562019965b47b32b7c1db8a2ee -#- 2016-03-14 16:38:49 -0400 -#- Pierr 2408e08b13343fd759a0d1b5d6b678a19b905310 -#- 2015-08-17 16:05:06 -0400 -#- Pierre Rioux -#- BrainPortal/public/javascripts/dynamic-table.js dcc9f546f0359d737a04feb3ce372da09cee829d -#- 2012-06-28 16:00:14 -0400 -#- Pierre Rioux -#- BrainPortal/public/javascripts/jquery.tablesorter.min.js 9111a041cbe70ebe2b9e3d55353a97c73b244791 -#- 2020-08-12 10:03:34 -0400 -#- Natacha Beck -#- BrainPortal/public/javascripts/nh_upload.js -5df9d47686799e71c803ac66c3a560c5b9a39194 -#- 2020-10-26 16:36:03 -0400 -#- Natacha Beck -#- BrainPortal/public/javascripts/userfiles.js +05f2cff612838e19eb5d31a2e7b6f28df8211057 -#- 2022-10-14 10:34:05 -0400 -#- Natacha Beck -#- BrainPortal/public/javascripts/userfiles.js f2136717fbeb4a5ce0c30ea421d0960ba040a7db -#- 2015-03-23 18:20:40 -0400 -#- Pierre Rioux -#- BrainPortal/public/licenses/.gitignore f2136717fbeb4a5ce0c30ea421d0960ba040a7db -#- 2015-03-23 18:20:40 -0400 -#- Pierre Rioux -#- BrainPortal/public/licenses/README.txt 712239cba27028969ff7cf2f2191af2e6cc0f3f2 -#- 2017-12-07 19:18:58 -0500 -#- Pierre Rioux -#- BrainPortal/public/robots.txt @@ -1269,6 +1315,10 @@ b1a0d5a20b2668342a8baa0af2ed4a3b5e8cf1ab -#- 2019-05-07 18:35:15 -0400 -#- Pierr b1a0d5a20b2668342a8baa0af2ed4a3b5e8cf1ab -#- 2019-05-07 18:35:15 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-5.1.1-swagger.yaml 7580b1a79c75c3ea12f9924fb9a7eeb29a56a472 -#- 2019-09-03 16:33:14 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-5.1.3-swagger.json 7580b1a79c75c3ea12f9924fb9a7eeb29a56a472 -#- 2019-09-03 16:33:14 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-5.1.3-swagger.yaml +7a8c8da62e457ce95502ca494aedf3d5e956a9bd -#- 2022-06-14 12:20:12 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-6.2.0-swagger.json +7a8c8da62e457ce95502ca494aedf3d5e956a9bd -#- 2022-06-14 12:20:12 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-6.2.0-swagger.yaml +7a8c8da62e457ce95502ca494aedf3d5e956a9bd -#- 2022-06-14 12:20:12 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-6.2.0.1-swagger.json +7a8c8da62e457ce95502ca494aedf3d5e956a9bd -#- 2022-06-14 12:20:12 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/cbrain-6.2.0.1-swagger.yaml 518d3b7b73b4f40fa6591087bdc7010184bbe7d5 -#- 2016-09-08 17:52:40 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/css/print.css 518d3b7b73b4f40fa6591087bdc7010184bbe7d5 -#- 2016-09-08 17:52:40 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/css/reset.css 518d3b7b73b4f40fa6591087bdc7010184bbe7d5 -#- 2016-09-08 17:52:40 -0400 -#- Pierre Rioux -#- BrainPortal/public/swagger/css/screen.css @@ -1330,7 +1380,7 @@ dc2eaf46e933770354e47c8eb49ead6b6a14ae81 -#- 2019-05-17 15:12:30 -0400 -#- Pierr a805a234eeed4f234677202fc42c6b46b8256896 -#- 
2015-04-08 12:56:35 -0400 -#- Natacha Beck -#- BrainPortal/spec/fixtures/.gitignore 679af6df2fff678f1f72c5cf0f26f4045822104e -#- 2020-11-19 20:38:17 -0500 -#- Pierre Rioux -#- BrainPortal/spec/mailers/cbrain_mailer_spec.rb 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/spec/models/active_record_log_spec.rb -eb5251a923e923218eb839c1cb0104c27f5236f5 -#- 2018-03-06 14:53:23 -0500 -#- Natacha Beck -#- BrainPortal/spec/models/bourreau_spec.rb +c52ff3d896e061bcfddcd0373562cf4ac5c2ff76 -#- 2022-04-14 16:31:28 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/bourreau_spec.rb 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/brain_portal_spec.rb b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierre Rioux -#- BrainPortal/spec/models/cbrain_session_spec.rb 233bedc7abd20df4500e4edf039f105f4a14f567 -#- 2016-06-20 14:47:22 -0400 -#- Andrew Doyle -#- BrainPortal/spec/models/cbrain_task_spec.rb @@ -1339,7 +1389,7 @@ b0c034565dc45e84f9a61175d82c49c103a33e0a -#- 2017-12-07 19:19:05 -0500 -#- Pierr 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/spec/models/en_cbrain_local_data_provider_spec.rb 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/en_cbrain_smart_data_provider_spec.rb 64be7b629b38b421828ec9175374b52f5c1980b0 -#- 2020-03-10 18:35:39 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/en_cbrain_ssh_data_provider_spec.rb -7cfa5c2a0fd77b5c386ad0b212f422527d83f0e1 -#- 2020-04-11 12:34:41 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/file_collection_spec.rb +a0e192db0583bac2ce5942558ffaefeb085243c4 -#- 2022-03-18 11:33:10 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/file_collection_spec.rb 7cfa5c2a0fd77b5c386ad0b212f422527d83f0e1 -#- 2020-04-11 12:34:41 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/file_info_spec.rb 925b9ec5790a125abf7c53eaf10f7de4fd42c02f -#- 2017-12-07 19:18:42 -0500 -#- Natacha Beck -#- BrainPortal/spec/models/flat_dir_local_data_provider_spec.rb af4c4ee7161b371b63740a95926c30adf08e7c69 -#- 2020-03-04 13:30:14 -0500 -#- Natacha Beck -#- BrainPortal/spec/models/group_spec.rb @@ -1348,7 +1398,7 @@ af4c4ee7161b371b63740a95926c30adf08e7c69 -#- 2020-03-04 13:30:14 -0500 -#- Natac a18d0ebf4c40f6030dd233f2810ee2da9cf4b104 -#- 2018-11-06 18:33:45 -0500 -#- Pierre Rioux -#- BrainPortal/spec/models/params_errors_spec.rb a946d8ac3d0e19768e08b4137be79a6daa041560 -#- 2021-08-05 18:30:49 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/portal_task_spec.rb 2abdc9daec787bc883f6d261432afeb9f7f3a5b2 -#- 2021-08-22 19:55:28 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/remote_resource_info_spec.rb -ad7428f4d166daf281e6714c1fe28ffb554362fd -#- 2019-10-08 12:08:40 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/remote_resource_spec.rb +ea973f14d98d8bfe5af9f9b8a658a7371b7d7e9e -#- 2022-10-14 10:36:32 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/remote_resource_spec.rb 785d73ae1ecbc13e4afbde38d457d1ce4debf71d -#- 2021-08-02 16:52:22 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/restricted_hash_spec.rb 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/single_file_spec.rb 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/site_spec.rb @@ -1356,7 +1406,7 @@ 
ad7428f4d166daf281e6714c1fe28ffb554362fd -#- 2019-10-08 12:08:40 -0400 -#- Natac 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/models/system_group_spec.rb da3c5b4347357824167d4ebc3519aecc112fc297 -#- 2019-06-12 11:27:46 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/tag_spec.rb d7a8f5649874a35aae0d95641869d553d0b3f8d7 -#- 2019-05-14 12:40:03 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/task_custom_filter_spec.rb -f81239b0450627d83286b438d1fcb3ae5af1a598 -#- 2016-08-18 11:19:43 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/tool_config_spec.rb +f176c35d273d4df414595aedb30ce7af9370c92c -#- 2023-01-12 17:15:09 -0500 -#- Serge -#- BrainPortal/spec/models/tool_config_spec.rb da3c5b4347357824167d4ebc3519aecc112fc297 -#- 2019-06-12 11:27:46 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/tool_spec.rb 7f9644d1f4b6a900b7c594fd3091c3eb248c5a73 -#- 2020-04-09 12:33:28 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/user_spec.rb d7a8f5649874a35aae0d95641869d553d0b3f8d7 -#- 2019-05-14 12:40:03 -0400 -#- Pierre Rioux -#- BrainPortal/spec/models/userfile_custom_filter_spec.rb @@ -1368,7 +1418,7 @@ af4c4ee7161b371b63740a95926c30adf08e7c69 -#- 2020-03-04 13:30:14 -0500 -#- Natac 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/spec/modules/act_rec_meta_data_spec.rb 4ae88132ae49c202d5363fe1648d1f4861c81920 -#- 2015-04-29 16:40:11 -0400 -#- Natacha Beck -#- BrainPortal/spec/modules/cbrain_deep_clone_spec.rb 233bedc7abd20df4500e4edf039f105f4a14f567 -#- 2016-06-20 14:47:22 -0400 -#- Andrew Doyle -#- BrainPortal/spec/modules/core_models_spec.rb -d7a41d6bec9ffe35217c213c50dc85b80fa26717 -#- 2021-02-12 12:27:08 -0500 -#- Pierre Rioux -#- BrainPortal/spec/modules/github_ci_spec.rb +5a499240b042520db2f8068d4f1ca71d2ba559a2 -#- 2022-10-03 15:16:27 -0400 -#- Natacha Beck -#- BrainPortal/spec/modules/github_ci_spec.rb dc2eaf46e933770354e47c8eb49ead6b6a14ae81 -#- 2019-05-17 15:12:30 -0400 -#- Pierre Rioux -#- BrainPortal/spec/modules/resource_access_spec.rb 233bedc7abd20df4500e4edf039f105f4a14f567 -#- 2016-06-20 14:47:22 -0400 -#- Andrew Doyle -#- BrainPortal/spec/modules/ssh_agent_spec.rb 53a10b65e54e72386777a9effb0370ebf9ad2c86 -#- 2017-12-07 19:19:01 -0500 -#- Natacha Beck -#- BrainPortal/spec/modules/view_scopes_spec.rb @@ -1625,16 +1675,8 @@ c184bd8f84d7984c04b7581a61a6b3abc8f8c5bd -#- 2017-08-23 14:26:13 -0400 -#- Pierr df3e2af099c3247e5b7c350de3afc8f99b0253d8 -#- 2012-01-25 13:10:16 -0500 -#- Pierre Rioux -#- LICENSE.txt 495db4c873eeb3294c04187047ce1c3a1ffc6a0e -#- 2020-09-23 11:54:45 -0400 -#- MontrealSergiy -#- NEUROHUB_README.md 1b38ded2d16ee2b163149d0ae9bea35afdef47bd -#- 2021-01-21 10:06:59 -0500 -#- Pierre Rioux -#- README.md -2bb36b003cc7ae98fe9d7017d399c7270ac27025 -#- 2021-02-26 15:51:43 -0500 -#- Pierre Rioux -#- Release-Notes.md -2ad7e18a2361df9fa2fc5a880cae6f32205775a8 -#- 2020-03-10 17:33:32 -0400 -#- Pierre Rioux -#- Travis/Dockerfile.travis -daa19ab96f0a6463c004742a8d4b0f42b3b3fe05 -#- 2017-04-11 14:32:59 -0400 -#- Pierre Rioux -#- Travis/README.md -c4e436410858283f3ed06eaf65a52d1935253175 -#- 2019-06-12 12:49:49 -0400 -#- Pierre Rioux -#- Travis/bootstrap.sh -aab7619bf65c941f03c5f944779bf31554b80f2f -#- 2016-11-26 19:35:28 -0500 -#- Pierre Rioux -#- Travis/build_container.sh -0c218fa79ac0b288d421141c2c86554e74721639 -#- 2020-04-22 10:46:06 -0400 -#- Pierre Rioux -#- Travis/cb_run_tests.sh -aab7619bf65c941f03c5f944779bf31554b80f2f -#- 2016-11-26 19:35:28 -0500 
-#- Pierre Rioux -#- Travis/templates/config_portal.rb.TEST -aab7619bf65c941f03c5f944779bf31554b80f2f -#- 2016-11-26 19:35:28 -0500 -#- Pierre Rioux -#- Travis/templates/database.yml.TEST -22d7196f7fdaf23c990a450599f360d957308c34 -#- 2019-09-03 14:36:24 -0400 -#- Pierre Rioux -#- Travis/travis_ci.sh -2e9aaa24b00460d88f774be8016d3a6477e7faad -#- 2021-02-26 10:20:23 -0500 -#- Pierre Rioux -#- cbrain_file_revisions.csv +c2aec28d58e19e80e8fcc36ce5087aa40fd7d020 -#- 2022-01-28 14:33:02 -0500 -#- Pierre Rioux -#- Release-Notes.md +c2aec28d58e19e80e8fcc36ce5087aa40fd7d020 -#- 2022-01-28 14:33:02 -0500 -#- Pierre Rioux -#- cbrain_file_revisions.csv 6eff5f304c4f97b994d8b6f72992ceaba9516b3b -#- 2021-08-02 16:36:36 -0400 -#- Pierre Rioux -#- license_header.txt a7e0e34d6dbe195cdc06d7bd183bb56f164436db -#- 2016-08-14 14:37:17 -0400 -#- Pierre Rioux -#- script/README.md 2bb36b003cc7ae98fe9d7017d399c7270ac27025 -#- 2021-02-26 15:51:43 -0500 -#- Pierre Rioux -#- script/gen_local_rev_csv.sh diff --git a/license_header.txt b/license_header.txt index 9401f3778..219662d7e 100644 --- a/license_header.txt +++ b/license_header.txt @@ -1,7 +1,7 @@ # # CBRAIN Project # -# Copyright (C) 2008-2021 +# Copyright (C) 2008-2023 # The Royal Institution for the Advancement of Learning # McGill University # diff --git a/script/update_cb_all.sh b/script/update_cb_all.sh index 5e4d081e1..ae524840e 100755 --- a/script/update_cb_all.sh +++ b/script/update_cb_all.sh @@ -11,9 +11,9 @@ function usage { This is CBRAIN's $0 version $VERSION by Pierre Rioux -Usage: $0 [-v] [-[1234567]] path_to_portal_or_bourreau +Usage: $0 [-v] [[-|+][1234567]] path_to_portal_or_bourreau -This script will attempt to update all the GIT repository for a CBRAIN +This script will attempt to update all the GIT repositories for a CBRAIN installation, including those in the plugins. It has to be invoked with a single argument, the path to either the BrainPortal or the Bourreau directory of the CBRAIN distribution. For a portal installation, @@ -31,21 +31,29 @@ These are the steps that are performed: A) for both Bourreau and BrainPortal: - - git pull of the main CBRAIN repo - - git pull of each installed plugins - - bundle install - - rake cbrain:plugins:clean:all - - rake cbrain:plugins:install:all + 1 - git pull of the main CBRAIN repo + 2 - git pull of each installed plugin + 3 - bundle install + 4 - rake cbrain:plugins:clean:all + - rake cbrain:plugins:install:all B) for BrainPortal only: - - rake db:migrate - - rake db:sanity:check - - rake assets:precompile - - chmod -R a+rX BrainPortal/public + 5 - rake db:migrate + 6 - rake db:sanity:check + 7 - rake assets:precompile + - chmod -R a+rX BrainPortal/public Note: you might have to set your RAILS_ENV environment variable for the rake tasks to work properly. + +With option -v, the output of the commands run will be shown. + +With option -N, where N is a number between 1 and 7, the script +will run starting from step N. + +With option +N, where N is a number between 1 and 7, the script +will run ONLY step N.
USAGE exit 20 } @@ -58,8 +66,11 @@ USAGE verbose="" test $# -gt 0 && test "X$1" == "X-v" && shift && verbose="1" +# Which steps to execute +skipto="1" # first step to execute +stopat="99" # last step to execute + # With a -number in argument, will skip to that step -skipto="1" if test $# -gt 0 ; then if test "X$1" = "X-1" -o "X$1" = "X-2" -o "X$1" = "X-3" -o \ "X$1" = "X-4" -o "X$1" = "X-5" -o "X$1" = "X-6" -o \ @@ -69,6 +80,17 @@ if test $# -gt 0 ; then fi fi +# With a +number in argument, execute ONLY that step +if test $# -gt 0 ; then + if test "X$1" = "X+1" -o "X$1" = "X+2" -o "X$1" = "X+3" -o \ + "X$1" = "X+4" -o "X$1" = "X+5" -o "X$1" = "X+6" -o \ + "X$1" = "X+7" ; then # I'm too lazy to check with regex + skipto=$( echo "$1" | tr -d + ) + stopat=$skipto + shift + fi +fi + # Verify we have a path to a CBRAIN install if test $# -ne 1 ; then usage @@ -126,19 +148,21 @@ function runcapture { # ACTUAL UPDATE STEPS #============================================================================ -if test -z "$skipto" -o "$skipto" -le "1" ; then +step=1 +if test $step -ge $skipto -a $step -le $stopat ; then -Step 1: GIT Update CBRAIN Base -runcapture "git pull" +Step $step: GIT Update CBRAIN Base +runcapture "git pull --verbose" runcapture "git fetch --tags" fi #============================================================================ -if test -z "$skipto" -o "$skipto" -le "2" ; then +step=2 +if test $step -ge $skipto -a $step -le $stopat ; then -Step 2: GIT Update CBRAIN Plugins +Step $step: GIT Update CBRAIN Plugins pushd cbrain_plugins >/dev/null || exit for plugin in * ; do test ! -d "$plugin" && continue @@ -147,7 +171,7 @@ for plugin in * ; do test ! -d "$plugin/.git" && continue echo " => $plugin" pushd "$plugin" >/dev/null || exit 20 - runcapture "git pull" + runcapture "git pull --verbose" runcapture "git fetch --tags" popd >/dev/null || exit 20 done @@ -156,17 +180,19 @@ popd >/dev/null || exit 20 fi #============================================================================ -if test -z "$skipto" -o "$skipto" -le "3" ; then +step=3 +if test $step -ge $skipto -a $step -le $stopat ; then -Step 3: Bundle Install +Step $step: Bundle Install runcapture "bundle install" fi #============================================================================ -if test -z "$skipto" -o "$skipto" -le "4" ; then +step=4 +if test $step -ge $skipto -a $step -le $stopat ; then -Step 4: Re-install All Plugins +Step $step: Re-install All Plugins test "$base" == "BrainPortal" && runcapture "rake cbrain:plugins:clean:all" test "$base" == "Bourreau" && runcapture "rake cbrain:plugins:clean:plugins" test "$base" == "BrainPortal" && runcapture "rake cbrain:plugins:install:all" @@ -175,10 +201,11 @@ test "$base" == "Bourreau" && runcapture "rake cbrain:plugins:install:plugins fi #============================================================================ -if test -z "$skipto" -o "$skipto" -le "5" ; then +step=5 +if test $step -ge $skipto -a $step -le $stopat ; then if test "$base" == "BrainPortal" ; then - Step 5: Database Migrations + Step $step: Database Migrations runcapture "rake db:migrate" fi @@ -186,20 +213,22 @@ fi #============================================================================ -if test -z "$skipto" -o "$skipto" -le "6" ; then +step=6 +if test $step -ge $skipto -a $step -le $stopat ; then if test "$base" == "BrainPortal" ; then - Step 6: Database Sanity Checks + Step $step: Database Sanity Checks runcapture "rake db:sanity:check" fi fi 
#============================================================================ -if test -z "$skipto" -o "$skipto" -le "7" ; then +step=7 +if test $step -ge $skipto -a $step -le $stopat ; then if test "$base" == "BrainPortal" ; then - Step 7: Asset Compilations + Step $step: Asset Compilations runcapture "rake assets:precompile" runcapture "chmod -R a+rX public" fi
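
For illustration, here is a brief invocation sketch of the updated step-selection options in script/update_cb_all.sh; the installation path below is a hypothetical example, while the option behaviour and step numbers come from the usage text and the skipto/stopat logic in the patch above.

  # Full update of a BrainPortal installation, showing the output of each command (-v):
  script/update_cb_all.sh -v /home/cbrain/cbrain/BrainPortal

  # Resume an interrupted update starting at step 5 (rake db:migrate);
  # this sets skipto=5 and leaves stopat at 99, so steps 5 through 7 run:
  script/update_cb_all.sh -5 /home/cbrain/cbrain/BrainPortal

  # Re-run ONLY step 7 (rake assets:precompile and the chmod of public/);
  # this sets both skipto=7 and stopat=7:
  script/update_cb_all.sh +7 /home/cbrain/cbrain/BrainPortal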