Commit 15919022 authored by PiTrem's avatar PiTrem
Browse files

Show a transient notification after the server responds to an import/export job submission

lib/import_coll:
  use attachments and tmp_files for processing
parent feaec41d
......@@ -334,28 +334,26 @@ module Chemotion
end
namespace :unshared do
# POST /collections/unshared — create a new top-level collection owned by the
# requesting user.
desc "Create an unshared collection"
params do
requires :label, type: String, desc: "Collection label"
end
post do
# NOTE(review): no explicit error handling — the (possibly unsaved) record
# returned by Collection.create becomes the response body.
Collection.create(user_id: current_user.id, label: params[:label])
end
end
namespace :exports do
desc "Create export job"
params do
requires :collections, type: Array[Integer]
requires :format, type: Symbol, values: [:json, :zip, :udm]
requires :nested, type: Boolean
end
post do
collection_ids = params[:collections].uniq
format = params[:format].to_s
nested = params[:nested] == true
if collection_ids.empty?
......@@ -364,71 +362,52 @@ module Chemotion
else
# check if the user is allowed to export these collections
collection_ids.each do |collection_id|
begin
collection = Collection.belongs_to_or_shared_by(current_user.id, current_user.group_ids).find(collection_id)
rescue ActiveRecord::RecordNotFound
error!('401 Unauthorized', 401)
end
collection = Collection.belongs_to_or_shared_by(current_user.id, current_user.group_ids).find_by(id: collection_id)
error!('401 Unauthorized', 401) unless collection
end
end
channel = Channel.find_by(subject: Channel::JOB_START_MSG)
content = channel.msg_template unless channel.nil?
return if content.nil?
content['data'] = format(content['data'], { job_name: 'The collection export job'})
Message.create_msg_notification(channel.id, content, current_user.id, [current_user.id])
# run the asyncronous export job and return its id to the client
ExportCollectionsJob.perform_later(collection_ids, format, nested, current_user.id).job_id
end
desc "Poll export job"
params do
requires :id, type: String
end
get '/:id' do
ActiveJob::Status.get(params[:id])
ExportCollectionsJob.perform_later(collection_ids, params[:format].to_s, nested, current_user.id)
status 204
end
end
namespace :imports do
desc "Create import job"
params do
requires :file, type: File
end
post do
# create an id for the import,
# this is not the job_id, but will be used as file_name
import_id = SecureRandom.uuid
# create the `tmp/imports/` if it does not exist yet
import_path = File.join('tmp', 'import')
FileUtils.mkdir_p(import_path) unless Dir.exist?(import_path)
# store the file as `tmp/imports/<import_id>.zip`
zip_file_path = File.join('tmp', 'import', "#{import_id}.zip")
File.open(zip_file_path, 'wb') do |file|
file.write(params[:file][:tempfile].read)
file = params[:file]
if tempfile = file[:tempfile]
att = Attachment.new(
bucket: file[:container_id],
filename: file[:filename],
key: file[:name],
file_path: file[:tempfile],
created_by: current_user.id,
created_for: current_user.id,
content_type: file[:type]
)
begin
att.save!
ensure
tempfile.close
tempfile.unlink
end
# run the asyncronous import job and return its id to the client
ImportCollectionsJob.perform_later(att, current_user.id)
status 204
end
filename = params[:file][:filename] unless params[:file].nil?
channel = Channel.find_by(subject: Channel::JOB_START_MSG)
content = channel.msg_template unless channel.nil?
return if content.nil?
content['data'] = format(content['data'], { job_name: 'The collection import job'})
Message.create_msg_notification(channel.id, content, current_user.id, [current_user.id])
# run the asyncronous import job and return its id to the client
ImportCollectionsJob.perform_later(import_id, filename, current_user.id).job_id
end
desc "Poll import job"
params do
requires :id, type: String
end
get '/:id' do
ActiveJob::Status.get(params[:id])
end
# desc "Poll import job"
# params do
# requires :id, type: String
# end
# get '/:id' do
# ActiveJob::Status.get(params[:id])
# end
end
end
......
......@@ -20,6 +20,31 @@ class NotificationActions {
// Pass-through action: forwards the upload error message unchanged so the
// store/notification system can display it (Alt actions dispatch their
// return value).
uploadErrorNotify(message) {
return message;
}
notifyExImportStatus(type, status) {
const params = {
title: `Collection ${type}`,
message: "The task has been submitted: this might take a while but you will be notified as soon as it is completed.",
level: "info",
dismissible: true,
uid: "export_collection",
position: "tr",
autoDismiss: 5
};
switch(status) {
case 204:
break;
case 401:
params.message = `Unauthorized: you do not have the permission to ${type} this collection`;
params.level = 'error';
break;
default:
params.message = `An issue occured with your ${type} (status ${status}); please contact the administrators of the site if the problem persists.`;
params.level = 'error';
}
this.add(params);
}
}
export default alt.createActions(NotificationActions);
import React from 'react';
import {Button, ButtonToolbar} from 'react-bootstrap';
import UIStore from './../stores/UIStore';
import NotificationActions from '../actions/NotificationActions';
export default class ModalExportCollection extends React.Component {
constructor(props) {
......
......@@ -263,37 +263,8 @@ export default class CollectionsFetcher {
.catch((errorMessage) => { console.log(errorMessage); });
}
static showExportError() {
// TODO move to the right place
NotificationActions.removeByUid('export_collections')
NotificationActions.add({
title: "Error",
message: "An error occured with your export, please contact the administrators of the site if the problem persists.",
level: "error",
dismissible: true,
uid: "export_collections_error",
position: "bl",
autoDismiss: null
});
}
static showImportError() {
// TODO move to the right place
NotificationActions.removeByUid('import_collections')
NotificationActions.add({
title: "Error",
message: "An error occured with your import, please contact the administrators of the site if the problem persists.",
level: "error",
dismissible: true,
uid: "import_collections_error",
position: "bl",
autoDismiss: null
});
}
static createExportJob(params) {
let promise = fetch('/api/v1/collections/exports/', {
return fetch('/api/v1/collections/exports/', {
credentials: 'same-origin',
method: 'POST',
headers: {
......@@ -302,123 +273,24 @@ export default class CollectionsFetcher {
},
body: JSON.stringify(params)
}).then((response) => {
if (response.ok) {
return response.json();
} else {
throw new Error(response.status);
}
}).then((job_id) => {
return job_id;
}).catch((errorMessage) => {
throw new Error(errorMessage);
});
return promise;
}
static pollExportJob(exportId) {
let promise = fetch(`/api/v1/collections/exports/${exportId}`, {
credentials: 'same-origin',
method: 'GET',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
}).then((response) => {
if (response.ok) {
return response.json();
} else {
throw new Error(response.status);
}
}).then((json) => {
if (json.error) {
CollectionsFetcher.showExportError();
} else {
if (json.status == 'completed') {
// remove the notification
NotificationActions.removeByUid('export_collections')
// download the file, headers will prevent the browser from reloading the page
window.location.href = `/zip/${exportId}.zip`;
} else if (json.status == 'queued' || json.status == 'working') {
// continue polling
setTimeout(() => {
CollectionsFetcher.pollExportJob(exportId);
}, 1000);
} else {
CollectionsFetcher.showExportError();
}
}
}).catch((errorMessage) => {
CollectionsFetcher.showExportError();
});
return promise;
NotificationActions.notifyExImportStatus('export', response.status);
if (response.ok) { return response.json(); }
throw new Error(response.status);
}).catch((errorMessage) => { throw new Error(errorMessage); });
}
static createImportJob(params) {
const data = new FormData();
data.append('file', params.file);
var data = new FormData();
data.append("file", params.file);
let promise = fetch('/api/v1/collections/imports/', {
return fetch('/api/v1/collections/imports/', {
credentials: 'same-origin',
method: 'POST',
body: data
}).then((response) => {
if (response.ok) {
return response.json();
} else {
throw new Error(response.status);
}
}).then((job_id) => {
return job_id;
}).catch((errorMessage) => {
throw new Error(errorMessage);
});
return promise;
}
static pollImportJob(importId) {
let promise = fetch(`/api/v1/collections/imports/${importId}`, {
credentials: 'same-origin',
method: 'GET',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
}).then((response) => {
if (response.ok) {
return response.json();
} else {
throw new Error(response.status);
}
}).then((json) => {
if (json.error) {
CollectionsFetcher.showImportError();
} else {
if (json.status == 'completed') {
// remove the notification
NotificationActions.removeByUid('import_collections')
// reload the unshared collections
CollectionActions.fetchUnsharedCollectionRoots()
} else if (json.status == 'queued' || json.status == 'working') {
// continue polling
setTimeout(() => {
CollectionsFetcher.pollImportJob(importId);
}, 1000);
} else {
CollectionsFetcher.showImportError();
}
}
}).catch((errorMessage) => {
CollectionsFetcher.showImportError();
});
return promise;
NotificationActions.notifyExImportStatus('import', response.status);
if (response.ok) { return response.json(); }
throw new Error(response.status);
}).catch((errorMessage) => { throw new Error(errorMessage); });
}
}
......@@ -2,7 +2,7 @@ class CleanExportFilesJob < ActiveJob::Base
queue_as :clean_export_files
# Deletes the exported collection archive <job_id>.<ext> from public/zip.
# Scheduled by ExportCollectionsJob to sweep the file 24h after export.
def perform(job_id, ext)
  # Diff-residue fix: the old relative 'public/zip/...' deletion was left in
  # alongside the new one; keep only the Rails.public_path form, which works
  # regardless of the process working directory.
  file_path = Rails.public_path.join('zip', job_id + '.' + ext)
  File.delete(file_path) if File.exist?(file_path)
end
end
......@@ -3,11 +3,58 @@ class ExportCollectionsJob < ActiveJob::Base
queue_as :export_collections
def perform(collection_ids, format, nested, user_id)
export = Export::ExportCollections.new(self.job_id, collection_ids, format, nested)
export.prepare_data
export.to_file
CleanExportFilesJob.set(queue: "remove_files_#{self.job_id}", wait: 24.hours).perform_later(self.job_id, format)
CollectionMailer.mail_export_completed(self.job_id, collection_ids, format, user_id).deliver_now
# Post-run hook: only fires the success-side effects when perform set
# @success (i.e. no exception was rescued). Relies on ivars populated by
# #perform: @extname, @user_id, @labels, @link, @expires_at.
after_perform do |job|
if @success
# Sweep file in 24h
CleanExportFilesJob.set(queue: "remove_files_#{job.job_id}", wait: 24.hours)
.perform_later(job.job_id, @extname)
# Email ELNer
CollectionMailer.mail_export_completed(
@user_id,
@labels,
@link,
@expires_at
).deliver_now
# Notify ELNer
channel = Channel.find_by(subject: Channel::COLLECTION_ZIP)
content = channel.msg_template unless channel.nil?
if content.present?
# Labels are truncated to 41 chars for the notification text.
content['data'] = format(
content['data'],
{ col_labels: "[#{@labels.join('], [')}]"[0..40], operation: 'export' }
)
content['url'] = @link
content['url_title'] = 'Download'
Message.create_msg_notification(channel.id, content, @user_id, [@user_id])
end
end
end
# Builds the export archive for the given collections and records the
# metadata (@labels, @link, @expires_at) consumed by after_perform.
# On failure, logs the error, removes any partially written archive and
# clears @success so after_perform skips mail/notification.
def perform(collection_ids, extname, nested, user_id)
  @success = true
  @collection_ids = collection_ids
  @extname = extname
  @user_id = user_id
  begin
    # Only the first 10 collection labels are kept for display purposes.
    @labels = Collection.where(id: collection_ids[0..9]).pluck(:label)
    @link = if Rails.env.production?
              "https://#{ENV['HOST'] || ENV['SMTP_DOMAIN']}/zip/#{job_id}.#{extname}"
            else
              "http://#{ENV['HOST'] || 'localhost:3000'}/zip/#{job_id}.#{extname}"
            end
    @expires_at = Time.now + 24.hours
    export = Export::ExportCollections.new(job_id, collection_ids, extname, nested)
    export.prepare_data
    export.to_file
  rescue StandardError => e
    Delayed::Worker.logger.error e
    # TODO: Notify ELNer
    # FIX: the archive is served from public/zip (see @link above and
    # CleanExportFilesJob); cleanup previously joined public/<extname>,
    # missing the file for json/udm exports.
    fp = Rails.public_path.join('zip', "#{job_id}.#{@extname}")
    File.delete(fp) if File.exist?(fp)
    @success = false
  end
end
end
......@@ -3,20 +3,29 @@ class ImportCollectionsJob < ActiveJob::Base
queue_as :import_collections
def perform(import_id, filename, current_user_id)
import = Import::ImportCollections.new(import_id, current_user_id)
import.extract
import.read
import.import
import.cleanup
channel = Channel.find_by(subject: Channel::COLLECTION_ZIP)
content = channel.msg_template unless channel.nil?
return if content.nil?
content['data'] = format(content['data'], { col_labels: '', operate: 'imported'})
content['data'] = content['data'] + ' File: ' + filename
Message.create_msg_notification(channel.id, content, current_user_id, [current_user_id])
# Post-success hook: notifies the user that the collection import finished.
# FIX: the original interpolated a bare local `filename` that is undefined in
# this scope (NameError whenever a channel template exists); recover the name
# from the job's Attachment argument instead.
after_perform do |job|
  if @success
    channel = Channel.find_by(subject: Channel::COLLECTION_ZIP)
    content = channel.msg_template unless channel.nil?
    if content.present?
      att = job.arguments.first
      filename = att.respond_to?(:filename) ? att.filename : ''
      content['data'] = format(content['data'], { col_labels: '', operation: 'import' })
      content['data'] = content['data'] + ' File: ' + filename.to_s
      Message.create_msg_notification(channel.id, content, @user_id, [@user_id])
    end
  end
end
# Imports collections from the uploaded Attachment archive.
#   att             - Attachment holding the uploaded zip (created by the API)
#   current_user_id - id of the importing user
# Sets @user_id/@success for the after_perform notification hook.
def perform(att, current_user_id)
  @user_id = current_user_id
  @success = true
  begin
    import = Import::ImportCollections.new(att, current_user_id)
    import.extract
    import.import!
  rescue StandardError => e # explicit class, consistent with ExportCollectionsJob
    Delayed::Worker.logger.error e
    # TODO: Message Error
    @success = false
  end
end
end
# Mailer to email an ELNer about its data export being ready for download
class CollectionMailer < ActionMailer::Base
default from: ENV['DEVISE_SENDER'] || 'eln'
default from: ENV['DEVISE_SENDER'] || 'eln'
# Plain-text body for the "export completed" email.
# Interpolates @col_labels and @url set by init_export_params.
def export_mail_content
  [
    'Export collection job is completed!',
    "Your Collection(s): [#{@col_labels}] has been exported.",
    'You can download it from below link (only available today!):',
    "#{@url}",
    ''
  ].join("\n")
end
def init_export_params(job_id, collection_ids, ext, user_id)
if Rails.env.production?
@proto = "https://"
@host = ENV['SMTP_DOMAIN'] || "complat-eln.ioc.kit.edu"
else
@proto = "http://"
@host = ENV['HOST'] || "localhost:3000"
end
@job_id = job_id
@ext = ext
@user = User.find(user_id)
@col_labels = Collection.where(id: collection_ids).pluck(:label).join(',')
@url = @proto + @host + '/zip/' + job_id + '.' + ext
# Sends the "collection export completed" email to the exporting user.
#   user_id    - recipient user id (resolved via User.find)
#   labels     - Array of exported collection labels (truncated for subject)
#   link       - download URL of the generated archive
#   expires_at - Time after which the link is swept
def mail_export_completed(user_id, labels, link, expires_at)
init_export_params(user_id, labels, link, expires_at)
mail(to: @user.email, subject: "[ELN] Collection export: #{@col_labels}") do |format|
format.html
format.text { render plain: export_mail_content }
end
end
def export_notification
channel = Channel.find_by(subject: Channel::COLLECTION_ZIP)
content = channel.msg_template unless channel.nil?
return if content.nil?
private
content['data'] = format(content['data'], { col_labels: @col_labels, operate: 'exported'})
content['url'] = @url
content['url_title'] = 'Download'
Message.create_msg_notification(channel.id, content, @user.id, [@user.id])
end
def export_mail_content
<<~TXT
Export collection job completed!
Your data has been packed.
Collection#{@s}: #{@col_labels}.
Download link (expires at #{@expires_at}):
def mail_export_completed(job_id, collection_ids, ext, user_id)
init_export_params(job_id, collection_ids, ext, user_id)
export_notification
mail(to: @user.email, subject: "[ELN] Collection has been exported: [" + @col_labels + "]" ) do |format|
format.html
format.text { render plain: export_mail_content }
end
end
#{@link}
TXT
end
# Prepares the instance variables used by the mail templates:
# @user, @link, @expires_at, @s (plural suffix) and @col_labels
# ("[a], [b]" joined, truncated to 41 chars with an ellipsis).
def init_export_params(user_id, labels, link, expires_at)
  @user = User.find(user_id)
  @link = link
  @expires_at = expires_at
  @s = if labels.size > 1 then 's' else '' end
  joined = "[#{labels.join('], [')}]"
  @col_labels = joined.size > 40 ? joined[0..40] + '...' : joined
end
end
......@@ -16,6 +16,6 @@ class Channel < ActiveRecord::Base
# Notification channel subjects (matched against Channel records).
GATE_TRANSFER_NOTIFICATION = 'Gate Transfer Completed'
COLLECTION_TAKE_OWNERSHIP = 'Collection Take Ownership'
EDITOR_CALLBACK = 'EditorCallback'
JOB_START_MSG = 'Create Delayed Job Message'
# Dedupe fix: COLLECTION_ZIP was assigned twice (stale value
# 'Collection ZIP Import and Export' shadowed by the one below);
# keep only the effective value.
COLLECTION_ZIP = 'Collection Import and Export'
COLLECTION_ZIP_FAIL = 'Collection Import and Export Failure'
end
class AddCollectionZipNotification < ActiveRecord::Migration
def change
channel = Channel.find_by(subject: Channel::COLLECTION_ZIP)
if (channel.nil?)
attributes = {
subject: Channel::COLLECTION_ZIP,
channel_type: 8,
msg_template: '{"data": "Collection(s): %{col_labels} has been %{operate} successfully.",
"action":"CollectionActions.fetchUnsharedCollectionRoots"
}'
}
Channel.create(attributes)
end
channel = Channel.find_or_create_by(subject: Channel::COLLECTION_ZIP)
attributes = {
subject: Channel::COLLECTION_ZIP,
channel_type: 8,
msg_template: '{"data": "Collection %{operation}: %{col_labels} processed successfully.",
"action":"CollectionActions.fetchUnsharedCollectionRoots",
"level": "success"
}'
}
channel.update(attributes) if channel
channel = Channel.find_or_create_by(subject: Channel::COLLECTION_ZIP_FAIL)
attributes = {
subject: Channel::COLLECTION_ZIP_FAIL,