Commit 286f2e49 authored by Gyan Gupta

refactor s3 storage service

parent eeb10fa8
1 merge request: !253 Created asynchronous download
Pipeline #12304 failed
module S3StorageHelper
  # True when a zip archive for the given bucket already exists on disk.
  def file_exist?(bucket)
    File.exist?(zip_file_path(bucket))
  end

  # True when the bucket's zip archive is more than a day old.
  def file_expired?(bucket)
    File.ctime(zip_file_path(bucket)) < DateTime.now - 1.day
  end

  # Path of the zip archive for a bucket, e.g. tmp/downloads/<bucket>.zip.
  def zip_file_path(bucket)
    File.join(
      ENV.fetch('DOWNLOAD_PATH', 'tmp/downloads'),
      "#{bucket}.zip"
    )
  end

  # Zip the downloaded bucket directory, then remove the raw files.
  def build_zip(bucket)
    dir = base_dir(bucket)
    ZipFileGenerator.new(dir, zip_file_path(bucket)).write
    FileUtils.rm_rf(dir)
  end

  # Delete a stale zip archive for the bucket.
  def cleanup_zip(bucket)
    FileUtils.rm_rf(zip_file_path(bucket))
  end

  # Directory the bucket's objects are downloaded into before zipping.
  def base_dir(bucket)
    File.join(ENV.fetch('DOWNLOAD_PATH', 'tmp/downloads'), bucket)
  end
end
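The helper's build_zip delegates the actual archiving to ZipFileGenerator, which is not part of this commit. As a rough sketch only, assuming the rubyzip gem and the (input_dir, output_file) constructor plus write method used above, such a class could look like:

require 'zip'

# Recursively zips every file under input_dir into output_file,
# preserving paths relative to input_dir.
class ZipFileGenerator
  def initialize(input_dir, output_file)
    @input_dir = input_dir
    @output_file = output_file
  end

  def write
    Zip::File.open(@output_file, Zip::File::CREATE) do |zipfile|
      Dir.glob(File.join(@input_dir, '**', '*')).each do |path|
        entry = path.sub("#{@input_dir}/", '')
        File.directory?(path) ? zipfile.mkdir(entry) : zipfile.add(entry, path)
      end
    end
  end
end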
@@ -2,6 +2,7 @@
 require 'fileutils'
 require 'csv'
 class S3StorageService
+  include S3StorageHelper
   # Reference: https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/
   # https://docs.ceph.com/en/latest/radosgw/s3/ruby/
   # https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/s3-example-create-buckets.html
@@ -172,22 +173,21 @@ class S3StorageService
     s3_bucket = sanitise_name(bucket)
     return false unless bucket_exists?(s3_bucket)
     list_of_objects, total_size = list_all_objects(s3_bucket)
-    base_dir = File.join(ENV.fetch('DOWNLOAD_PATH', 'tmp/downloads'), bucket)
-    return base_dir if File.exists?(zip_file_path(bucket)) && File.ctime(zip_file_path(bucket)) > DateTime.now - 1.day
+    return base_dir(bucket) if file_exist?(bucket) && !file_expired?(bucket)
     return prepare_zip_file(bucket, s3_bucket, list_of_objects)
   end

   def prepare_csv_with_download_bucket_links(bucket)
-    return true if File.exists?(zip_file_path(bucket)) && File.ctime(zip_file_path(bucket)) > DateTime.now - 1.day
+    return true if file_exist?(bucket) && !file_expired?(bucket)
-    cleanup_zip(zip_file_path(bucket)) if File.exists?(zip_file_path(bucket)) && File.ctime(zip_file_path(bucket)) < DateTime.now - 1.day
+    cleanup_zip(bucket) if file_exist?(bucket) && file_expired?(bucket)
     s3_bucket = sanitise_name(bucket)
     return false unless bucket_exists?(s3_bucket)
     list_of_objects, total_size = list_all_objects(s3_bucket)
-    base_dir = File.join(ENV.fetch('DOWNLOAD_PATH', 'tmp/downloads'), bucket)
+    base_dir = base_dir(bucket)
     list_of_objects.each do |object|
       path = File.join(base_dir, object[:key])
@@ -204,11 +204,11 @@ class S3StorageService
       end
     end

-    build_zip(base_dir, bucket)
+    build_zip(bucket)
   end

   def prepare_zip_file(bucket, s3_bucket, list_of_objects)
-    base_dir = File.join(ENV.fetch('DOWNLOAD_PATH', 'tmp/downloads'), bucket)
+    base_dir = base_dir(bucket)
     list_of_objects.each do |object|
       path = File.join(base_dir, object[:key])
@@ -274,22 +274,4 @@ class S3StorageService
       CSV.open(file_path, 'w') { |csv| csv << csv_data }
     end
   end
-
-  def build_zip(base_dir, bucket)
-    zf = ZipFileGenerator.new(base_dir, zip_file_path(bucket))
-    zf.write
-    cleanup_zip(base_dir)
-  end
-
-  def zip_file_path(bucket)
-    File.join(
-      ENV.fetch('DOWNLOAD_PATH', "tmp/downloads"),
-      "#{bucket}.zip"
-    )
-  end
-
-  def cleanup_zip(zip_file)
-    FileUtils.rm_rf(zip_file)
-  end
 end
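For reference, a hypothetical console session against the new helper; the bucket name 'my-bucket' is made up for illustration, DOWNLOAD_PATH is assumed unset (so the 'tmp/downloads' default applies), and ActiveSupport plus ZipFileGenerator are assumed to be loaded, as in a Rails console:

# Hypothetical Rails console session; 'my-bucket' is an illustrative name only.
include S3StorageHelper

zip_file_path('my-bucket')  # => "tmp/downloads/my-bucket.zip" (DOWNLOAD_PATH unset)
file_exist?('my-bucket')    # => false until build_zip has produced the archive
build_zip('my-bucket')      # zips tmp/downloads/my-bucket, then removes that directory
file_expired?('my-bucket')  # => true once the archive is more than a day old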