Skip to content

Commit 12025ca

Browse files
committed
Modify storage class days based on revised document
1 parent bd4fca2 commit 12025ca

2 files changed

Lines changed: 10 additions & 13 deletions

File tree

app/models/conditions_response/backup.rb

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -86,13 +86,8 @@ def self.condition_response(condition, log, use_slack_notification: true)
8686

8787
iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file|
8888
upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file)
89-
# When we first upload our file to s3, the default storage class is STANDARD
90-
# After 1 month, we want to transition the object to STANDARD IA,
91-
# then GLACIER after 3 months. This can be changed to meet our needs.
92-
# This will help us save on costs.
93-
# This however has effects on retrieval time for objects which you can see in the link below
94-
# https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes
95-
set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
89+
# When we first upload our file to s3, we explicitly set the storage class to STANDARD_IA
90+
set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
9691
end
9792

9893
log.record('info', 'Pruning older backups on local storage')
@@ -149,7 +144,7 @@ def self.s3_backup_bucket_full_prefix(today = Date.current)
149144
# @see https://aws.amazon.com/blogs/developer/uploading-files-to-amazon-s3/
150145
def self.upload_file_to_s3(s3, bucket, bucket_folder, file)
151146
obj = s3.bucket(bucket).object(bucket_folder + File.basename(file))
152-
obj.upload_file(file, { tagging: aws_date_tags })
147+
obj.upload_file(file, { tagging: aws_date_tags, storage_class: 'STANDARD_IA' })
153148
end
154149

155150

@@ -295,7 +290,7 @@ def self.iterate_and_log_notify_errors(list, additional_error_info, log, use_sla
295290
end
296291

297292

298-
STORAGE_CLASSES = %w(STANDARD_IA GLACIER DEEP_ARCHIVE).freeze
293+
STORAGE_CLASSES = %w(GLACIER DEEP_ARCHIVE).freeze
299294
class << self
300295
define_method(:storage_class_is_valid?) do |storage_class_list|
301296
unless storage_class_list.empty?
@@ -306,7 +301,7 @@ class << self
306301
end
307302
end
308303

309-
# s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
304+
# s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 90, storage_class: 'GLACIER'}, {days: 450, storage_class: 'DEEP_ARCHIVE'}])
310305
def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
311306
client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
312307
credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))
@@ -319,8 +314,9 @@ def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, stor
319314
rules: [
320315
{
321316
expiration: {
317+
# Expire objects after 10 years
322318
date: Time.now,
323-
days: 365,
319+
days: 3650,
324320
expired_object_delete_marker: false
325321
},
326322
filter: {

spec/models/conditions_response/backup_spec.rb

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -699,10 +699,10 @@ def create_faux_backup_file(backups_dir, file_prefix)
699699
FileUtils.remove_entry(temp_backups_dir, true)
700700
end
701701

702-
it 'adds date tags to the object' do
702+
it 'adds date tags and STANDARD_IA storage class to the object' do
703703
expect(mock_bucket_object).to receive(:upload_file)
704704
.with(faux_backup_fn,
705-
{tagging: 'this is the tagging string'})
705+
{storage_class: "STANDARD_IA", tagging: "this is the tagging string"})
706706

707707
expect(described_class).to receive(:aws_date_tags).and_return('this is the tagging string')
708708
Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn)
@@ -1372,6 +1372,7 @@ def create_faux_backup_file(backups_dir, file_prefix)
13721372
expect(get_mock_data[0][:id]).to eq 'TestOnly'
13731373
expect(get_mock_data[0][:status]).to eq 'Enabled'
13741374
expect(get_mock_data[0][:filter][:prefix]).to eq 'bucket/top/prefix'
1375+
expect(get_mock_data[0][:expiration][:days]).to eq 3650
13751376
expect(get_mock_data[0][:transitions].count).to eq 2
13761377
expect(get_mock_data[0][:transitions]).to eq [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]
13771378
end

0 commit comments

Comments
 (0)