diff --git a/google-cloud-storage/lib/google/cloud/storage/bucket.rb b/google-cloud-storage/lib/google/cloud/storage/bucket.rb
index ca8461354bb7..2be3ade74b38 100644
--- a/google-cloud-storage/lib/google/cloud/storage/bucket.rb
+++ b/google-cloud-storage/lib/google/cloud/storage/bucket.rb
@@ -1410,6 +1410,29 @@ def delete if_metageneration_match: nil, if_metageneration_not_match: nil
                               user_project: user_project
        end
 
+        ##
+        # The bucket's IP filter configuration, if any.
+        #
+        # @return [Google::Apis::StorageV1::Bucket::IpFilter, nil] The current
+        #   IP filter configuration, or `nil` if none is set.
+        #
+        def ip_filter
+          @gapi.ip_filter
+        end
+
+        ##
+        # Updates the bucket's IP filter configuration. The change is
+        # persisted immediately with a patch request. If `nil` is given, an
+        # empty configuration is sent, clearing the filter.
+        #
+        # @param [Google::Apis::StorageV1::Bucket::IpFilter, nil] new_ip_filter
+        #   The new IP filter configuration for the bucket.
+        #
+        def ip_filter= new_ip_filter
+          @gapi.ip_filter = new_ip_filter || {}
+          patch_gapi! :ip_filter
+        end
+
        ##
        # Retrieves a list of files matching the criteria.
        #
diff --git a/google-cloud-storage/lib/google/cloud/storage/project.rb b/google-cloud-storage/lib/google/cloud/storage/project.rb
index 47e4ffc32da4..3d971a53af50 100644
--- a/google-cloud-storage/lib/google/cloud/storage/project.rb
+++ b/google-cloud-storage/lib/google/cloud/storage/project.rb
@@ -477,12 +477,14 @@ def create_bucket bucket_name,
                           user_project: nil,
                           autoclass_enabled: false,
                           enable_object_retention: nil,
-                          hierarchical_namespace: nil
+                          hierarchical_namespace: nil,
+                          ip_filter: nil
          params = {
            name: bucket_name,
            location: location,
            custom_placement_config: custom_placement_config,
-           hierarchical_namespace: hierarchical_namespace
+           hierarchical_namespace: hierarchical_namespace,
+           ip_filter: ip_filter
          }.delete_if { |_, v| v.nil? }
          new_bucket = Google::Apis::StorageV1::Bucket.new(**params)
          storage_class = storage_class_for storage_class
@@ -496,6 +498,7 @@ def create_bucket bucket_name,
            b.versioning = versioning unless versioning.nil?
            b.requester_pays = requester_pays unless requester_pays.nil?
            b.hierarchical_namespace = hierarchical_namespace unless hierarchical_namespace.nil?
+           b.ip_filter = ip_filter unless ip_filter.nil?
          end
          yield updater if block_given?
          updater.check_for_changed_labels!
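
A minimal usage sketch of the new surface, for review purposes (the bucket name and CIDR range are illustrative; `IpFilter` and `PublicNetworkSource` are the generated models from the google-apis-storage_v1 gem, as used elsewhere in this change):

    require "google/cloud/storage"

    storage = Google::Cloud::Storage.new

    # Restrict public access to a single documentation-range CIDR block.
    ip_filter = Google::Apis::StorageV1::Bucket::IpFilter.new(
      mode: "Enabled",
      public_network_source:
        Google::Apis::StorageV1::Bucket::IpFilter::PublicNetworkSource.new(
          allowed_ip_cidr_ranges: ["203.0.113.0/24"]
        )
    )

    # New create-time option added in project.rb.
    bucket = storage.create_bucket "my-bucket", ip_filter: ip_filter

    # New accessor pair added in bucket.rb; the setter patches the bucket.
    bucket.ip_filter.mode # => "Enabled"
    bucket.ip_filter = ip_filter
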
diff --git a/google-cloud-storage/samples/acceptance/buckets_test.rb b/google-cloud-storage/samples/acceptance/buckets_test.rb
index bb78ba9bbe44..e29644d5d196 100644
--- a/google-cloud-storage/samples/acceptance/buckets_test.rb
+++ b/google-cloud-storage/samples/acceptance/buckets_test.rb
@@ -21,6 +21,7 @@
 require_relative "../storage_create_bucket_class_location"
 require_relative "../storage_create_bucket_dual_region"
 require_relative "../storage_create_bucket_hierarchical_namespace"
+require_relative "../storage_create_bucket_with_ip_filter"
 require_relative "../storage_create_bucket_with_object_retention"
 require_relative "../storage_define_bucket_website_configuration"
 require_relative "../storage_delete_bucket"
@@ -131,6 +132,34 @@ describe "Buckets Snippets" do
     end
   end
 
+  describe "storage_create_bucket_with_ip_filter" do
+    it "create_bucket_with_ip_filter, update_bucket_with_ip_filter, remove_bucket_ip_filter" do
+      bucket_name = random_bucket_name
+      refute storage_client.bucket bucket_name
+
+      # create_bucket_with_ip_filter
+      retry_resource_exhaustion do
+        assert_output "Created bucket #{bucket_name} with IP filter enabled.\n" do
+          create_bucket_with_ip_filter bucket_name: bucket_name
+        end
+      end
+
+      refute_nil storage_client.bucket bucket_name
+
+      # update_bucket_with_ip_filter
+      assert_output "Updated bucket #{bucket_name} with IP filter enabled.\n" do
+        update_bucket_with_ip_filter bucket_name: bucket_name
+      end
+
+      # remove_bucket_ip_filter
+      assert_output "Updated IP filter for bucket #{bucket_name}.\n" do
+        remove_bucket_ip_filter bucket_name: bucket_name
+      end
+
+      delete_bucket_helper bucket_name
+    end
+  end
+
   describe "storage_create_bucket_with_object_retention" do
     it "creates a bucket with object retention enabled." do
       bucket_name = random_bucket_name
diff --git a/google-cloud-storage/samples/storage_create_bucket_with_ip_filter.rb b/google-cloud-storage/samples/storage_create_bucket_with_ip_filter.rb
new file mode 100644
index 000000000000..6215e7ba102d
--- /dev/null
+++ b/google-cloud-storage/samples/storage_create_bucket_with_ip_filter.rb
@@ -0,0 +1,89 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START storage_create_bucket_with_ip_filter]
+def create_bucket_with_ip_filter bucket_name:
+  # The ID to give your GCS bucket
+  # bucket_name = "your-unique-bucket-name"
+
+  require "google/cloud/storage"
+
+  storage = Google::Cloud::Storage.new
+
+  # Allow all public IPv4 and IPv6 traffic. In production, restrict
+  # allowed_ip_cidr_ranges to the CIDR blocks you actually trust.
+  ip_filter = Google::Apis::StorageV1::Bucket::IpFilter.new(
+    mode: "Enabled",
+    public_network_source: Google::Apis::StorageV1::Bucket::IpFilter::PublicNetworkSource.new(
+      allowed_ip_cidr_ranges: ["0.0.0.0/0", "::/0"]
+    ),
+    allow_all_service_agent_access: false
+  )
+
+  storage.create_bucket bucket_name do |b|
+    b.ip_filter = ip_filter
+    b.uniform_bucket_level_access = true
+  end
+
+  puts "Created bucket #{bucket_name} with IP filter enabled."
+end
+
+def remove_bucket_ip_filter bucket_name:
+  # The ID of your GCS bucket
+  # bucket_name = "your-unique-bucket-name"
+
+  require "google/cloud/storage"
+
+  storage = Google::Cloud::Storage.new
+  bucket = storage.bucket bucket_name
+
+  # Setting the mode to "Disabled" lifts the bucket's IP restrictions.
+  ip_filter = Google::Apis::StorageV1::Bucket::IpFilter.new mode: "Disabled"
+  bucket.update do |b|
+    b.ip_filter = ip_filter
+  end
+
+  puts "Updated IP filter for bucket #{bucket_name}."
+end
+
+def update_bucket_with_ip_filter bucket_name:
+  # The ID of your GCS bucket
+  # bucket_name = "your-unique-bucket-name"
+
+  require "google/cloud/storage"
+
+  storage = Google::Cloud::Storage.new
+  bucket = storage.bucket bucket_name
+
+  # Note: a caller can be exempted from a bucket's IP filter by granting
+  # the storage.buckets.exemptFromIpFilter IAM permission.
+  ip_filter = Google::Apis::StorageV1::Bucket::IpFilter.new(
+    mode: "Enabled",
+    public_network_source: Google::Apis::StorageV1::Bucket::IpFilter::PublicNetworkSource.new(
+      allowed_ip_cidr_ranges: ["0.0.0.0/0", "::/0"]
+    ),
+    allow_all_service_agent_access: false
+  )
+
+  bucket.update do |b|
+    b.ip_filter = ip_filter
+  end
+
+  puts "Updated bucket #{bucket_name} with IP filter enabled."
+end
+# [END storage_create_bucket_with_ip_filter]
+
+if $PROGRAM_NAME == __FILE__
+  create_bucket_with_ip_filter bucket_name: ARGV.shift
+end
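
As a smoke test, the three sample methods can be exercised in sequence against a scratch project (the bucket name is hypothetical; application default credentials must already be configured for google-cloud-storage):

    require_relative "storage_create_bucket_with_ip_filter"

    bucket_name = "my-ip-filter-bucket" # hypothetical
    create_bucket_with_ip_filter bucket_name: bucket_name
    update_bucket_with_ip_filter bucket_name: bucket_name
    remove_bucket_ip_filter bucket_name: bucket_name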