Class RightAws::S3Interface
In: lib/s3/right_s3_interface.rb
Parent: RightAwsBase
RuntimeError AwsError AwsNoChange RightAWSParser RightErrorResponseParser RightHttp2xxParser AcfInterface SqsInterface SqsGen2Interface S3Interface Ec2 SdbInterface RightAwsBase ActiveSdbConnect ActiveSdb SqsGen2 S3 S3Generator Sqs RightDummyParser AWSErrorHandler AwsBenchmarkingBlock AwsUtils RightSaxParserCallback lib/sqs/right_sqs_interface.rb lib/sqs/right_sqs_gen2.rb lib/s3/right_s3.rb lib/acf/right_acf_interface.rb lib/sqs/right_sqs_gen2_interface.rb lib/sqs/right_sqs.rb lib/sdb/right_sdb_interface.rb lib/sdb/active_sdb.rb lib/ec2/right_ec2.rb lib/s3/right_s3_interface.rb lib/awsbase/right_awsbase.rb RightAwsBaseInterface VERSION RightAws dot/m_13_0.png

Methods

Included Modules

RightAwsBaseInterface

Constants

USE_100_CONTINUE_PUT_SIZE = 1_000_000
DEFAULT_HOST = 's3.amazonaws.com'
DEFAULT_PORT = 443
DEFAULT_PROTOCOL = 'https'
DEFAULT_SERVICE = '/'
REQUEST_TTL = 30
DEFAULT_EXPIRES_AFTER = 1 * 24 * 60 * 60
ONE_YEAR_IN_SECONDS = 365 * 24 * 60 * 60
AMAZON_HEADER_PREFIX = 'x-amz-'
AMAZON_METADATA_PREFIX = 'x-amz-meta-'

Public Class methods

[Source]

    # File lib/s3/right_s3_interface.rb, line 46
46:     def self.bench_s3
47:       @@bench.service
48:     end

[Source]

    # File lib/s3/right_s3_interface.rb, line 43
43:     def self.bench_xml
44:       @@bench.xml
45:     end

Creates new RightS3 instance.

 s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<RightAws::S3Interface:0xb7b3c27c>

Params is a hash:

   {:server       => 's3.amazonaws.com'   # Amazon service host: 's3.amazonaws.com'(default)
    :port         => 443                  # Amazon service port: 80 or 443(default)
    :protocol     => 'https'              # Amazon service protocol: 'http' or 'https'(default)
    :multi_thread => true|false           # Multi-threaded (connection per each thread): true or false(default)
    :logger       => Logger Object}       # Logger instance: logs to STDOUT if omitted }

[Source]

    # File lib/s3/right_s3_interface.rb, line 63
63:     def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
64:       init({ :name             => 'S3', 
65:              :default_host     => ENV['S3_URL'] ? URI.parse(ENV['S3_URL']).host   : DEFAULT_HOST, 
66:              :default_port     => ENV['S3_URL'] ? URI.parse(ENV['S3_URL']).port   : DEFAULT_PORT,
67:              :default_service  => ENV['S3_URL'] ? URI.parse(ENV['S3_URL']).path   : DEFAULT_SERVICE,
68:              :default_protocol => ENV['S3_URL'] ? URI.parse(ENV['S3_URL']).scheme : DEFAULT_PROTOCOL }, 
69:            aws_access_key_id     || ENV['AWS_ACCESS_KEY_ID'], 
70:            aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'], 
71:            params)
72:     end

Public Instance methods

Retrieve bucket location

 s3.create_bucket('my-awesome-bucket-us')        #=> true
 puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)

 s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
 puts s3.bucket_location('my-awesome-bucket-eu')            #=> 'EU'

[Source]

     # File lib/s3/right_s3_interface.rb, line 215
215:     def bucket_location(bucket, headers={})
216:       req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}?location"))
217:       request_info(req_hash, S3BucketLocationParser.new)
218:     rescue
219:       on_exception
220:     end

Removes all keys from bucket. Returns true or an exception.

 s3.clear_bucket('my_awesome_bucket') #=> true

[Source]

     # File lib/s3/right_s3_interface.rb, line 764
764:     def clear_bucket(bucket)
765:       incrementally_list_bucket(bucket) do |results|
766:         results[:contents].each { |key| delete(bucket, key[:key]) }
767:       end
768:       true
769:     rescue
770:       on_exception
771:     end

Copy an object.

 directive: :copy    - copy meta-headers from source (default value)
            :replace - replace meta-headers by passed ones

 # copy a key with meta-headers
 s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}

 # copy a key, overwrite meta-headers
 s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}

see: docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html

     http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html

[Source]

     # File lib/s3/right_s3_interface.rb, line 632
632:     def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
633:       dest_key ||= src_key
634:       headers['x-amz-metadata-directive'] = directive.to_s.upcase
635:       headers['x-amz-copy-source']        = "#{src_bucket}/#{CGI::escape src_key}"
636:       req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{dest_bucket}/#{CGI::escape dest_key}"))
637:       request_info(req_hash, S3CopyParser.new)
638:     rescue
639:       on_exception
640:     end

Creates new bucket. Returns true or an exception.

 # create a bucket at American server
 s3.create_bucket('my-awesome-bucket-us') #=> true
 # create a bucket at European server
 s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true

[Source]

     # File lib/s3/right_s3_interface.rb, line 195
195:     def create_bucket(bucket, headers={})
196:       data = nil
197:       unless headers[:location].blank?
198:         data = "<CreateBucketConfiguration><LocationConstraint>#{headers[:location].to_s.upcase}</LocationConstraint></CreateBucketConfiguration>"
199:       end
200:       req_hash = generate_rest_request('PUT', headers.merge(:url=>bucket, :data => data))
201:       request_info(req_hash, RightHttp2xxParser.new)
202:     rescue Exception => e
203:         # if the bucket exists AWS returns an error for the location constraint interface. Drop it
204:       e.is_a?(RightAws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true  : on_exception
205:     end

Generates link for ‘CreateBucket’.

 s3.create_bucket_link('my_awesome_bucket') #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 851
851:     def create_bucket_link(bucket, expires=nil, headers={})
852:       generate_link('PUT', headers.merge(:url=>bucket), expires)
853:     rescue
854:       on_exception
855:     end

Deletes key. Returns true or an exception.

 s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true

[Source]

     # File lib/s3/right_s3_interface.rb, line 612
612:     def delete(bucket, key='', headers={})
613:       req_hash = generate_rest_request('DELETE', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
614:       request_info(req_hash, RightHttp2xxParser.new)
615:     rescue
616:       on_exception
617:     end

Deletes a bucket. Bucket must be empty! Returns true or an exception.

 s3.delete_bucket('my_awesome_bucket')  #=> true

See also: force_delete_bucket method

[Source]

     # File lib/s3/right_s3_interface.rb, line 259
259:     def delete_bucket(bucket, headers={})
260:       req_hash = generate_rest_request('DELETE', headers.merge(:url=>bucket))
261:       request_info(req_hash, RightHttp2xxParser.new)
262:     rescue
263:       on_exception
264:     end

Generates link for ‘DeleteBucket’.

 s3.delete_bucket_link('my_awesome_bucket') #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 861
861:     def delete_bucket_link(bucket, expires=nil,  headers={})
862:       generate_link('DELETE', headers.merge(:url=>bucket), expires)
863:     rescue
864:       on_exception
865:     end

Deletes all keys where the ‘folder_key’ may be assumed as ‘folder’ name. Returns an array of string keys that have been deleted.

 s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
 s3.delete_folder('my_awesome_bucket','test')                       #=> ['test','test/2/34','test/3']

[Source]

     # File lib/s3/right_s3_interface.rb, line 789
789:     def delete_folder(bucket, folder_key, separator='/')
790:       folder_key.chomp!(separator)
791:       allkeys = []
792:       incrementally_list_bucket(bucket, { 'prefix' => folder_key }) do |results|
793:         keys = results[:contents].map{ |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil}.compact
794:         keys.each{ |key| delete(bucket, key) }
795:         allkeys << keys
796:       end
797:       allkeys
798:     rescue
799:       on_exception
800:     end

Generates link for ‘DeleteObject’.

 s3.delete_link('my_awesome_bucket',key) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 920
920:     def delete_link(bucket, key, expires=nil, headers={})
921:       generate_link('DELETE', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}"), expires)
922:     rescue
923:       on_exception
924:     end

Deletes all keys in bucket then deletes bucket. Returns true or an exception.

 s3.force_delete_bucket('my_awesome_bucket')

[Source]

     # File lib/s3/right_s3_interface.rb, line 777
777:     def force_delete_bucket(bucket)
778:       clear_bucket(bucket)
779:       delete_bucket(bucket)
780:     rescue
781:       on_exception
782:     end

Retrieves object data from Amazon. Returns a hash or an exception.

 s3.get('my_awesome_bucket', 'log/curent/1.log') #=>

     {:object  => "Ola-la!",
      :headers => {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
                   "content-type"      => "",
                   "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
                   "date"              => "Wed, 23 May 2007 09:08:03 GMT",
                   "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
                   "x-amz-meta-family" => "Woho556!",
                   "x-amz-request-id"  => "0000000C246D770C",
                   "server"            => "AmazonS3",
                   "content-length"    => "7"}}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned. Example:

 foo = File.new('./chunder.txt', File::CREAT|File::RDWR)
 rhdr = s3.get('aws-test', 'Cent5V1_7_1.img.part.00') do |chunk|
   foo.write(chunk)
 end
 foo.close

[Source]

     # File lib/s3/right_s3_interface.rb, line 515
515:     def get(bucket, key, headers={}, &block)
516:       req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
517:       request_info(req_hash, S3HttpResponseBodyParser.new, &block)
518:     rescue
519:       on_exception
520:     end

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.

 s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
   {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
                 "content-type"=>"application/xml;charset=ISO-8859-1",
                 "date"=>"Wed, 23 May 2007 09:40:16 GMT",
                 "x-amz-request-id"=>"B183FA7AB5FBB4DD",
                 "server"=>"AmazonS3",
                 "transfer-encoding"=>"chunked"},
    :object  => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
                 <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
                 <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
                 16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
                 <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }

[Source]

     # File lib/s3/right_s3_interface.rb, line 683
683:     def get_acl(bucket, key='', headers={})
684:       key = key.blank? ? '' : "/#{CGI::escape key}"
685:       req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
686:       request_info(req_hash, S3HttpResponseBodyParser.new) 
687:     rescue
688:       on_exception
689:     end

Generates link for ‘GetACL’.

 s3.get_acl_link('my_awesome_bucket',key) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 931
931:     def get_acl_link(bucket, key='', headers={})
932:       return generate_link('GET', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}?acl"))
933:     rescue
934:       on_exception
935:     end

Retrieves the ACL (access control policy) for a bucket or object. Returns a hash of {:owner, :grantees}

 s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>

 { :grantees=>
   { "16...2a"=>
     { :display_name=>"root",
       :permissions=>["FULL_CONTROL"],
       :attributes=>
        { "xsi:type"=>"CanonicalUser",
          "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
    "http://acs.amazonaws.com/groups/global/AllUsers"=>
      { :display_name=>"AllUsers",
        :permissions=>["READ"],
        :attributes=>
         { "xsi:type"=>"Group",
           "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
  :owner=>
    { :id=>"16..2a",
      :display_name=>"root"}}

[Source]

     # File lib/s3/right_s3_interface.rb, line 713
713:     def get_acl_parse(bucket, key='', headers={})
714:       key = key.blank? ? '' : "/#{CGI::escape key}"
715:       req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
716:       acl = request_info(req_hash, S3AclParser.new(:logger => @logger))
717:       result = {}
718:       result[:owner]    = acl[:owner]
719:       result[:grantees] = {}
720:       acl[:grantees].each do |grantee|
721:         key = grantee[:id] || grantee[:uri]
722:         if result[:grantees].key?(key)
723:           result[:grantees][key][:permissions] << grantee[:permissions]
724:         else
725:           result[:grantees][key] = 
726:             { :display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
727:               :permissions  => grantee[:permissions].to_a,
728:               :attributes   => grantee[:attributes] }
729:         end
730:       end
731:       result
732:     rescue
733:       on_exception
734:     end

Retrieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.

[Source]

     # File lib/s3/right_s3_interface.rb, line 746
746:     def get_bucket_acl(bucket, headers={})
747:       return get_acl(bucket, '', headers)
748:     rescue
749:       on_exception
750:     end

Generates link for ‘GetBucketACL’.

 s3.get_bucket_acl_link('my_awesome_bucket') #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 951
951:     def get_bucket_acl_link(bucket, headers={})
952:       return get_acl_link(bucket, '', headers)
953:     rescue
954:       on_exception
955:     end

Generates link for ‘GetObject’.

If a bucket name complies with the virtual hosting naming requirements, then returns a link with the bucket as a part of the host name:

 s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...

otherwise returns an old style link (the bucket is a part of path):

 s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...

see docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html

[Source]

     # File lib/s3/right_s3_interface.rb, line 900
900:     def get_link(bucket, key, expires=nil, headers={})
901:       generate_link('GET', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}"), expires)
902:     rescue
903:       on_exception
904:     end

Retrieves the logging configuration for a bucket. Returns a hash of {:enabled, :targetbucket, :targetprefix}

s3.interface.get_logging_parse(:bucket => "asset_bucket")

  => {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 229
229:     def get_logging_parse(params)
230:       AwsUtils.mandatory_arguments([:bucket], params)
231:       AwsUtils.allow_only([:bucket, :headers], params)
232:       params[:headers] = {} unless params[:headers]
233:       req_hash = generate_rest_request('GET', params[:headers].merge(:url=>"#{params[:bucket]}?logging"))
234:       request_info(req_hash, S3LoggingParser.new)
235:     rescue
236:       on_exception
237:     end

Retrieves object data only (headers are omitted). Returns string or an exception.

 s3.get_object('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'

[Source]

     # File lib/s3/right_s3_interface.rb, line 806
806:     def get_object(bucket, key, headers={})
807:       get(bucket, key, headers)[:object]
808:     rescue
809:       on_exception
810:     end

Retrieves object metadata. Returns a hash of http_response_headers.

 s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
   {"last-modified"     => "Wed, 23 May 2007 09:08:04 GMT",
    "content-type"      => "",
    "etag"              => "\"000000000096f4ee74bc4596443ef2a4\"",
    "date"              => "Wed, 23 May 2007 09:08:03 GMT",
    "x-amz-id-2"        => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
    "x-amz-meta-family" => "Woho556!",
    "x-amz-request-id"  => "0000000C246D770C",
    "server"            => "AmazonS3",
    "content-length"    => "7"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 601
601:     def head(bucket, key, headers={})
602:       req_hash = generate_rest_request('HEAD', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"))
603:       request_info(req_hash, S3HttpResponseHeadParser.new)
604:     rescue
605:       on_exception
606:     end

Generates link for ‘HeadObject’.

 s3.head_link('my_awesome_bucket',key) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 910
910:     def head_link(bucket, key, expires=nil,  headers={})
911:       generate_link('HEAD', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}"), expires)
912:     rescue
913:       on_exception
914:     end

Incrementally list the contents of a bucket. Yields the following hash to a block:

 s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, 'delimiter'=>'' }) yields
  {
    :name => 'bucketname',
    :prefix => 'subfolder/',
    :marker => 'fileN.jpg',
    :max_keys => 234,
    :delimiter => '/',
    :is_truncated => true,
    :next_marker => 'fileX.jpg',
    :contents => [
      { :key => "file1",
        :last_modified => "2007-05-18T07:00:59.000Z",
        :e_tag => "000000000059075b964b07152d234b70",
        :size => 3,
        :storage_class => "STANDARD",
        :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
        :owner_display_name => "root"
      }, { :key, ...}, ... {:key, ...}
    ]
    :common_prefixes => [
      "prefix1",
      "prefix2",
      ...,
      "prefixN"
    ]
  }

[Source]

     # File lib/s3/right_s3_interface.rb, line 317
317:     def incrementally_list_bucket(bucket, options={}, headers={}, &block)
318:       internal_options = options.symbolize_keys
319:       begin 
320:         internal_bucket = bucket.dup
321:         internal_bucket  += '?'+internal_options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless internal_options.blank?
322:         req_hash = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
323:         response = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
324:         there_are_more_keys = response[:is_truncated]
325:         if(there_are_more_keys)
326:           internal_options[:marker] = decide_marker(response)
327:           total_results = response[:contents].length + response[:common_prefixes].length
328:           internal_options['max-keys'] ? (internal_options['max-keys'] -= total_results) : nil 
329:         end
330:         yield response
331:       end while there_are_more_keys && under_max_keys(internal_options)
332:       true
333:     rescue
334:       on_exception
335:     end

docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html

[Source]

     # File lib/s3/right_s3_interface.rb, line 105
105:     def is_dns_bucket?(bucket_name)
106:       bucket_name = bucket_name.to_s
107:       return nil unless (3..63) === bucket_name.size
108:       bucket_name.split('.').each do |component|
109:         return nil unless component[/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/]
110:       end
111:       true
112:     end

Returns an array of customer‘s buckets. Each item is a hash.

 s3.list_all_my_buckets #=>
   [{:owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
     :owner_display_name => "root",
     :name               => "bucket_name",
     :creation_date      => "2007-04-19T18:47:43.000Z"}, ..., {...}]

[Source]

     # File lib/s3/right_s3_interface.rb, line 181
181:     def list_all_my_buckets(headers={})
182:       req_hash = generate_rest_request('GET', headers.merge(:url=>''))
183:       request_info(req_hash, S3ListAllMyBucketsParser.new(:logger => @logger))
184:     rescue
185:       on_exception
186:     end

Generates link for ‘ListAllMyBuckets’.

 s3.list_all_my_buckets_link #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 841
841:     def list_all_my_buckets_link(expires=nil, headers={})
842:       generate_link('GET', headers.merge(:url=>''), expires)
843:     rescue
844:       on_exception
845:     end

Returns an array of bucket‘s keys. Each array item (key data) is a hash.

 s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, 'delimiter'=>'' }) #=>
   [{:key                => "test1",
     :last_modified      => "2007-05-18T07:00:59.000Z",
     :owner_id           => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
     :owner_display_name => "root",
     :e_tag              => "000000000059075b964b07152d234b70",
     :storage_class      => "STANDARD",
     :size               => 3,
     :service=> {'is_truncated' => false,
                 'prefix'       => "t",
                 'marker'       => "",
                 'name'         => "my_awesome_bucket",
                 'max-keys'     => "5"}, ..., {...}]

[Source]

     # File lib/s3/right_s3_interface.rb, line 282
282:     def list_bucket(bucket, options={}, headers={})
283:       bucket  += '?'+options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
284:       req_hash = generate_rest_request('GET', headers.merge(:url=>bucket))
285:       request_info(req_hash, S3ListBucketParser.new(:logger => @logger))
286:     rescue
287:       on_exception
288:     end

Generates link for ‘ListBucket’.

 s3.list_bucket_link('my_awesome_bucket') #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 871
871:     def list_bucket_link(bucket, options=nil, expires=nil, headers={})
872:       bucket += '?' + options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
873:       generate_link('GET', headers.merge(:url=>bucket), expires)
874:     rescue
875:       on_exception
876:     end

Move an object.

 directive: :copy    - copy meta-headers from source (default value)
            :replace - replace meta-headers by passed ones

 # move bucket1/key1 to bucket1/key2
 s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}

 # move bucket1/key1 to bucket2/key2 with new meta-headers assignment
 s3.copy('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 652
652:     def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
653:       copy_result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
654:       # delete an original key if it differs from a destination one
655:       delete(src_bucket, src_key) unless src_bucket == dest_bucket && src_key == dest_key
656:       copy_result
657:     end

Saves object to Amazon. Returns true or an exception. Any header starting with AMAZON_METADATA_PREFIX is considered user metadata. It will be stored with the object and returned when you retrieve the object. The total size of the HTTP request, not including the body, must be less than 4 KB.

 s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true

This method is capable of ‘streaming’ uploads; that is, it can upload data from a file or other IO object without first reading all the data into memory. This is most useful for large PUTs - it is difficult to read a 2 GB file entirely into memory before sending it to S3. To stream an upload, pass an object that responds to ‘read’ (like the read method of IO) and to either ‘lstat’ or ‘size’. For files, this means streaming is enabled by simply making the call:

 s3.put(bucket_name, 'S3keyname.forthisfile',  File.open('localfilename.dat'))

If the IO object you wish to stream from responds to the read method but doesn‘t implement lstat or size, you can extend the object dynamically to implement these methods, or define your own class which defines these methods. Be sure that your class returns ‘nil’ from read() after having read ‘size’ bytes. Otherwise S3 will drop the socket after ‘Content-Length’ bytes have been uploaded, and HttpConnection will interpret this as an error.

This method now supports very large PUTs, where very large is > 2 GB.

For Win32 users: Files and IO objects should be opened in binary mode. If a text mode IO object is passed to PUT, it will be converted to binary mode.

[Source]

     # File lib/s3/right_s3_interface.rb, line 392
392:     def put(bucket, key, data=nil, headers={})
393:       # On Windows, if someone opens a file in text mode, we must reset it so
394:       # to binary mode for streaming to work properly
395:       if(data.respond_to?(:binmode))
396:         data.binmode
397:       end
398:       if (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
399:          (data.respond_to?(:size)  && data.size       >= USE_100_CONTINUE_PUT_SIZE)
400:         headers['expect'] = '100-continue'
401:       end
402:       req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}", :data=>data))
403:       request_info(req_hash, RightHttp2xxParser.new)
404:     rescue
405:       on_exception
406:     end

Sets the ACL on a bucket or object.

[Source]

     # File lib/s3/right_s3_interface.rb, line 737
737:     def put_acl(bucket, key, acl_xml_doc, headers={})
738:       key = key.blank? ? '' : "/#{CGI::escape key}"
739:       req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}#{key}?acl", :data=>acl_xml_doc))
740:       request_info(req_hash, S3HttpResponseBodyParser.new)
741:     rescue
742:       on_exception
743:     end

Generates link for ‘PutACL’.

 s3.put_acl_link('my_awesome_bucket',key) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 941
941:     def put_acl_link(bucket, key='', headers={})
942:       return generate_link('PUT', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}?acl"))
943:     rescue
944:       on_exception
945:     end

Sets the ACL on a bucket only.

[Source]

     # File lib/s3/right_s3_interface.rb, line 753
753:     def put_bucket_acl(bucket, acl_xml_doc, headers={})
754:       return put_acl(bucket, '', acl_xml_doc, headers)
755:     rescue
756:       on_exception
757:     end

Generates link for ‘PutBucketACL’.

 s3.put_bucket_acl_link('my_awesome_bucket', acl_xml_doc) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 961
961:     def put_bucket_acl_link(bucket, acl_xml_doc, headers={})
962:       return put_acl_link(bucket, '', acl_xml_doc, headers)
963:     rescue
964:       on_exception
965:     end

Generates link for ‘PutObject’.

 s3.put_link('my_awesome_bucket',key, object) #=> url string

[Source]

     # File lib/s3/right_s3_interface.rb, line 882
882:     def put_link(bucket, key, data=nil, expires=nil, headers={})
883:       generate_link('PUT', headers.merge(:url=>"#{bucket}/#{AwsUtils::URLencode key}", :data=>data), expires)
884:     rescue
885:       on_exception
886:     end

Sets logging configuration for a bucket from the XML configuration document.

  params:
   :bucket
   :xmldoc

[Source]

     # File lib/s3/right_s3_interface.rb, line 243
243:     def put_logging(params)  
244:       AwsUtils.mandatory_arguments([:bucket,:xmldoc], params)
245:       AwsUtils.allow_only([:bucket,:xmldoc, :headers], params)
246:       params[:headers] = {} unless params[:headers]
247:       req_hash = generate_rest_request('PUT', params[:headers].merge(:url=>"#{params[:bucket]}?logging", :data => params[:xmldoc]))
248:       request_info(req_hash, S3TrueParser.new)
249:     rescue
250:       on_exception
251:     end

Rename an object.

 # rename bucket1/key1 to bucket1/key2
 s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 664
664:     def rename(src_bucket, src_key, dest_key, headers={})
665:       move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
666:     end

New experimental API for retrieving objects, introduced in RightAws 1.8.1. retrieve_object is similar in function to the older function get. It allows for optional verification of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.

If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values. Mandatory arguments:

  :bucket - the bucket in which the object is stored
  :key    - the object address (or path) within the bucket

Optional arguments:

  :headers - hash of additional HTTP headers to include with the request
  :md5     - MD5 checksum against which to verify the retrieved object

 s3.retrieve_object(:bucket => "foobucket", :key => "foo")
   => {:verified_md5=>false,
       :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                  "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
                  "content-type"=>"",
                  "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                  "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
                  "x-amz-request-id"=>"EE4855DE27A2688C",
                  "server"=>"AmazonS3",
                  "content-length"=>"10"},
       :object=>"polemonium"}

 s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
   => {:verified_md5=>true,
       :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
                  "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
                  "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                  "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
                  "x-amz-request-id"=>"6E7F317356580599",
                  "server"=>"AmazonS3",
                  "content-length"=>"10"},
       :object=>"polemonium"}

If a block is provided, yields incrementally to the block as the response is read. For large responses, this function is ideal as the response can be ‘streamed’. The hash containing header fields is still returned.

[Source]

     # File lib/s3/right_s3_interface.rb, line 563
# Fetches an object from S3 and optionally checks its MD5 sum.
#
# Mandatory params: :bucket, :key. Optional: :headers (extra HTTP
# headers), :md5 (expected checksum). When :md5 is supplied the returned
# hash carries :verified_md5 => true/false depending on whether the S3
# ETag matches; without :md5 it is always false. A given block receives
# the response body incrementally, so large objects can be streamed.
def retrieve_object(params, &block)
  AwsUtils.mandatory_arguments([:bucket, :key], params)
  AwsUtils.allow_only([:bucket, :key, :headers, :md5], params)
  params[:headers] ||= {}
  request_headers = params[:headers].merge(:url => "#{params[:bucket]}/#{CGI::escape params[:key]}")
  response = request_info(generate_rest_request('GET', request_headers), S3HttpResponseBodyParser.new, &block)
  expected_md5 = params[:md5]
  # Only touch the etag header when the caller asked for verification,
  # mirroring the short-circuit in the original check.
  response[:verified_md5] =
    if expected_md5
      response[:headers]['etag'].gsub(/\"/, '') == expected_md5
    else
      false
    end
  response
rescue
  on_exception
end

Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.

[Source]

     # File lib/s3/right_s3_interface.rb, line 581
# Strict wrapper around retrieve_object: the :md5 argument becomes
# mandatory and a checksum mismatch raises an AwsError instead of
# merely flagging :verified_md5 => false in the response.
def retrieve_object_and_verify(params, &block)
  AwsUtils.mandatory_arguments([:md5], params)
  result = retrieve_object(params, &block)
  unless result[:verified_md5]
    raise AwsError.new("Retrieved object failed MD5 checksum verification: #{result.inspect}")
  end
  result
end

New experimental API for uploading objects, introduced in RightAws 1.8.1. store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments. The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).

If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.

The optional argument of :headers allows the caller to specify arbitrary request header values.

s3.store_object(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )

  => {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
      "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
      "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
      :verified_md5=>true,
      "x-amz-request-id"=>"63916465939995BA",
      "server"=>"AmazonS3",
      "content-length"=>"0"}

s3.store_object(:bucket => "foobucket", :key => "foo", :data => "polemonium" )

  => {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
      "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
      "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
      :verified_md5=>false,
      "x-amz-request-id"=>"3B25A996BC2CDD3B",
      "server"=>"AmazonS3",
      "content-length"=>"0"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 438
# Uploads an object to S3 and returns the full response metadata hash
# (including the Amazon request id and the ETag).
#
# Mandatory params: :bucket, :key, :data. Optional: :headers (extra HTTP
# headers), :md5 (expected checksum). When :md5 is supplied the returned
# hash carries :verified_md5 => true/false depending on whether the S3
# ETag matches; without :md5 it is always false.
def store_object(params)
  AwsUtils.allow_only([:bucket, :key, :data, :headers, :md5], params)
  AwsUtils.mandatory_arguments([:bucket, :key, :data], params)
  params[:headers] ||= {}

  # On Windows a file opened in text mode must be reset to binary mode
  # for streaming to work properly.
  params[:data].binmode if params[:data].respond_to?(:binmode)
  # Request a 100-continue handshake for large uploads so the body is
  # not transmitted before S3 accepts the request headers.
  if (params[:data].respond_to?(:lstat) && params[:data].lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
     (params[:data].respond_to?(:size)  && params[:data].size       >= USE_100_CONTINUE_PUT_SIZE)
    params[:headers]['expect'] = '100-continue'
  end

  req_hash = generate_rest_request('PUT', params[:headers].merge(:url => "#{params[:bucket]}/#{CGI::escape params[:key]}", :data => params[:data]))
  resp = request_info(req_hash, S3HttpResponseHeadParser.new)
  # The comparison already yields a boolean, so no `? true : false` is needed.
  resp[:verified_md5] = params[:md5] ? (resp['etag'].gsub(/\"/, '') == params[:md5]) : false
  resp
rescue
  on_exception
end

Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the ‘md5’ argument. If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict. This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.

s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )

  => {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
      "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
      "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
      :verified_md5=>true,
      "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
      "server"=>"AmazonS3",
      "content-length"=>"0"}

s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2953", :data => "polemonium" )

  RightAws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
                                                                         "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
                                                                         "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
                                                                         :verified_md5=>false,
                                                                         "x-amz-request-id"=>"0D7ADE09F42606F2",
                                                                         "server"=>"AmazonS3",
                                                                         "content-length"=>"0"}

[Source]

     # File lib/s3/right_s3_interface.rb, line 482
# Strict wrapper around store_object: the :md5 argument becomes
# mandatory and a checksum mismatch raises an AwsError instead of
# merely flagging :verified_md5 => false in the response.
def store_object_and_verify(params)
  AwsUtils.mandatory_arguments([:md5], params)
  resp = store_object(params)
  unless resp[:verified_md5]
    raise AwsError.new("Uploaded object failed MD5 checksum verification: #{resp.inspect}")
  end
  resp
end

Private Instance methods

[Source]

     # File lib/s3/right_s3_interface.rb, line 339
# Chooses the marker with which to resume an incremental bucket listing.
# Preference order: the explicit :next_marker reported by S3, otherwise
# whichever of the last listed key / last common prefix sorts higher.
# Returns nil when the response contains nothing to resume from.
def decide_marker(response)
  return response[:next_marker].dup if response[:next_marker]
  # Guard the lookup: the original `response[:contents].last[:key]`
  # raised NoMethodError (nil[:key]) on an empty :contents list before
  # the nil-key branch below could fall back to the last common prefix.
  last_entry  = response[:contents].last
  last_key    = last_entry && last_entry[:key]
  last_prefix = response[:common_prefixes].last
  if !last_key
    return nil if !last_prefix
    last_prefix.dup
  elsif !last_prefix
    last_key.dup
  else
    last_key > last_prefix ? last_key.dup : last_prefix.dup
  end
end

[Source]

     # File lib/s3/right_s3_interface.rb, line 353
# True while more keys may still be fetched: either no 'max-keys' limit
# was requested, or the remaining allowance is still positive.
# (Restores the symbol-key lookup that was garbled in this listing.)
def under_max_keys(internal_options)
  internal_options[:'max-keys'] ? internal_options[:'max-keys'] > 0 : true
end

[Validate]