Merge pull request #58 from yaxia/dev
Changes for v0.11.2-preview
vinjiang authored Oct 31, 2016
2 parents 61c7227 + 2231cfa commit 9c95c2e
Showing 6 changed files with 65 additions and 46 deletions.
10 changes: 9 additions & 1 deletion ChangeLog.md
@@ -1,3 +1,11 @@
2016.10 - version 0.11.2-preview

ALL
* Fixed the issue where it retries on HTTP 4xx errors.

BLOB
* Fixed the issue of wrong "Content-Encoding". [#49](https://github.com/Azure/azure-storage-ruby/issues/49)

2016.09 - version 0.11.1-preview

ALL
@@ -6,7 +14,7 @@ ALL
* Added the retry for the connection reset error.

BLOB
* Fixed the issue where "list_blobs" doesn't work when delimiter is specified. (https://github.com/Azure/azure-storage-ruby/issues/41)
* Fixed the issue where "list_blobs" doesn't work when delimiter is specified. [#41](https://github.com/Azure/azure-storage-ruby/issues/41)

2016.08 - version 0.11.0-preview

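Taken together, the two changelog entries above mean that plain client (4xx) failures are no longer retried and that a blob's character set now travels in its Content-Type rather than its Content-Encoding. A minimal round-trip sketch of the new convention, assuming `blobs` is an already-configured `Azure::Storage::Blob::BlobService` instance and `'mycontainer'` is a hypothetical, existing container:

```ruby
# Upload non-ASCII content and record its charset in the blob's content type.
content = '你好，世界'.encode('GB18030')
blobs.create_block_blob 'mycontainer', 'greeting.txt', content,
                        :content_type => 'text/plain; charset=GB18030'

# On download, the charset stored in Content-Type tells us how to decode the body.
blob, body = blobs.get_blob 'mycontainer', 'greeting.txt'
charset = blob.properties[:content_type][/charset=(.+)\z/, 1]  # => "GB18030"
body.force_encoding(charset)
body == content   # => true
```

This mirrors the updated GB18030 integration tests further down in this commit.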
28 changes: 15 additions & 13 deletions lib/azure/storage/blob/blob_service.rb
@@ -34,6 +34,7 @@ module Azure::Storage

module Blob
class BlobService < StorageService
include Azure::Storage::Core::Utility
include Azure::Storage::Blob
include Azure::Storage::Blob::Container

@@ -43,24 +44,25 @@ def initialize(options = {})
super(signer, client_config.storage_account_name, options)
@host = client.storage_blob_host
end

def call(method, uri, body=nil, headers={}, options={})
# Force the request.body to the content encoding specified in the header
# (content encoding probably shouldn't be used this way)
if headers && !body.nil?
if headers['Content-Encoding'].nil?
Service::StorageService.with_header headers, 'Content-Encoding', body.encoding.to_s
if headers && !body.nil? && !(body.encoding.to_s <=> 'ASCII_8BIT')
if headers['x-ms-blob-content-type'].nil?
Service::StorageService.with_header headers, 'x-ms-blob-content-type', "text/plain; charset=#{body.encoding.to_s}"
else
body.force_encoding(headers['Content-Encoding'])
charset = parse_charset_from_content_type(headers['x-ms-blob-content-type'])
body.force_encoding(charset)
end
end

response = super

# Force the response.body to the content encoding specified in the header.
# content-encoding is echoed back for the blob and is used to store the encoding of the octet stream
if !response.nil? && !response.body.nil? && response.headers['content-encoding']
response.body.force_encoding(response.headers['content-encoding'])
# Force the response.body to the content charset specified in the header.
# Content-Type is echoed back for the blob and is used to store the encoding of the octet stream
if !response.nil? && !response.body.nil? && response.headers['Content-Type']
charset = parse_charset_from_content_type(response.headers['Content-Type'])
response.body.force_encoding(charset) if charset && charset.length > 0
end

response
@@ -99,12 +101,12 @@ def call(method, uri, body=nil, headers={}, options={})
# * +:request_id+ - String. Provides a client-generated, opaque value with a 1 KB character limit that is recorded
# in the analytics logs when storage analytics logging is enabled.
#
# See: https://msdn.microsoft.com/en-us/library/azure/dd179352.aspx
#
# NOTE: Metadata requested with the :metadata parameter must have been stored in
# accordance with the naming restrictions imposed by the 2009-09-19 version of the Blob
# service. Beginning with that version, all metadata names must adhere to the naming
# conventions for C# identifiers.
#
# See: http://msdn.microsoft.com/en-us/library/aa664670(VS.71).aspx
# conventions for C# identifiers. See: https://msdn.microsoft.com/en-us/library/aa664670(VS.71).aspx
#
# Any metadata with invalid names which were previously stored will be returned with the
# key "x-ms-invalid-name" in the metadata hash. This may contain multiple values and be an
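The overridden `call` above is where the Content-Encoding fix ([#49](https://github.com/Azure/azure-storage-ruby/issues/49)) lives: non-binary request bodies get a default `x-ms-blob-content-type` of `text/plain; charset=<encoding>`, and response bodies are re-tagged with the charset parsed from the returned `Content-Type`. A hedged sketch of the response-side step only, as an illustration rather than the library code (the method name `apply_response_charset` is mine; `parse_charset_from_content_type` is the helper added in `core/utility.rb` below):

```ruby
# Re-tag the response body with the charset stored in the blob's Content-Type.
def apply_response_charset(response)
  return response if response.nil? || response.body.nil?

  content_type = response.headers['Content-Type']
  charset = parse_charset_from_content_type(content_type) if content_type
  response.body.force_encoding(charset) if charset && !charset.empty?
  response
end
```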
39 changes: 20 additions & 19 deletions lib/azure/storage/core/filter/retry_filter.rb
@@ -115,11 +115,12 @@ def should_retry_on_local_error?(retry_data)
# incrementing counter, timestamp, etc). The retry_data object
# will be the same instance throughout the lifetime of the request.
def should_retry_on_error?(response, retry_data)
response = response || retry_data[:error].http_response if retry_data[:error] && retry_data[:error].respond_to?('http_response')
unless response
retry_data[:retryable] = false unless retry_data[:error]
return retry_data[:retryable]
end

# If a request sent to the secondary location fails with 404 (Not Found), it is possible
# that the resource replication is not finished yet. So, in case of 404 only in the secondary
# location, the failure should still be retryable.
@@ -133,7 +134,7 @@ def should_retry_on_error?(response, retry_data)
else
retry_data[:status_code] = nil
end
end
end

# Non-timeout Cases
if (retry_data[:status_code] >= 300 && retry_data[:status_code] != 408)
@@ -142,26 +143,26 @@ def should_retry_on_error?(response, retry_data)
retry_data[:retryable] = false;
return false;
end
end

# When absorb_conditional_errors_on_retry is set (for append blob)
if (retry_data[:request_options] && retry_data[:request_options][:absorb_conditional_errors_on_retry])
if (retry_data[:status_code] == 412)
# When appending a block with a precondition failure and there was a server error before, we ignore the error.
if (retry_data[:last_server_error])
retry_data[:error] = nil;

# When absorb_conditional_errors_on_retry is set (for append blob)
if (retry_data[:request_options] && retry_data[:request_options][:absorb_conditional_errors_on_retry])
if (retry_data[:status_code] == 412)
# When appending a block with a precondition failure and there was a server error before, we ignore the error.
if (retry_data[:last_server_error])
retry_data[:error] = nil;
retry_data[:retryable] = true;
else
retry_data[:retryable] = false;
end
elsif (retry_data[:retryable] && retry_data[:status_code] >= 500 && retry_data[:status_code] < 600)
# Retry on the server error
retry_data[:retryable] = true;
else
retry_data[:retryable] = false;
retry_data[:last_server_error] = true;
end
elsif (retry_data[:retryable] && retry_data[:status_code] >= 500 && retry_data[:status_code] < 600)
# Retry on the server error
retry_data[:retryable] = true;
retry_data[:last_server_error] = true;
elsif (retry_data[:status_code] < 500)
# No retry on the client error
retry_data[:retryable] = false;
end
elsif (retry_data[:status_code] < 500)
# No retry on the client error
retry_data[:retryable] = false;
end
end

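In plain terms, the reworked `should_retry_on_error?` stops retrying plain client errors (the 4xx fix noted in the changelog) while keeping server errors retryable, with a special case for append blobs that absorb conditional errors. A hedged summary of the non-timeout branch as a free-standing function (the name, parameters, and return symbols are mine; the real filter mutates `retry_data` instead of returning a verdict):

```ruby
# Summarise the non-timeout retry decision for a response status code.
def retry_verdict(status, absorb_conditional: false, last_server_error: false)
  return :no_retry if status == 501 || status == 505   # never retry "not implemented" / "version not supported"

  if absorb_conditional                                 # append blob with :absorb_conditional_errors_on_retry
    if status == 412
      # A precondition failure right after a server error is absorbed and retried.
      return last_server_error ? :retry_and_absorb_error : :no_retry
    end
    return :retry if status >= 500 && status < 600      # server error: retry and remember it
  elsif status < 500
    return :no_retry                                     # client error: do not retry (the 4xx fix)
  end

  :retry                                                 # otherwise keep the earlier retryable state
end
```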
7 changes: 7 additions & 0 deletions lib/azure/storage/core/utility.rb
@@ -107,6 +107,13 @@ def get_certificate(private_key_file)
def initialize_external_logger(logger)
Loggerx.initialize_external_logger(logger)
end

def parse_charset_from_content_type(content_type)
if (content_type && content_type.length > 0)
charset = content_type.split(';').delete_if { |attribute| !attribute.lstrip.start_with?('charset=') }.map { |x| x.lstrip }[0]
charset['charset='.length...charset.length] if charset
end
end
end

# Logger
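The new `parse_charset_from_content_type` simply picks the `charset=` attribute out of a Content-Type value. Expected behaviour, sketched as a usage example (the return values are my reading of the code above):

```ruby
parse_charset_from_content_type('text/html; charset=GB18030')  # => "GB18030"
parse_charset_from_content_type('text/plain; charset=UTF-8')   # => "UTF-8"
parse_charset_from_content_type('application/octet-stream')    # => nil (no charset attribute)
parse_charset_from_content_type(nil)                           # => nil
```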
2 changes: 1 addition & 1 deletion lib/azure/storage/version.rb
@@ -28,7 +28,7 @@ class Version
# Fields represent the parts defined in http://semver.org/
MAJOR = 0 unless defined? MAJOR
MINOR = 11 unless defined? MINOR
UPDATE = 1 unless defined? UPDATE
UPDATE = 2 unless defined? UPDATE
PRE = 'preview' unless defined? PRE

class << self
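The bump of `UPDATE` makes the gem report itself as the 0.11.2-preview release named in the changelog. A hypothetical reconstruction of how the fields combine (the accessor inside `class << self` falls outside this hunk):

```ruby
version = [Azure::Storage::Version::MAJOR,
           Azure::Storage::Version::MINOR,
           Azure::Storage::Version::UPDATE].join('.')
"#{version}-#{Azure::Storage::Version::PRE}"  # => "0.11.2-preview"
```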
25 changes: 13 additions & 12 deletions test/integration/blob/blob_gb18030_test.rb
@@ -150,58 +150,59 @@
}
end

it 'Read/Write Blob Block Content UTF-8' do
it 'Read/Write Blob Block Content UTF-8 with auto charset' do
GB18030TestStrings.get.each { |k,v|
blob_name = 'Read/Write Block Blob Content UTF-8 for ' + k
content = v.encode('UTF-8')

subject.create_block_blob container_name, blob_name, content
blob, returned_content = subject.get_blob container_name, blob_name

returned_content.must_equal content
}
end

it 'Read/Write Blob Block Content GB18030' do
it 'Read/Write Blob Block Content GB18030 with explicit charset' do
GB18030TestStrings.get.each { |k,v|
blob_name = 'Read/Write Block Blob Content GB18030 for ' + k
content = v.encode('GB18030')
options = { :content_encoding=> 'GB18030'}
options = { :content_type => 'text/html; charset=GB18030' }
subject.create_block_blob container_name, blob_name, content, options
blob, returned_content = subject.get_blob container_name, blob_name
returned_content.force_encoding(blob.properties[:content_encoding])
charset = blob.properties[:content_type][blob.properties[:content_type].index('charset=') + 'charset='.length...blob.properties[:content_type].length]
returned_content.force_encoding(charset)
returned_content.must_equal content
}
end

it 'Read/Write Blob Page Content UTF-8' do
it 'Read/Write Blob Page Content UTF-8 with explicit charset' do
GB18030TestStrings.get.each { |k,v|
blob_name = 'Read/Write Page Blob Content UTF-8 for ' + k
options = { :content_encoding=> 'UTF-8'}
options = { :content_type => 'text/html; charset=UTF-8' }
content = v.encode('UTF-8')
while content.bytesize < 512 do
content << 'X'
end
subject.create_page_blob container_name, blob_name, 512, options
subject.put_blob_pages container_name, blob_name, 0, 511, content
blob, returned_content = subject.get_blob container_name, blob_name
returned_content.force_encoding(blob.properties[:content_encoding])
charset = blob.properties[:content_type][blob.properties[:content_type].index('charset=') + 'charset='.length...blob.properties[:content_type].length]
returned_content.force_encoding(charset)
returned_content.must_equal content
}
end

it 'Read/Write Blob Page Content GB18030' do
it 'Read/Write Blob Page Content GB18030 with explicit charset' do
GB18030TestStrings.get.each { |k,v|
blob_name = 'Read/Write Page Blob Content GB18030 for ' + k
options = { :content_encoding=> 'GB18030'}
options = { :content_type => 'text/html; charset=GB18030' }
content = v.encode('GB18030')
while content.bytesize < 512 do
content << 'X'
end
subject.create_page_blob container_name, blob_name, 512, options
subject.put_blob_pages container_name, blob_name, 0, 511, content
blob, returned_content = subject.get_blob container_name, blob_name
returned_content.force_encoding(blob.properties[:content_encoding])
charset = blob.properties[:content_type][blob.properties[:content_type].index('charset=') + 'charset='.length...blob.properties[:content_type].length]
returned_content.force_encoding(charset)
returned_content.must_equal content
}
end
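The updated GB18030 tests slice the charset out of `blob.properties[:content_type]` by hand; assuming `Azure::Storage::Core::Utility` is mixed in, the same value comes from the new helper (sketch):

```ruby
content_type = blob.properties[:content_type]   # e.g. "text/html; charset=GB18030"

# Manual slice, as written in the tests above:
manual = content_type[content_type.index('charset=') + 'charset='.length...content_type.length]

# Equivalent result via the new utility helper:
helper = parse_charset_from_content_type(content_type)

manual == helper   # => true ("GB18030")
```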
